@mastra/server 1.0.0-beta.19 → 1.0.0-beta.20

This diff reflects the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (110)
  1. package/CHANGELOG.md +104 -0
  2. package/dist/{chunk-MZXGNP3J.cjs → chunk-3PZVR6W3.cjs} +70 -106
  3. package/dist/chunk-3PZVR6W3.cjs.map +1 -0
  4. package/dist/{chunk-K73YS7YB.cjs → chunk-AQI6GLTN.cjs} +74 -74
  5. package/dist/{chunk-K73YS7YB.cjs.map → chunk-AQI6GLTN.cjs.map} +1 -1
  6. package/dist/{chunk-3XI22UQR.cjs → chunk-GWLR6K3H.cjs} +7 -3
  7. package/dist/chunk-GWLR6K3H.cjs.map +1 -0
  8. package/dist/{chunk-E646Y4FQ.js → chunk-H66LLFDL.js} +3 -3
  9. package/dist/chunk-H66LLFDL.js.map +1 -0
  10. package/dist/{chunk-7ZYZHQRF.cjs → chunk-IMPZBQ5U.cjs} +350 -216
  11. package/dist/chunk-IMPZBQ5U.cjs.map +1 -0
  12. package/dist/{chunk-MXJEVTGK.js → chunk-J6DCK7KG.js} +5 -11
  13. package/dist/chunk-J6DCK7KG.js.map +1 -0
  14. package/dist/{chunk-OCJ3BRZM.js → chunk-JQPDZZLI.js} +178 -51
  15. package/dist/chunk-JQPDZZLI.js.map +1 -0
  16. package/dist/{chunk-5ZYHM57F.cjs → chunk-KXYDJIKD.cjs} +177 -49
  17. package/dist/chunk-KXYDJIKD.cjs.map +1 -0
  18. package/dist/{chunk-SV4AUWGY.js → chunk-LOA7RQYO.js} +3 -3
  19. package/dist/{chunk-SV4AUWGY.js.map → chunk-LOA7RQYO.js.map} +1 -1
  20. package/dist/{chunk-AWK2DVRO.js → chunk-OKURLPIJ.js} +275 -138
  21. package/dist/chunk-OKURLPIJ.js.map +1 -0
  22. package/dist/{chunk-E4ZPLXZT.js → chunk-Q4BOUQ2O.js} +66 -100
  23. package/dist/chunk-Q4BOUQ2O.js.map +1 -0
  24. package/dist/{chunk-WBLT2HL3.js → chunk-QKXZF7YS.js} +7 -3
  25. package/dist/chunk-QKXZF7YS.js.map +1 -0
  26. package/dist/{chunk-BVIEHV3I.cjs → chunk-T3ZHTCEI.cjs} +2 -2
  27. package/dist/{chunk-BVIEHV3I.cjs.map → chunk-T3ZHTCEI.cjs.map} +1 -1
  28. package/dist/{chunk-BMLUV4BH.cjs → chunk-VX3VN7FN.cjs} +3 -3
  29. package/dist/chunk-VX3VN7FN.cjs.map +1 -0
  30. package/dist/{chunk-VYNS3ZKV.cjs → chunk-Y3VSCOCE.cjs} +7 -13
  31. package/dist/chunk-Y3VSCOCE.cjs.map +1 -0
  32. package/dist/{chunk-T2AYFHKB.js → chunk-ZRBZESWG.js} +2 -2
  33. package/dist/{chunk-T2AYFHKB.js.map → chunk-ZRBZESWG.js.map} +1 -1
  34. package/dist/{dist-AF7EUPXA.cjs → dist-7NTDEVLP.cjs} +28 -28
  35. package/dist/{dist-AF7EUPXA.cjs.map → dist-7NTDEVLP.cjs.map} +1 -1
  36. package/dist/{dist-4CMHRWC4.js → dist-B5GMANA6.js} +3 -3
  37. package/dist/{dist-4CMHRWC4.js.map → dist-B5GMANA6.js.map} +1 -1
  38. package/dist/dist-EDO7GEGI.js +3 -0
  39. package/dist/{dist-MEN73GGI.js.map → dist-EDO7GEGI.js.map} +1 -1
  40. package/dist/{dist-Q2ST4SUQ.cjs → dist-OF24ZMHH.cjs} +30 -30
  41. package/dist/{dist-Q2ST4SUQ.cjs.map → dist-OF24ZMHH.cjs.map} +1 -1
  42. package/dist/{dist-Y5SYUVLY.cjs → dist-P6YSNLL3.cjs} +20 -20
  43. package/dist/{dist-Y5SYUVLY.cjs.map → dist-P6YSNLL3.cjs.map} +1 -1
  44. package/dist/{dist-AEJONJSS.js → dist-SMBO5KVB.js} +3 -3
  45. package/dist/{dist-AEJONJSS.js.map → dist-SMBO5KVB.js.map} +1 -1
  46. package/dist/{dist-TE7XRSWH.js → dist-THPMW5QR.js} +3 -3
  47. package/dist/{dist-TE7XRSWH.js.map → dist-THPMW5QR.js.map} +1 -1
  48. package/dist/{dist-NVXXJWBO.cjs → dist-VZTG56X2.cjs} +26 -26
  49. package/dist/{dist-NVXXJWBO.cjs.map → dist-VZTG56X2.cjs.map} +1 -1
  50. package/dist/dist-ZLC23RVC.cjs +16 -0
  51. package/dist/{dist-2J26LQO2.cjs.map → dist-ZLC23RVC.cjs.map} +1 -1
  52. package/dist/{dist-VPYZNWNG.js → dist-ZOZ6MHJH.js} +3 -3
  53. package/dist/{dist-VPYZNWNG.js.map → dist-ZOZ6MHJH.js.map} +1 -1
  54. package/dist/docs/README.md +31 -0
  55. package/dist/docs/SKILL.md +32 -0
  56. package/dist/docs/SOURCE_MAP.json +6 -0
  57. package/dist/docs/server/01-custom-adapters.md +377 -0
  58. package/dist/docs/server/02-reference.md +828 -0
  59. package/dist/server/handlers/a2a.cjs +9 -9
  60. package/dist/server/handlers/a2a.js +1 -1
  61. package/dist/server/handlers/agent-builder.cjs +16 -28
  62. package/dist/server/handlers/agent-builder.d.ts +51 -85
  63. package/dist/server/handlers/agent-builder.d.ts.map +1 -1
  64. package/dist/server/handlers/agent-builder.js +1 -1
  65. package/dist/server/handlers/agents.cjs +24 -24
  66. package/dist/server/handlers/agents.d.ts.map +1 -1
  67. package/dist/server/handlers/agents.js +1 -1
  68. package/dist/server/handlers/memory.cjs +28 -24
  69. package/dist/server/handlers/memory.d.ts +114 -30
  70. package/dist/server/handlers/memory.d.ts.map +1 -1
  71. package/dist/server/handlers/memory.js +1 -1
  72. package/dist/server/handlers/observability.d.ts +17 -17
  73. package/dist/server/handlers/observability.d.ts.map +1 -1
  74. package/dist/server/handlers/workflows.cjs +24 -36
  75. package/dist/server/handlers/workflows.d.ts +61 -108
  76. package/dist/server/handlers/workflows.d.ts.map +1 -1
  77. package/dist/server/handlers/workflows.js +1 -1
  78. package/dist/server/handlers.cjs +10 -10
  79. package/dist/server/handlers.js +5 -5
  80. package/dist/server/schemas/agent-builder.d.ts +1 -1
  81. package/dist/server/schemas/agent-builder.d.ts.map +1 -1
  82. package/dist/server/schemas/memory.d.ts +161 -31
  83. package/dist/server/schemas/memory.d.ts.map +1 -1
  84. package/dist/server/schemas/workflows.d.ts +214 -34
  85. package/dist/server/schemas/workflows.d.ts.map +1 -1
  86. package/dist/server/server-adapter/index.cjs +102 -94
  87. package/dist/server/server-adapter/index.cjs.map +1 -1
  88. package/dist/server/server-adapter/index.d.ts +24 -6
  89. package/dist/server/server-adapter/index.d.ts.map +1 -1
  90. package/dist/server/server-adapter/index.js +20 -13
  91. package/dist/server/server-adapter/index.js.map +1 -1
  92. package/dist/server/server-adapter/routes/agent-builder.d.ts.map +1 -1
  93. package/dist/server/server-adapter/routes/memory.d.ts.map +1 -1
  94. package/dist/server/server-adapter/routes/workflows.d.ts.map +1 -1
  95. package/dist/server/utils.d.ts.map +1 -1
  96. package/package.json +10 -9
  97. package/dist/chunk-3XI22UQR.cjs.map +0 -1
  98. package/dist/chunk-5ZYHM57F.cjs.map +0 -1
  99. package/dist/chunk-7ZYZHQRF.cjs.map +0 -1
  100. package/dist/chunk-AWK2DVRO.js.map +0 -1
  101. package/dist/chunk-BMLUV4BH.cjs.map +0 -1
  102. package/dist/chunk-E4ZPLXZT.js.map +0 -1
  103. package/dist/chunk-E646Y4FQ.js.map +0 -1
  104. package/dist/chunk-MXJEVTGK.js.map +0 -1
  105. package/dist/chunk-MZXGNP3J.cjs.map +0 -1
  106. package/dist/chunk-OCJ3BRZM.js.map +0 -1
  107. package/dist/chunk-VYNS3ZKV.cjs.map +0 -1
  108. package/dist/chunk-WBLT2HL3.js.map +0 -1
  109. package/dist/dist-2J26LQO2.cjs +0 -16
  110. package/dist/dist-MEN73GGI.js +0 -3
package/dist/{dist-VPYZNWNG.js.map → dist-ZOZ6MHJH.js.map}
@@ -1 +1 @@
- {"version":3,"sources":["../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/convert-to-openai-compatible-chat-messages.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/get-response-metadata.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/map-openai-compatible-finish-reason.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-error.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-prepare-tools.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-chat-language-model.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-completion-language-model.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-embedding-model.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-image-model.ts","../../../node_modules/.pnpm/@ai-sdk+xai@1.2.18_zod@3.25.76/node_modules/@ai-sdk/xai/src/xai-chat-settings.ts","../../../node_modules/.pnpm/@ai-sdk+xai@1.2.18_zod@3.25.76/node_modules/@ai-sdk/xai/src/xai-error.ts","../../../node_modules/.pnpm/@ai-sdk+xai@1.2.18_zod@3.25.76/node_modules/@ai-sdk/xai/src/xai-provider.ts"],"names":["UnsupportedFunctionalityError","_a","toolCall","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler"],"mappings":";;;AAQA,SAAS,kBAAkB,OAAA,EAExB;AAVH,EAAA,IAAA,EAAA,EAAA,EAAA;AAWE,EAAA,OAAA,CAAO,EAAA,GAAA,CAAA,EAAA,GAAA,OAAA,IAAA,IAAA,GAAA,MAAA,GAAA,OAAA,CAAS,gBAAA,KAAT,IAAA,GAAA,MAAA,GAAA,EAAA,CAA2B,gBAAA,KAA3B,IAAA,GAAA,KAA+C,EAAC;AACzD;AAEO,SAAS,sCACd,MAAA,EAC4B;AAC5B,EAAA,MAAM,WAAuC,EAAC;AAC9C,EAAA,KAAA,MAAW,EAAE,IAAA,EAAM,OAAA,EAAS,GAAG,OAAA,MAAa,MAAA,EAAQ;AAClD,IAAA,MAAM,QAAA,GAAW,iBAAA,CAAkB,EAAE,GAAG,SAAS,CAAA;AACjD,IAAA,QAAQ,IAAA;AACN,MAAA,KAAK,QAAA,EAAU;AACb,QAAA,QAAA,CAAS,KAAK,EAAE,IAAA,EAAM,UAAU,OAAA,EAAS,GAAG,UAAU,CAAA;AACtD,QAAA;AACF,MAAA;AAEA,MAAA,KAAK,MAAA,EAAQ;AACX,QAAA,IAAI,QAAQ,MAAA,KAAW,CAAA,IAAK,QAAQ,CAAC,CAAA,CAAE,SAAS,MAAA,EAAQ;AACtD,UAAA,QAAA,CAAS,IAAA,CAAK;YACZ,IAAA,EAAM,MAAA;YACN,OAAA,EAAS,OAAA,CAAQ,CAAC,CAAA,CAAE,IAAA;YACpB,GAAG,iBAAA,CAAkB,OAAA,CAAQ,CAAC,CAAC;WAChC,CAAA;AACD,UAAA;AACF,QAAA;AAEA,QAAA,QAAA,CAAS,IAAA,CAAK;UACZ,IAAA,EAAM,MAAA;UACN,OAAA,EAAS,OAAA,CAAQ,GAAA,CAAI,CAAA,IAAA,KAAQ;AAtCvC,YAAA,IAAA,EAAA;AAuCY,YAAA,MAAM,YAAA,GAAe,kBAAkB,IAAI,CAAA;AAC3C,YAAA,QAAQ,KAAK,IAAA;AACX,cAAA,KAAK,MAAA,EAAQ;AACX,gBAAA,OAAO,EAAE,IAAA,EAAM,MAAA,EAAQ,MAAM,IAAA,CAAK,IAAA,EAAM,GAAG,YAAA,EAAa;AAC1D,cAAA;AACA,cAAA,KAAK,OAAA,EAAS;AACZ,gBAAA,OAAO;kBACL,IAAA,EAAM,WAAA;kBACN,SAAA,EAAW;AACT,oBAAA,GAAA,EACE,KAAK,KAAA,YAAiB,GAAA,GAClB,KAAK,KAAA,CAAM,QAAA,KACX,CAAA,KAAA,EAAA,CACE,EAAA,GAAA,IAAA,CAAK,QAAA,KAAL,OAAA,EAAA,GAAiB,YACnB,WAAW,yBAAA,CAA0B,IAAA,CAAK,KAAK,CAAC,CAAA;AACxD,mBAAA;kBACA,GAAG;AACL,iBAAA;AACF,cAAA;AACA,cAAA,KAAK,MAAA,EAAQ;AACX,gBAAA,MAAM,IAAI,6BAAA,CAA8B;kBACtC,aAAA,EAAe;iBAChB,CAAA;AACH,cAAA;AACF;UACF,CAAC,CAAA;UACD,GAAG;SACJ,CAAA;AAED,QAAA;AACF,MAAA;AAEA,MAAA,KAAK,WAAA,EAAa;AAChB,QAAA,IAAI,IAAA,GAAO,EAAA;A
ACX,QAAA,MAAM,YAID,EAAC;AAEN,QAAA,KAAA,MAAW,QAAQ,OAAA,EAAS;AAC1B,UAAA,MAAM,YAAA,GAAe,kBAAkB,IAAI,CAAA;AAC3C,UAAA,QAAQ,KAAK,IAAA;AACX,YAAA,KAAK,MAAA,EAAQ;AACX,cAAA,IAAA,IAAQ,IAAA,CAAK,IAAA;AACb,cAAA;AACF,YAAA;AACA,YAAA,KAAK,WAAA,EAAa;AAChB,cAAA,SAAA,CAAU,IAAA,CAAK;AACb,gBAAA,EAAA,EAAI,IAAA,CAAK,UAAA;gBACT,IAAA,EAAM,UAAA;gBACN,QAAA,EAAU;AACR,kBAAA,IAAA,EAAM,IAAA,CAAK,QAAA;kBACX,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,IAAA,CAAK,IAAI;AACrC,iBAAA;gBACA,GAAG;eACJ,CAAA;AACD,cAAA;AACF,YAAA;AACF;AACF,QAAA;AAEA,QAAA,QAAA,CAAS,IAAA,CAAK;UACZ,IAAA,EAAM,WAAA;UACN,OAAA,EAAS,IAAA;UACT,UAAA,EAAY,SAAA,CAAU,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY,MAAA;UAC/C,GAAG;SACJ,CAAA;AAED,QAAA;AACF,MAAA;AAEA,MAAA,KAAK,MAAA,EAAQ;AACX,QAAA,KAAA,MAAW,gBAAgB,OAAA,EAAS;AAClC,UAAA,MAAM,oBAAA,GAAuB,kBAAkB,YAAY,CAAA;AAC3D,UAAA,QAAA,CAAS,IAAA,CAAK;YACZ,IAAA,EAAM,MAAA;AACN,YAAA,YAAA,EAAc,YAAA,CAAa,UAAA;YAC3B,OAAA,EAAS,IAAA,CAAK,SAAA,CAAU,YAAA,CAAa,MAAM,CAAA;YAC3C,GAAG;WACJ,CAAA;AACH,QAAA;AACA,QAAA;AACF,MAAA;MAEA,SAAS;AACP,QAAA,MAAM,gBAAA,GAA0B,IAAA;AAChC,QAAA,MAAM,IAAI,KAAA,CAAM,CAAA,kBAAA,EAAqB,gBAAgB,CAAA,CAAE,CAAA;AACzD,MAAA;AACF;AACF,EAAA;AAEA,EAAA,OAAO,QAAA;AACT;ACpIO,SAAS,mBAAA,CAAoB;AAClC,EAAA,EAAA;AACA,EAAA,KAAA;AACA,EAAA;AACF,CAAA,EAIG;AACD,EAAA,OAAO;IACL,EAAA,EAAI,EAAA,IAAA,OAAA,EAAA,GAAM,MAAA;IACV,OAAA,EAAS,KAAA,IAAA,OAAA,KAAA,GAAS,MAAA;AAClB,IAAA,SAAA,EAAW,WAAW,IAAA,GAAO,IAAI,IAAA,CAAK,OAAA,GAAU,GAAI,CAAA,GAAI;AAC1D,GAAA;AACF;ACZO,SAAS,gCACd,YAAA,EAC6B;AAC7B,EAAA,QAAQ,YAAA;IACN,KAAK,MAAA;AACH,MAAA,OAAO,MAAA;IACT,KAAK,QAAA;AACH,MAAA,OAAO,QAAA;IACT,KAAK,gBAAA;AACH,MAAA,OAAO,gBAAA;IACT,KAAK,eAAA;IACL,KAAK,YAAA;AACH,MAAA,OAAO,YAAA;AACT,IAAA;AACE,MAAA,OAAO,SAAA;AACX;AACF;AChBO,IAAM,+BAAA,GAAkC,EAAE,MAAA,CAAO;AACtD,EAAA,KAAA,EAAO,EAAE,MAAA,CAAO;AACd,IAAA,OAAA,EAAS,EAAE,MAAA,EAAO;;;;IAKlB,IAAA,EAAM,CAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;IACzB,KAAA,EAAO,CAAA,CAAE,GAAA,EAAI,CAAE,OAAA,EAAQ;IACvB,IAAA,EAAM,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,CAAE,MAAA,EAAO,EAAG,CAAA,CAAE,MAAA,EAAQ,CAAC,CAAA,CAAE,OAAA;GACzC;AACH,CAAC,CAAA;AAYM,IAAM,qCAAA,GACX;EACE,WAAA,EAAa,+BAAA;EACb,cAAA,EAAgB,CAAA,IAAA,KAAQ,IAAA,CAAK,KAAA,CAAM;AACrC,CAAA;ACvBK,SAAS,YAAA,CAAa;AAC3B,EAAA,IAAA;AACA,EAAA;AACF,CAAA,EAuBE;AAhCF,EAAA,IAAA,EAAA;AAkCE,EAAA,MAAM,KAAA,GAAA,CAAA,CAAQ,KAAA,IAAA,CAAK,KAAA,KAAL,OAAA,MAAA,GAAA,EAAA,CAAY,MAAA,IAAS,IAAA,CAAK,KAAA,GAAQ,MAAA;AAChD,EAAA,MAAM,eAA6C,EAAC;AAEpD,EAAA,IAAI,SAAS,IAAA,EAAM;AACjB,IAAA,OAAO,EAAE,KAAA,EAAO,MAAA,EAAW,WAAA,EAAa,QAAW,YAAA,EAAa;AAClE,EAAA;AAEA,EAAA,MAAM,aAAa,IAAA,CAAK,UAAA;AAExB,EAAA,MAAM,oBAOD,EAAC;AAEN,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,IAAI,IAAA,CAAK,SAAS,kBAAA,EAAoB;AACpC,MAAA,YAAA,CAAa,IAAA,CAAK,EAAE,IAAA,EAAM,kBAAA,EAAoB,MAAM,CAAA;IACtD,CAAA,MAAO;AACL,MAAA,iBAAA,CAAkB,IAAA,CAAK;QACrB,IAAA,EAAM,UAAA;QACN,QAAA,EAAU;AACR,UAAA,IAAA,EAAM,IAAA,CAAK,IAAA;AACX,UAAA,WAAA,EAAa,IAAA,CAAK,WAAA;AAClB,UAAA,UAAA,EAAY,IAAA,CAAK;AACnB;OACD,CAAA;AACH,IAAA;AACF,EAAA;AAEA,EAAA,IAAI,cAAc,IAAA,EAAM;AACtB,IAAA,OAAO,EAAE,KAAA,EAAO,iBAAA,EAAmB,WAAA,EAAa,QAAW,YAAA,EAAa;AAC1E,EAAA;AAEA,EAAA,MAAM,OAAO,UAAA,CAAW,IAAA;AAExB,EAAA,QAAQ,IAAA;IACN,KAAK,MAAA;IACL,KAAK,MAAA;IACL,KAAK,UAAA;AACH,MAAA,OAAO,EAAE,KAAA,EAAO,iBAAA,EAAmB,WAAA,EAAa,MAAM,YAAA,EAAa;IACrE,KAAK,MAAA;AACH,MAAA,OAAO;QACL,KAAA,EAAO,iBAAA;QACP,WAAA,EAAa;UACX,IAAA,EAAM,UAAA;UACN,QAAA,EAAU;AACR,YAAA,IAAA,EAAM,UAAA,CAAW;AACnB;AACF,SAAA;AACA,QAAA;AACF,OAAA;IACF,SAAS;AACP,MAAA,MAAM,gBAAA,GAA0B,IAAA;AAChC,MAAA,MAAM,IAAIA,6BAAAA,CAA8B;AACtC,QAAA,aAAA,EAAe,iCAAiC,gBAAgB,CAAA;OACjE,CAAA;AACH,IAAA;AACF;AACF;ACrCO,IAAM,oCAAN,MAAmE;;EAYxE,WAAA,CACE,OAAA,EACA,UACA,MAAA,EACA;AAfF,IAAA,IAAA,CAAS,oBAAA,GAAuB,IAAA
;AA5DlC,IAAA,IAAA,EAAA,EAAA,EAAA;AA4EI,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAChB,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AAGd,IAAA,MAAM,cAAA,GAAA,CACJ,EAAA,GAAA,MAAA,CAAO,cAAA,KAAP,OAAA,EAAA,GAAyB,qCAAA;AAC3B,IAAA,IAAA,CAAK,WAAA,GAAc,qCAAA;MACjB,cAAA,CAAe;AACjB,KAAA;AACA,IAAA,IAAA,CAAK,qBAAA,GAAwB,+BAA+B,cAAc,CAAA;AAE1E,IAAA,IAAA,CAAK,yBAAA,GAAA,CAA4B,EAAA,GAAA,MAAA,CAAO,yBAAA,KAAP,OAAA,EAAA,GAAoC,KAAA;AACvE,EAAA;AAEA,EAAA,IAAI,2BAAA,GAA2D;AAC7D,IAAA,OAAO,KAAK,MAAA,CAAO,2BAAA;AACrB,EAAA;AAEA,EAAA,IAAI,QAAA,GAAmB;AACrB,IAAA,OAAO,KAAK,MAAA,CAAO,QAAA;AACrB,EAAA;AAEA,EAAA,IAAY,mBAAA,GAA8B;AACxC,IAAA,OAAO,IAAA,CAAK,OAAO,QAAA,CAAS,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,EAAE,IAAA,EAAK;AACjD,EAAA;EAEQ,OAAA,CAAQ;AACd,IAAA,IAAA;AACA,IAAA,MAAA;AACA,IAAA,SAAA;AACA,IAAA,WAAA;AACA,IAAA,IAAA;AACA,IAAA,IAAA;AACA,IAAA,gBAAA;AACA,IAAA,eAAA;AACA,IAAA,gBAAA;AACA,IAAA,aAAA;AACA,IAAA,cAAA;AACA,IAAA;GACF,EAAiD;AApHnD,IAAA,IAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA;AAqHI,IAAA,MAAM,OAAO,IAAA,CAAK,IAAA;AAElB,IAAA,MAAM,WAAyC,EAAC;AAEhD,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,QAAA,CAAS,IAAA,CAAK;QACZ,IAAA,EAAM,qBAAA;QACN,OAAA,EAAS;OACV,CAAA;AACH,IAAA;AAEA,IAAA,IAAA,CACE,cAAA,IAAA,IAAA,GAAA,MAAA,GAAA,cAAA,CAAgB,IAAA,MAAS,MAAA,IACzB,cAAA,CAAe,MAAA,IAAU,IAAA,IACzB,CAAC,IAAA,CAAK,yBAAA,EACN;AACA,MAAA,QAAA,CAAS,IAAA,CAAK;QACZ,IAAA,EAAM,qBAAA;QACN,OAAA,EAAS,gBAAA;QACT,OAAA,EACE;OACH,CAAA;AACH,IAAA;AAEA,IAAA,MAAM,QAAA,GAAW;;AAEf,MAAA,KAAA,EAAO,IAAA,CAAK,OAAA;;AAGZ,MAAA,IAAA,EAAM,KAAK,QAAA,CAAS,IAAA;;MAGpB,UAAA,EAAY,SAAA;AACZ,MAAA,WAAA;MACA,KAAA,EAAO,IAAA;MACP,iBAAA,EAAmB,gBAAA;MACnB,gBAAA,EAAkB,eAAA;MAClB,eAAA,EAAA,CACE,cAAA,IAAA,IAAA,GAAA,MAAA,GAAA,cAAA,CAAgB,IAAA,MAAS,MAAA,GACrB,IAAA,CAAK,yBAAA,KAA8B,IAAA,IACnC,cAAA,CAAe,MAAA,IAAU,IAAA,GACvB;QACE,IAAA,EAAM,aAAA;QACN,WAAA,EAAa;AACX,UAAA,MAAA,EAAQ,cAAA,CAAe,MAAA;AACvB,UAAA,IAAA,EAAA,CAAM,EAAA,GAAA,cAAA,CAAe,IAAA,KAAf,IAAA,GAAA,EAAA,GAAuB,UAAA;AAC7B,UAAA,WAAA,EAAa,cAAA,CAAe;AAC9B;OACF,GACA,EAAE,IAAA,EAAM,aAAA,EAAc,GACxB,MAAA;MAEN,IAAA,EAAM,aAAA;AACN,MAAA,IAAA;AACA,MAAA,GAAG,gBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,gBAAA,CAAmB,KAAK,mBAAA,CAAA;MAE3B,gBAAA,EAAA,CACE,EAAA,GAAA,CAAA,EAAA,GAAA,gBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,iBAAmB,IAAA,CAAK,mBAAA,CAAA,KAAxB,IAAA,GAAA,MAAA,GAAA,EAAA,CAA8C,oBAA9C,IAAA,GAAA,EAAA,GAAA,CACA,EAAA,GAAA,gBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,iBAAmB,mBAAA,CAAA,KAAnB,IAAA,GAAA,MAAA,GAAA,EAAA,CAAyC,eAAA;;AAG3C,MAAA,QAAA,EAAU,sCAAsC,MAAM;AACxD,KAAA;AAEA,IAAA,QAAQ,IAAA;AACN,MAAA,KAAK,SAAA,EAAW;AACd,QAAA,MAAM,EAAE,KAAA,EAAO,WAAA,EAAa,YAAA,KAAiB,YAAA,CAAa;AACxD,UAAA,IAAA;AACA,UAAA,iBAAA,EAAmB,IAAA,CAAK;SACzB,CAAA;AAED,QAAA,OAAO;AACL,UAAA,IAAA,EAAM,EAAE,GAAG,QAAA,EAAU,KAAA,EAAO,WAAA,EAAY;AACxC,UAAA,QAAA,EAAU,CAAC,GAAG,QAAA,EAAU,GAAG,YAAY;AACzC,SAAA;AACF,MAAA;AAEA,MAAA,KAAK,aAAA,EAAe;AAClB,QAAA,OAAO;UACL,IAAA,EAAM;YACJ,GAAG,QAAA;AACH,YAAA,eAAA,EACE,IAAA,CAAK,yBAAA,KAA8B,IAAA,IAAQ,IAAA,CAAK,UAAU,IAAA,GACtD;cACE,IAAA,EAAM,aAAA;cACN,WAAA,EAAa;AACX,gBAAA,MAAA,EAAQ,IAAA,CAAK,MAAA;AACb,gBAAA,IAAA,EAAA,CAAM,EAAA,GAAA,IAAA,CAAK,IAAA,KAAL,IAAA,GAAA,EAAA,GAAa,UAAA;AACnB,gBAAA,WAAA,EAAa,IAAA,CAAK;AACpB;aACF,GACA,EAAE,MAAM,aAAA;AAChB,WAAA;AACA,UAAA;AACF,SAAA;AACF,MAAA;AAEA,MAAA,KAAK,aAAA,EAAe;AAClB,QAAA,OAAO;UACL,IAAA,EAAM;YACJ,GAAG,QAAA;YACH,WAAA,EAAa;cACX,IAAA,EAAM,UAAA;AACN,cAAA,QAAA,EAAU,EAAE,IAAA,EAAM,IAAA,CAAK,IAAA,CAAK,IAAA;AAC9B,aAAA;YACA,KAAA,EAAO;AACL,cAAA;gBACE,IAAA,EAAM,UAAA;gBACN,QAAA,EAAU;AACR,kBAAA,IAAA,EAAM,KAAK,IAAA,CAAK,IAAA;AAChB,kBAAA,WAAA,EAAa,KAAK,IAAA,CAAK,WAAA;AACvB,kBAAA,UAAA,EAAY,KAAK,IAAA,CAAK;AACxB;AACF;AACF;AACF,WAAA;AACA,UAAA;AACF,SAAA;AACF,MAAA;MAEA,S
AAS;AACP,QAAA,MAAM,gBAAA,GAA0B,IAAA;AAChC,QAAA,MAAM,IAAI,KAAA,CAAM,CAAA,kBAAA,EAAqB,gBAAgB,CAAA,CAAE,CAAA;AACzD,MAAA;AACF;AACF,EAAA;AAEA,EAAA,MAAM,WACJ,OAAA,EAC6D;AAtPjE,IAAA,IAAA,EAAA,EAAA,IAAA,EAAA,EAAA,EAAA,EAAA,IAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA;AAuPI,IAAA,MAAM,EAAE,MAAM,QAAA,EAAS,GAAI,KAAK,OAAA,CAAQ,EAAE,GAAG,OAAA,EAAS,CAAA;AAEtD,IAAA,MAAM,IAAA,GAAO,IAAA,CAAK,SAAA,CAAU,IAAI,CAAA;AAEhC,IAAA,MAAM;AACJ,MAAA,eAAA;MACA,KAAA,EAAO,YAAA;MACP,QAAA,EAAU;AACZ,KAAA,GAAI,MAAM,aAAA,CAAc;MACtB,GAAA,EAAK,IAAA,CAAK,OAAO,GAAA,CAAI;QACnB,IAAA,EAAM,mBAAA;AACN,QAAA,OAAA,EAAS,IAAA,CAAK;OACf,CAAA;AACD,MAAA,OAAA,EAAS,eAAe,IAAA,CAAK,MAAA,CAAO,OAAA,EAAQ,EAAG,QAAQ,OAAO,CAAA;MAC9D,IAAA,EAAM,IAAA;AACN,MAAA,qBAAA,EAAuB,IAAA,CAAK,qBAAA;MAC5B,yBAAA,EAA2B,yBAAA;AACzB,QAAA;AACF,OAAA;AACA,MAAA,WAAA,EAAa,OAAA,CAAQ,WAAA;AACrB,MAAA,KAAA,EAAO,KAAK,MAAA,CAAO;KACpB,CAAA;AAED,IAAA,MAAM,EAAE,QAAA,EAAU,SAAA,EAAW,GAAG,aAAY,GAAI,IAAA;AAChD,IAAA,MAAM,MAAA,GAAS,YAAA,CAAa,OAAA,CAAQ,CAAC,CAAA;AAGrC,IAAA,MAAM,gBAAA,GAAoD;MACxD,CAAC,IAAA,CAAK,mBAAmB,GAAG,EAAC;AAC7B,MAAA,GAAA,CAAG,EAAA,GAAA,CAAA,EAAA,GAAA,IAAA,CAAK,MAAA,CAAO,iBAAA,KAAZ,IAAA,GAAA,MAAA,GAAA,EAAA,CAA+B,eAAA,KAA/B,IAAA,GAAA,MAAA,GAAA,EAAA,CAAA,KAAA,EAAA,EAAiD;QAClD,UAAA,EAAY;OACd;AACF,KAAA;AACA,IAAA,MAAM,0BACJ,EAAA,GAAA,YAAA,CAAa,KAAA,KAAb,IAAA,GAAA,SAAA,EAAA,CAAoB,yBAAA;AACtB,IAAA,MAAM,sBAAqB,EAAA,GAAA,YAAA,CAAa,KAAA,KAAb,IAAA,GAAA,SAAA,EAAA,CAAoB,qBAAA;AAC/C,IAAA,IAAA,CAAI,sBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,sBAAA,CAAwB,qBAAoB,IAAA,EAAM;AACpD,MAAA,gBAAA,CAAiB,KAAK,mBAAmB,CAAA,CAAE,kBACzC,sBAAA,IAAA,IAAA,GAAA,SAAA,sBAAA,CAAwB,gBAAA;AAC5B,IAAA;AACA,IAAA,IAAA,CAAI,sBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,sBAAA,CAAwB,+BAA8B,IAAA,EAAM;AAC9D,MAAA,gBAAA,CAAiB,KAAK,mBAAmB,CAAA,CAAE,2BACzC,sBAAA,IAAA,IAAA,GAAA,SAAA,sBAAA,CAAwB,0BAAA;AAC5B,IAAA;AACA,IAAA,IAAA,CAAI,sBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,sBAAA,CAAwB,+BAA8B,IAAA,EAAM;AAC9D,MAAA,gBAAA,CAAiB,KAAK,mBAAmB,CAAA,CAAE,2BACzC,sBAAA,IAAA,IAAA,GAAA,SAAA,sBAAA,CAAwB,0BAAA;AAC5B,IAAA;AACA,IAAA,IAAA,CAAI,kBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,kBAAA,CAAoB,kBAAiB,IAAA,EAAM;AAC7C,MAAA,gBAAA,CAAiB,KAAK,mBAAmB,CAAA,CAAE,qBACzC,kBAAA,IAAA,IAAA,GAAA,SAAA,kBAAA,CAAoB,aAAA;AACxB,IAAA;AAEA,IAAA,OAAO;AACL,MAAA,IAAA,EAAA,CAAM,EAAA,GAAA,MAAA,CAAO,OAAA,CAAQ,OAAA,KAAf,OAAA,EAAA,GAA0B,MAAA;AAChC,MAAA,SAAA,EAAA,CAAW,EAAA,GAAA,MAAA,CAAO,OAAA,CAAQ,iBAAA,KAAf,OAAA,EAAA,GAAoC,MAAA;MAC/C,SAAA,EAAA,CAAW,EAAA,GAAA,OAAO,OAAA,CAAQ,UAAA,KAAf,OAAA,MAAA,GAAA,EAAA,CAA2B,GAAA,CAAI,CAAA,QAAA,KAAS;AA/SzD,QAAA,IAAAC,GAAAA;AA+S6D,QAAA,OAAA;UACrD,YAAA,EAAc,UAAA;AACd,UAAA,UAAA,EAAA,CAAYA,GAAAA,GAAA,QAAA,CAAS,EAAA,KAAT,IAAA,GAAAA,MAAe,UAAA,EAAW;AACtC,UAAA,QAAA,EAAU,SAAS,QAAA,CAAS,IAAA;AAC5B,UAAA,IAAA,EAAM,SAAS,QAAA,CAAS;AAC1B,SAAA;MAAA,CAAA,CAAA;MACA,YAAA,EAAc,+BAAA,CAAgC,OAAO,aAAa,CAAA;MAClE,KAAA,EAAO;QACL,YAAA,EAAA,CAAc,EAAA,GAAA,CAAA,KAAA,YAAA,CAAa,KAAA,KAAb,OAAA,MAAA,GAAA,EAAA,CAAoB,aAAA,KAApB,IAAA,GAAA,EAAA,GAAqC,GAAA;QACnD,gBAAA,EAAA,CAAkB,EAAA,GAAA,CAAA,KAAA,YAAA,CAAa,KAAA,KAAb,OAAA,MAAA,GAAA,EAAA,CAAoB,iBAAA,KAApB,IAAA,GAAA,EAAA,GAAyC;AAC7D,OAAA;AACA,MAAA,gBAAA;MACA,OAAA,EAAS,EAAE,WAAW,WAAA,EAAY;AAClC,MAAA,WAAA,EAAa,EAAE,OAAA,EAAS,eAAA,EAAiB,IAAA,EAAM,WAAA,EAAY;AAC3D,MAAA,QAAA,EAAU,oBAAoB,YAAY,CAAA;AAC1C,MAAA,QAAA;AACA,MAAA,OAAA,EAAS,EAAE,IAAA;AACb,KAAA;AACF,EAAA;AAEA,EAAA,MAAM,SACJ,OAAA,EAC2D;AArU/D,IAAA,IAAA,EAAA;AAsUI,IAAA,IAAI,IAAA,CAAK,SAAS,iBAAA,EAAmB;AACnC,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,UAAA,CAAW,OAAO,CAAA;AAC5C,MAAA,MAAM,eAAA,GAAkB,IAAI,cAAA,CAA0C;AACpE,QAAA,KAAA,CAAM,UAAA,EAAY;AAChB,UAAA,UAAA,CAAW,QAAQ,EAAE,IAAA,EAAM,qBAAqB,GAAG,MAAA,CAAO,UAAU,CAAA;AACpE
,UAAA,IAAI,OAAO,SAAA,EAAW;AACpB,YAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,MAAA,CAAO,SAAS,CAAA,EAAG;AACnC,cAAA,KAAA,MAAW,IAAA,IAAQ,OAAO,SAAA,EAAW;AACnC,gBAAA,IAAI,IAAA,CAAK,SAAS,MAAA,EAAQ;AACxB,kBAAA,UAAA,CAAW,OAAA,CAAQ;oBACjB,IAAA,EAAM,WAAA;AACN,oBAAA,SAAA,EAAW,IAAA,CAAK;mBACjB,CAAA;AACH,gBAAA;AACF,cAAA;YACF,CAAA,MAAO;AACL,cAAA,UAAA,CAAW,OAAA,CAAQ;gBACjB,IAAA,EAAM,WAAA;AACN,gBAAA,SAAA,EAAW,MAAA,CAAO;eACnB,CAAA;AACH,YAAA;AACF,UAAA;AACA,UAAA,IAAI,OAAO,IAAA,EAAM;AACf,YAAA,UAAA,CAAW,OAAA,CAAQ;cACjB,IAAA,EAAM,YAAA;AACN,cAAA,SAAA,EAAW,MAAA,CAAO;aACnB,CAAA;AACH,UAAA;AACA,UAAA,IAAI,OAAO,SAAA,EAAW;AACpB,YAAA,KAAA,MAAW,QAAA,IAAY,OAAO,SAAA,EAAW;AACvC,cAAA,UAAA,CAAW,OAAA,CAAQ;gBACjB,IAAA,EAAM,WAAA;gBACN,GAAG;eACJ,CAAA;AACH,YAAA;AACF,UAAA;AACA,UAAA,UAAA,CAAW,OAAA,CAAQ;YACjB,IAAA,EAAM,QAAA;AACN,YAAA,YAAA,EAAc,MAAA,CAAO,YAAA;AACrB,YAAA,KAAA,EAAO,MAAA,CAAO,KAAA;AACd,YAAA,QAAA,EAAU,MAAA,CAAO,QAAA;AACjB,YAAA,gBAAA,EAAkB,MAAA,CAAO;WAC1B,CAAA;AACD,UAAA,UAAA,CAAW,KAAA,EAAM;AACnB,QAAA;OACD,CAAA;AACD,MAAA,OAAO;QACL,MAAA,EAAQ,eAAA;AACR,QAAA,OAAA,EAAS,MAAA,CAAO,OAAA;AAChB,QAAA,WAAA,EAAa,MAAA,CAAO,WAAA;AACpB,QAAA,QAAA,EAAU,MAAA,CAAO;AACnB,OAAA;AACF,IAAA;AAEA,IAAA,MAAM,EAAE,MAAM,QAAA,EAAS,GAAI,KAAK,OAAA,CAAQ,EAAE,GAAG,OAAA,EAAS,CAAA;AAEtD,IAAA,MAAM,IAAA,GAAO;MACX,GAAG,IAAA;MACH,MAAA,EAAQ,IAAA;;AAGR,MAAA,cAAA,EAAgB,KAAK,MAAA,CAAO,YAAA,GACxB,EAAE,aAAA,EAAe,MAAK,GACtB;AACN,KAAA;AAEA,IAAA,MAAM,iBAAA,GAAA,CACJ,KAAA,IAAA,CAAK,MAAA,CAAO,sBAAZ,IAAA,GAAA,MAAA,GAAA,GAA+B,qBAAA,EAAA;AAEjC,IAAA,MAAM,EAAE,eAAA,EAAiB,KAAA,EAAO,QAAA,EAAS,GAAI,MAAM,aAAA,CAAc;MAC/D,GAAA,EAAK,IAAA,CAAK,OAAO,GAAA,CAAI;QACnB,IAAA,EAAM,mBAAA;AACN,QAAA,OAAA,EAAS,IAAA,CAAK;OACf,CAAA;AACD,MAAA,OAAA,EAAS,eAAe,IAAA,CAAK,MAAA,CAAO,OAAA,EAAQ,EAAG,QAAQ,OAAO,CAAA;AAC9D,MAAA,IAAA;AACA,MAAA,qBAAA,EAAuB,IAAA,CAAK,qBAAA;MAC5B,yBAAA,EAA2B,gCAAA;QACzB,IAAA,CAAK;AACP,OAAA;AACA,MAAA,WAAA,EAAa,OAAA,CAAQ,WAAA;AACrB,MAAA,KAAA,EAAO,KAAK,MAAA,CAAO;KACpB,CAAA;AAED,IAAA,MAAM,EAAE,QAAA,EAAU,SAAA,EAAW,GAAG,aAAY,GAAI,IAAA;AAEhD,IAAA,MAAM,YAQD,EAAC;AAEN,IAAA,IAAI,YAAA,GAA4C,SAAA;AAChD,IAAA,IAAI,KAAA,GAWA;MACF,gBAAA,EAAkB,MAAA;MAClB,uBAAA,EAAyB;QACvB,eAAA,EAAiB,MAAA;QACjB,wBAAA,EAA0B,MAAA;QAC1B,wBAAA,EAA0B;AAC5B,OAAA;MACA,YAAA,EAAc,MAAA;MACd,mBAAA,EAAqB;QACnB,YAAA,EAAc;AAChB;AACF,KAAA;AACA,IAAA,IAAI,YAAA,GAAe,IAAA;AACnB,IAAA,IAAI,sBAAsB,IAAA,CAAK,mBAAA;AAE/B,IAAA,OAAO;AACL,MAAA,MAAA,EAAQ,QAAA,CAAS,WAAA;AACf,QAAA,IAAI,eAAA,CAGF;;AAEA,UAAA,SAAA,CAAU,OAAO,UAAA,EAAY;AAxcvC,YAAA,IAAAA,GAAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,IAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA;AA0cY,YAAA,IAAI,CAAC,MAAM,OAAA,EAAS;AAClB,cAAA,YAAA,GAAe,OAAA;AACf,cAAA,UAAA,CAAW,QAAQ,EAAE,IAAA,EAAM,SAAS,KAAA,EAAO,KAAA,CAAM,OAAO,CAAA;AACxD,cAAA;AACF,YAAA;AACA,YAAA,MAAM,QAAQ,KAAA,CAAM,KAAA;AAEpB,YAAA,iBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,iBAAA,CAAmB,YAAA,CAAa,MAAM,QAAA,CAAA;AAGtC,YAAA,IAAI,WAAW,KAAA,EAAO;AACpB,cAAA,YAAA,GAAe,OAAA;AACf,cAAA,UAAA,CAAW,OAAA,CAAQ,EAAE,IAAA,EAAM,OAAA,EAAS,OAAO,KAAA,CAAM,KAAA,CAAM,SAAS,CAAA;AAChE,cAAA;AACF,YAAA;AAEA,YAAA,IAAI,YAAA,EAAc;AAChB,cAAA,YAAA,GAAe,KAAA;AAEf,cAAA,UAAA,CAAW,OAAA,CAAQ;gBACjB,IAAA,EAAM,mBAAA;AACN,gBAAA,GAAG,oBAAoB,KAAK;eAC7B,CAAA;AACH,YAAA;AAEA,YAAA,IAAI,KAAA,CAAM,SAAS,IAAA,EAAM;AACvB,cAAA,MAAM;AACJ,gBAAA,aAAA;AACA,gBAAA,iBAAA;AACA,gBAAA,qBAAA;AACA,gBAAA;AACF,eAAA,GAAI,KAAA,CAAM,KAAA;AAEV,cAAA,KAAA,CAAM,YAAA,GAAe,aAAA,IAAA,IAAA,GAAA,aAAA,GAAiB,MAAA;AACtC,cAAA,KAAA,CAAM,gBAAA,GAAmB,iBAAA,IAAA,IAAA,GAAA,iBAAA,GAAqB,MAAA;AAE9C,cAAA,IAAA,CAAI,yBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,yBAAA,CAA2B,qBAAoB,IAAA,EAAM;AACvD,gBAAA,KAAA,CAAM,uBAAA,CAAwB,eAAA,GAC5B,yBAAA,IAAA,IAAA,GAAA,S
AAA,yBAAA,CAA2B,gBAAA;AAC/B,cAAA;AACA,cAAA,IAAA,CACE,yBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,yBAAA,CAA2B,+BAA8B,IAAA,EACzD;AACA,gBAAA,KAAA,CAAM,uBAAA,CAAwB,wBAAA,GAC5B,yBAAA,IAAA,IAAA,GAAA,SAAA,yBAAA,CAA2B,0BAAA;AAC/B,cAAA;AACA,cAAA,IAAA,CACE,yBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,yBAAA,CAA2B,+BAA8B,IAAA,EACzD;AACA,gBAAA,KAAA,CAAM,uBAAA,CAAwB,wBAAA,GAC5B,yBAAA,IAAA,IAAA,GAAA,SAAA,yBAAA,CAA2B,0BAAA;AAC/B,cAAA;AACA,cAAA,IAAA,CAAI,qBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,qBAAA,CAAuB,kBAAiB,IAAA,EAAM;AAChD,gBAAA,KAAA,CAAM,mBAAA,CAAoB,YAAA,GACxB,qBAAA,IAAA,IAAA,GAAA,SAAA,qBAAA,CAAuB,aAAA;AAC3B,cAAA;AACF,YAAA;AAEA,YAAA,MAAM,MAAA,GAAS,KAAA,CAAM,OAAA,CAAQ,CAAC,CAAA;AAE9B,YAAA,IAAA,CAAI,MAAA,IAAA,IAAA,GAAA,MAAA,GAAA,MAAA,CAAQ,kBAAiB,IAAA,EAAM;AACjC,cAAA,YAAA,GAAe,+BAAA;gBACb,MAAA,CAAO;AACT,eAAA;AACF,YAAA;AAEA,YAAA,IAAA,CAAI,MAAA,IAAA,IAAA,GAAA,MAAA,GAAA,MAAA,CAAQ,UAAS,IAAA,EAAM;AACzB,cAAA;AACF,YAAA;AAEA,YAAA,MAAM,QAAQ,MAAA,CAAO,KAAA;AAGrB,YAAA,IAAI,KAAA,CAAM,qBAAqB,IAAA,EAAM;AACnC,cAAA,UAAA,CAAW,OAAA,CAAQ;gBACjB,IAAA,EAAM,WAAA;AACN,gBAAA,SAAA,EAAW,KAAA,CAAM;eAClB,CAAA;AACH,YAAA;AAEA,YAAA,IAAI,KAAA,CAAM,WAAW,IAAA,EAAM;AACzB,cAAA,UAAA,CAAW,OAAA,CAAQ;gBACjB,IAAA,EAAM,YAAA;AACN,gBAAA,SAAA,EAAW,KAAA,CAAM;eAClB,CAAA;AACH,YAAA;AAEA,YAAA,IAAI,KAAA,CAAM,cAAc,IAAA,EAAM;AAC5B,cAAA,KAAA,MAAW,aAAA,IAAiB,MAAM,UAAA,EAAY;AAC5C,gBAAA,MAAM,QAAQ,aAAA,CAAc,KAAA;AAE5B,gBAAA,IAAI,SAAA,CAAU,KAAK,CAAA,IAAK,IAAA,EAAM;AAC5B,kBAAA,IAAI,aAAA,CAAc,SAAS,UAAA,EAAY;AACrC,oBAAA,MAAM,IAAI,wBAAA,CAAyB;sBACjC,IAAA,EAAM,aAAA;sBACN,OAAA,EAAS,CAAA,yBAAA;qBACV,CAAA;AACH,kBAAA;AAEA,kBAAA,IAAI,aAAA,CAAc,MAAM,IAAA,EAAM;AAC5B,oBAAA,MAAM,IAAI,wBAAA,CAAyB;sBACjC,IAAA,EAAM,aAAA;sBACN,OAAA,EAAS,CAAA,6BAAA;qBACV,CAAA;AACH,kBAAA;AAEA,kBAAA,IAAA,CAAA,CAAIA,MAAA,aAAA,CAAc,QAAA,KAAd,OAAA,MAAA,GAAAA,GAAAA,CAAwB,SAAQ,IAAA,EAAM;AACxC,oBAAA,MAAM,IAAI,wBAAA,CAAyB;sBACjC,IAAA,EAAM,aAAA;sBACN,OAAA,EAAS,CAAA,wCAAA;qBACV,CAAA;AACH,kBAAA;AAEA,kBAAA,SAAA,CAAU,KAAK,CAAA,GAAI;AACjB,oBAAA,EAAA,EAAI,aAAA,CAAc,EAAA;oBAClB,IAAA,EAAM,UAAA;oBACN,QAAA,EAAU;AACR,sBAAA,IAAA,EAAM,cAAc,QAAA,CAAS,IAAA;AAC7B,sBAAA,SAAA,EAAA,CAAW,EAAA,GAAA,aAAA,CAAc,QAAA,CAAS,SAAA,KAAvB,OAAA,EAAA,GAAoC;AACjD,qBAAA;oBACA,WAAA,EAAa;AACf,mBAAA;AAEA,kBAAA,MAAMC,SAAAA,GAAW,UAAU,KAAK,CAAA;AAEhC,kBAAA,IAAA,CAAA,CACE,EAAA,GAAAA,SAAAA,CAAS,QAAA,KAAT,IAAA,GAAA,SAAA,EAAA,CAAmB,IAAA,KAAQ,IAAA,IAAA,CAAA,CAC3B,EAAA,GAAAA,UAAS,QAAA,KAAT,IAAA,GAAA,MAAA,GAAA,EAAA,CAAmB,cAAa,IAAA,EAChC;AAEA,oBAAA,IAAIA,SAAAA,CAAS,QAAA,CAAS,SAAA,CAAU,MAAA,GAAS,CAAA,EAAG;AAC1C,sBAAA,UAAA,CAAW,OAAA,CAAQ;wBACjB,IAAA,EAAM,iBAAA;wBACN,YAAA,EAAc,UAAA;AACd,wBAAA,UAAA,EAAYA,SAAAA,CAAS,EAAA;AACrB,wBAAA,QAAA,EAAUA,UAAS,QAAA,CAAS,IAAA;AAC5B,wBAAA,aAAA,EAAeA,UAAS,QAAA,CAAS;uBAClC,CAAA;AACH,oBAAA;AAIA,oBAAA,IAAI,cAAA,CAAeA,SAAAA,CAAS,QAAA,CAAS,SAAS,CAAA,EAAG;AAC/C,sBAAA,UAAA,CAAW,OAAA,CAAQ;wBACjB,IAAA,EAAM,WAAA;wBACN,YAAA,EAAc,UAAA;AACd,wBAAA,UAAA,EAAA,CAAY,EAAA,GAAAA,SAAAA,CAAS,EAAA,KAAT,IAAA,GAAA,KAAe,UAAA,EAAW;AACtC,wBAAA,QAAA,EAAUA,UAAS,QAAA,CAAS,IAAA;AAC5B,wBAAA,IAAA,EAAMA,UAAS,QAAA,CAAS;uBACzB,CAAA;AACDA,sBAAAA,SAAAA,CAAS,WAAA,GAAc,IAAA;AACzB,oBAAA;AACF,kBAAA;AAEA,kBAAA;AACF,gBAAA;AAGA,gBAAA,MAAM,QAAA,GAAW,UAAU,KAAK,CAAA;AAEhC,gBAAA,IAAI,SAAS,WAAA,EAAa;AACxB,kBAAA;AACF,gBAAA;AAEA,gBAAA,IAAA,CAAA,CAAI,KAAA,aAAA,CAAc,QAAA,KAAd,OAAA,MAAA,GAAA,EAAA,CAAwB,cAAa,IAAA,EAAM;AAC7C,kBAAA,QAAA,CAAS,QAAA,CAAU,SAAA,IAAA,CACjB,EAAA,GAAA,CAAA,EAAA,GAAA,aAAA,CAAc,QAAA,KAAd,IAAA,GAAA,MAAA,GAAA,EAAA,CAAwB,SAAA,KAAxB,IAAA,GAAA,EAAA,GAAqC,EAAA;AACzC,gBAAA;AAGA,gBAAA,UAAA,CAAW,OAAA,CAAQ;kBACjB,IAAA,EAAM,iBAAA;kBACN,YAAA,EAAc,UAAA;AACd,kBAAA,UAAA,EAAY,QAAA,CAAS,EAAA;AACrB,kBAAA,QAAA,EA
AU,SAAS,QAAA,CAAS,IAAA;AAC5B,kBAAA,aAAA,EAAA,CAAe,EAAA,GAAA,aAAA,CAAc,QAAA,CAAS,SAAA,KAAvB,OAAA,EAAA,GAAoC;iBACpD,CAAA;AAGD,gBAAA,IAAA,CAAA,CACE,EAAA,GAAA,SAAS,QAAA,KAAT,IAAA,GAAA,SAAA,EAAA,CAAmB,IAAA,KAAQ,UAC3B,EAAA,GAAA,QAAA,CAAS,aAAT,IAAA,GAAA,MAAA,GAAA,GAAmB,SAAA,KAAa,IAAA,IAChC,eAAe,QAAA,CAAS,QAAA,CAAS,SAAS,CAAA,EAC1C;AACA,kBAAA,UAAA,CAAW,OAAA,CAAQ;oBACjB,IAAA,EAAM,WAAA;oBACN,YAAA,EAAc,UAAA;AACd,oBAAA,UAAA,EAAA,CAAY,EAAA,GAAA,QAAA,CAAS,EAAA,KAAT,IAAA,GAAA,KAAe,UAAA,EAAW;AACtC,oBAAA,QAAA,EAAU,SAAS,QAAA,CAAS,IAAA;AAC5B,oBAAA,IAAA,EAAM,SAAS,QAAA,CAAS;mBACzB,CAAA;AACD,kBAAA,QAAA,CAAS,WAAA,GAAc,IAAA;AACzB,gBAAA;AACF,cAAA;AACF,YAAA;AACF,UAAA,CAAA;AAEA,UAAA,KAAA,CAAM,UAAA,EAAY;AA/oB5B,YAAA,IAAAD,GAAAA,EAAA,EAAA;AAgpBY,YAAA,MAAM,gBAAA,GAAoD;cACxD,CAAC,mBAAmB,GAAG,EAAC;AACxB,cAAA,GAAG,iBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,iBAAA,CAAmB,aAAA;AACxB,aAAA;AACA,YAAA,IAAI,KAAA,CAAM,uBAAA,CAAwB,eAAA,IAAmB,IAAA,EAAM;AACzD,cAAA,gBAAA,CAAiB,mBAAmB,CAAA,CAAE,eAAA,GACpC,KAAA,CAAM,uBAAA,CAAwB,eAAA;AAClC,YAAA;AACA,YAAA,IACE,KAAA,CAAM,uBAAA,CAAwB,wBAAA,IAA4B,IAAA,EAC1D;AACA,cAAA,gBAAA,CAAiB,mBAAmB,CAAA,CAAE,wBAAA,GACpC,KAAA,CAAM,uBAAA,CAAwB,wBAAA;AAClC,YAAA;AACA,YAAA,IACE,KAAA,CAAM,uBAAA,CAAwB,wBAAA,IAA4B,IAAA,EAC1D;AACA,cAAA,gBAAA,CAAiB,mBAAmB,CAAA,CAAE,wBAAA,GACpC,KAAA,CAAM,uBAAA,CAAwB,wBAAA;AAClC,YAAA;AACA,YAAA,IAAI,KAAA,CAAM,mBAAA,CAAoB,YAAA,IAAgB,IAAA,EAAM;AAClD,cAAA,gBAAA,CAAiB,mBAAmB,CAAA,CAAE,kBAAA,GACpC,KAAA,CAAM,mBAAA,CAAoB,YAAA;AAC9B,YAAA;AAEA,YAAA,UAAA,CAAW,OAAA,CAAQ;cACjB,IAAA,EAAM,QAAA;AACN,cAAA,YAAA;cACA,KAAA,EAAO;AACL,gBAAA,YAAA,EAAA,CAAcA,GAAAA,GAAA,KAAA,CAAM,YAAA,KAAN,IAAA,GAAAA,GAAAA,GAAsB,GAAA;AACpC,gBAAA,gBAAA,EAAA,CAAkB,EAAA,GAAA,KAAA,CAAM,gBAAA,KAAN,IAAA,GAAA,EAAA,GAA0B;AAC9C,eAAA;AACA,cAAA;aACD,CAAA;AACH,UAAA;SACD;AACH,OAAA;MACA,OAAA,EAAS,EAAE,WAAW,WAAA,EAAY;MAClC,WAAA,EAAa,EAAE,SAAS,eAAA,EAAgB;AACxC,MAAA,QAAA;AACA,MAAA,OAAA,EAAS,EAAE,IAAA,EAAM,IAAA,CAAK,SAAA,CAAU,IAAI,CAAA;AACtC,KAAA;AACF,EAAA;AACF,CAAA;AAEA,IAAM,gCAAA,GAAmCE,EACtC,MAAA,CAAO;EACN,aAAA,EAAeA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;EAClC,iBAAA,EAAmBA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AACtC,EAAA,qBAAA,EAAuBA,EACpB,MAAA,CAAO;IACN,aAAA,EAAeA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA;AAC5B,GAAC,EACA,OAAA,EAAQ;AACX,EAAA,yBAAA,EAA2BA,EACxB,MAAA,CAAO;IACN,gBAAA,EAAkBA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;IACrC,0BAAA,EAA4BA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;IAC/C,0BAAA,EAA4BA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA;AACzC,GAAC,EACA,OAAA;AACL,CAAC,EACA,OAAA,EAAQ;AAIX,IAAM,kCAAA,GAAqCA,EAAE,MAAA,CAAO;EAClD,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;EACvB,OAAA,EAASA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;EAC5B,KAAA,EAAOA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AAC1B,EAAA,OAAA,EAASA,CAAAA,CAAE,KAAA;AACTA,IAAAA,CAAAA,CAAE,MAAA,CAAO;AACP,MAAA,OAAA,EAASA,EAAE,MAAA,CAAO;AAChB,QAAA,IAAA,EAAMA,CAAAA,CAAE,OAAA,CAAQ,WAAW,CAAA,CAAE,OAAA,EAAQ;QACrC,OAAA,EAASA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;QAC5B,iBAAA,EAAmBA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AACtC,QAAA,UAAA,EAAYA,CAAAA,CACT,KAAA;AACCA,UAAAA,CAAAA,CAAE,MAAA,CAAO;YACP,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;YACvB,IAAA,EAAMA,CAAAA,CAAE,QAAQ,UAAU,CAAA;AAC1B,YAAA,QAAA,EAAUA,EAAE,MAAA,CAAO;AACjB,cAAA,IAAA,EAAMA,EAAE,MAAA,EAAO;AACf,cAAA,SAAA,EAAWA,EAAE,MAAA;aACd;WACF;AACH,SAAA,CACC,OAAA;OACJ,CAAA;MACD,aAAA,EAAeA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA;KAC3B;AACH,GAAA;EACA,KAAA,EAAO;AACT,CAAC,CAAA;AAID,IAAM,qCAAA,GAAwC,CAC5C,WAAA,KAEAA,CAAAA,CAAE,KAAA,CAAM;AACNA,EAAAA,CAAAA,CAAE,MAAA,CAAO;IACP,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;IACvB,OAAA,EAASA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;IAC5B,KAAA,EAAOA,CAAAA,CAAE,MAAA,EAA
O,CAAE,OAAA,EAAQ;AAC1B,IAAA,OAAA,EAASA,CAAAA,CAAE,KAAA;AACTA,MAAAA,CAAAA,CAAE,MAAA,CAAO;AACP,QAAA,KAAA,EAAOA,EACJ,MAAA,CAAO;AACN,UAAA,IAAA,EAAMA,EAAE,IAAA,CAAK,CAAC,WAAW,CAAC,EAAE,OAAA,EAAQ;UACpC,OAAA,EAASA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;UAC5B,iBAAA,EAAmBA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AACtC,UAAA,UAAA,EAAYA,CAAAA,CACT,KAAA;AACCA,YAAAA,CAAAA,CAAE,MAAA,CAAO;cACP,KAAA,EAAOA,CAAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;cAC3B,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AACvB,cAAA,IAAA,EAAMA,CAAAA,CAAE,OAAA,CAAQ,UAAU,CAAA,CAAE,OAAA,EAAQ;AACpC,cAAA,QAAA,EAAUA,EAAE,MAAA,CAAO;gBACjB,IAAA,EAAMA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;gBACzB,SAAA,EAAWA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA;eACvB;aACF;AACH,WAAA,CACC,OAAA;AACL,SAAC,EACA,OAAA,EAAQ;QACX,aAAA,EAAeA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA;OAC3B;AACH,KAAA;IACA,KAAA,EAAO;GACR,CAAA;AACD,EAAA;AACF,CAAC,CAAA;ACtc8CA,EAAE,MAAA,CAAO;EACxD,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;EACvB,OAAA,EAASA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;EAC5B,KAAA,EAAOA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AAC1B,EAAA,OAAA,EAASA,CAAAA,CAAE,KAAA;AACTA,IAAAA,CAAAA,CAAE,MAAA,CAAO;AACP,MAAA,IAAA,EAAMA,EAAE,MAAA,EAAO;AACf,MAAA,aAAA,EAAeA,EAAE,MAAA;KAClB;AACH,GAAA;AACA,EAAA,KAAA,EAAOA,EACJ,MAAA,CAAO;AACN,IAAA,aAAA,EAAeA,EAAE,MAAA,EAAO;AACxB,IAAA,iBAAA,EAAmBA,EAAE,MAAA;AACvB,GAAC,EACA,OAAA;AACL,CAAC;ACtOyCA,EAAE,MAAA,CAAO;AACjD,EAAA,IAAA,EAAMA,CAAAA,CAAE,KAAA,CAAMA,CAAAA,CAAE,MAAA,CAAO,EAAE,SAAA,EAAWA,CAAAA,CAAE,KAAA,CAAMA,CAAAA,CAAE,MAAA,EAAQ,CAAA,EAAG,CAAC,CAAA;EAC1D,KAAA,EAAOA,CAAAA,CAAE,OAAO,EAAE,aAAA,EAAeA,EAAE,MAAA,EAAO,EAAG,CAAA,CAAE,OAAA;AACjD,CAAC;ACjGM,IAAM,6BAAN,MAAyD;EAW9D,WAAA,CACW,OAAA,EACQ,UACA,MAAA,EACjB;AAHS,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AACQ,IAAA,IAAA,CAAA,QAAA,GAAA,QAAA;AACA,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAbnB,IAAA,IAAA,CAAS,oBAAA,GAAuB,IAAA;AAc7B,EAAA;AAZH,EAAA,IAAI,gBAAA,GAA2B;AA9BjC,IAAA,IAAA,EAAA;AA+BI,IAAA,OAAA,CAAO,EAAA,GAAA,IAAA,CAAK,QAAA,CAAS,gBAAA,KAAd,OAAA,EAAA,GAAkC,EAAA;AAC3C,EAAA;AAEA,EAAA,IAAI,QAAA,GAAmB;AACrB,IAAA,OAAO,KAAK,MAAA,CAAO,QAAA;AACrB,EAAA;AAQA,EAAA,MAAM,UAAA,CAAW;AACf,IAAA,MAAA;AACA,IAAA,CAAA;AACA,IAAA,IAAA;AACA,IAAA,WAAA;AACA,IAAA,IAAA;AACA,IAAA,eAAA;AACA,IAAA,OAAA;AACA,IAAA;GACF,EAEE;AAvDJ,IAAA,IAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA;AAwDI,IAAA,MAAM,WAA2C,EAAC;AAElD,IAAA,IAAI,eAAe,IAAA,EAAM;AACvB,MAAA,QAAA,CAAS,IAAA,CAAK;QACZ,IAAA,EAAM,qBAAA;QACN,OAAA,EAAS,aAAA;QACT,OAAA,EACE;OACH,CAAA;AACH,IAAA;AAEA,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,QAAA,CAAS,KAAK,EAAE,IAAA,EAAM,qBAAA,EAAuB,OAAA,EAAS,QAAQ,CAAA;AAChE,IAAA;AAEA,IAAA,MAAM,WAAA,GAAA,CAAc,MAAA,EAAA,GAAA,CAAA,EAAA,GAAA,KAAK,MAAA,CAAO,SAAA,KAAZ,OAAA,MAAA,GAAA,EAAA,CAAuB,gBAAvB,IAAA,GAAA,MAAA,GAAA,GAAA,IAAA,CAAA,EAAA,MAAA,IAAA,GAAA,EAAA,uBAA8C,IAAA,EAAK;AACvE,IAAA,MAAM,EAAE,KAAA,EAAO,QAAA,EAAU,eAAA,EAAgB,GAAI,MAAMC,aAAAA,CAAc;MAC/D,GAAA,EAAK,IAAA,CAAK,OAAO,GAAA,CAAI;QACnB,IAAA,EAAM,qBAAA;AACN,QAAA,OAAA,EAAS,IAAA,CAAK;OACf,CAAA;AACD,MAAA,OAAA,EAASC,cAAAA,CAAe,IAAA,CAAK,MAAA,CAAO,OAAA,IAAW,OAAO,CAAA;MACtD,IAAA,EAAM;AACJ,QAAA,KAAA,EAAO,IAAA,CAAK,OAAA;AACZ,QAAA,MAAA;AACA,QAAA,CAAA;AACA,QAAA,IAAA;AACA,QAAA,GAAA,CAAI,EAAA,GAAA,eAAA,CAAgB,MAAA,KAAhB,IAAA,GAAA,KAA0B,EAAC;QAC/B,eAAA,EAAiB,UAAA;QACjB,GAAI,IAAA,CAAK,SAAS,IAAA,GAAO,EAAE,MAAM,IAAA,CAAK,QAAA,CAAS,IAAA,EAAK,GAAI;AAC1D,OAAA;MACA,qBAAA,EAAuBC,8BAAAA;AACrB,QAAA,CAAA,EAAA,GAAA,IAAA,CAAK,MAAA,CAAO,cAAA,KAAZ,IAAA,GAAA,EAAA,GAA8B;AAChC,OAAA;MACA,yBAAA,EAA2BC,yBAAAA;AACzB,QAAA;AACF,OAAA;AACA,MAAA,WAAA;AACA,MAAA,KAAA,EAAO,KAAK,MAAA,CAAO;KACpB,CAAA;AAED,IAAA,OAAO;AACL,MAAA,MAAA,EAAQ,SAAS,IAAA,CAAK,GAAA,CAAI,CAAA,IAAA,KAAQ,KAAK,QAAQ,CAAA;AAC/C,M
AAA,QAAA;MACA,QAAA,EAAU;QACR,SAAA,EAAW,WAAA;AACX,QAAA,OAAA,EAAS,IAAA,CAAK,OAAA;QACd,OAAA,EAAS;AACX;AACF,KAAA;AACF,EAAA;AACF,CAAA;AAIA,IAAM,mCAAA,GAAsCJ,EAAE,MAAA,CAAO;EACnD,IAAA,EAAMA,CAAAA,CAAE,KAAA,CAAMA,CAAAA,CAAE,MAAA,CAAO,EAAE,UAAUA,CAAAA,CAAE,MAAA,EAAO,EAAG,CAAC;AAClD,CAAC,CAAA;ACnFM,SAAS,0BAA0B,OAAA,EAAyB;AACjE,EAAA,OAAO;AACL,IAAA,QAAA;AACA,IAAA,aAAA;AACA,IAAA,eAAA;AACA,IAAA,aAAA;AACA,IAAA,kBAAA;AACA,IAAA,oBAAA;AACA,IAAA,aAAA;AACA,IAAA,kBAAA;AACA,IAAA,oBAAA;AACA,IAAA,kBAAA;AACA,IAAA,uBAAA;AACA,IAAA,yBAAA;AACA,IAAA,aAAA;AACA,IAAA;AACF,GAAA,CAAE,SAAS,OAAO,CAAA;AACpB;AC5CO,IAAM,cAAA,GAAiBA,EAAE,MAAA,CAAO;AACrC,EAAA,IAAA,EAAMA,EAAE,MAAA,EAAO;AACf,EAAA,KAAA,EAAOA,EAAE,MAAA;AACX,CAAC,CAAA;ACmBD,IAAM,iBAAA,GAA0D;EAC9D,WAAA,EAAa,cAAA;EACb,cAAA,EAAgB,CAAA,SAAQ,IAAA,CAAK;AAC/B,CAAA;AA6DO,SAAS,SAAA,CAAU,OAAA,GAA+B,EAAC,EAAgB;AAzF1E,EAAA,IAAA,EAAA;AA0FE,EAAA,MAAM,OAAA,GAAU,oBAAA;KACd,EAAA,GAAA,OAAA,CAAQ,OAAA,KAAR,IAAA,GAAA,EAAA,GAAmB;AACrB,GAAA;AACA,EAAA,MAAM,aAAa,OAAO;AACxB,IAAA,aAAA,EAAe,UAAU,UAAA,CAAW;AAClC,MAAA,MAAA,EAAQ,OAAA,CAAQ,MAAA;MAChB,uBAAA,EAAyB,aAAA;MACzB,WAAA,EAAa;AACf,KAAC,CAAC,CAAA,CAAA;AACF,IAAA,GAAG,OAAA,CAAQ;AACb,GAAA,CAAA;AAEA,EAAA,MAAM,mBAAA,GAAsB,CAC1B,OAAA,EACA,QAAA,GAA4B,EAAC,KAC1B;AACH,IAAA,MAAM,iBAAA,GAAoB,0BAA0B,OAAO,CAAA;AAC3D,IAAA,OAAO,IAAI,iCAAA,CAAkC,OAAA,EAAS,QAAA,EAAU;MAC9D,QAAA,EAAU,UAAA;AACV,MAAA,GAAA,EAAK,CAAC,EAAE,IAAA,OAAW,CAAA,EAAG,OAAO,GAAG,IAAI,CAAA,CAAA;MACpC,OAAA,EAAS,UAAA;AACT,MAAA,KAAA,EAAO,OAAA,CAAQ,KAAA;AACf,MAAA,2BAAA,EAA6B,oBAAoB,MAAA,GAAS,MAAA;MAC1D,cAAA,EAAgB,iBAAA;MAChB,yBAAA,EAA2B,iBAAA;MAC3B,YAAA,EAAc;KACf,CAAA;AACH,EAAA,CAAA;AAEA,EAAA,MAAM,gBAAA,GAAmB,CACvB,OAAA,EACA,QAAA,GAA6B,EAAC,KAC3B;AACH,IAAA,OAAO,IAAI,0BAAA,CAA2B,OAAA,EAAS,QAAA,EAAU;MACvD,QAAA,EAAU,WAAA;AACV,MAAA,GAAA,EAAK,CAAC,EAAE,IAAA,OAAW,CAAA,EAAG,OAAO,GAAG,IAAI,CAAA,CAAA;MACpC,OAAA,EAAS,UAAA;AACT,MAAA,KAAA,EAAO,OAAA,CAAQ,KAAA;MACf,cAAA,EAAgB;KACjB,CAAA;AACH,EAAA,CAAA;AAEA,EAAA,MAAM,WAAW,CAAC,OAAA,EAAyB,QAAA,KACzC,mBAAA,CAAoB,SAAS,QAAQ,CAAA;AAEvC,EAAA,QAAA,CAAS,aAAA,GAAgB,mBAAA;AACzB,EAAA,QAAA,CAAS,IAAA,GAAO,mBAAA;AAChB,EAAA,QAAA,CAAS,kBAAA,GAAqB,CAAC,OAAA,KAAoB;AACjD,IAAA,MAAM,IAAI,gBAAA,CAAiB,EAAE,OAAA,EAAS,SAAA,EAAW,sBAAsB,CAAA;AACzE,EAAA,CAAA;AACA,EAAA,QAAA,CAAS,UAAA,GAAa,gBAAA;AACtB,EAAA,QAAA,CAAS,KAAA,GAAQ,gBAAA;AAEjB,EAAA,OAAO,QAAA;AACT;AAEO,IAAM,MAAM,SAAA","file":"dist-VPYZNWNG.js","sourcesContent":["import {\n LanguageModelV1Prompt,\n LanguageModelV1ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerMetadata?: LanguageModelV1ProviderMetadata;\n}) {\n return message?.providerMetadata?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV1Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url: {\n url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n },\n ...partMetadata,\n };\n }\n case 'file': {\n throw new UnsupportedFunctionalityError({\n functionality: 'File content parts in user messages',\n });\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z, ZodSchema } from 'zod';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n mode,\n structuredOutputs,\n}: {\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n };\n structuredOutputs: boolean;\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const toolChoice = mode.toolChoice;\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, tool_choice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n tool_choice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1ObjectGenerationMode,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n OpenAICompatibleChatSettings,\n} from './openai-compatible-chat-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { prepareTools } from './openai-compatible-prepare-tools';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\nDefault object generation mode that should be used with this model when\nno mode is specified. Should be the mode with the best results for this\nmodel. 
`undefined` can be specified if object generation is not supported.\n */\n defaultObjectGenerationMode?: LanguageModelV1ObjectGenerationMode;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n readonly settings: OpenAICompatibleChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n settings: OpenAICompatibleChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' | undefined {\n return this.config.defaultObjectGenerationMode;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerMetadata,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n reasoning_effort:\n providerMetadata?.[this.providerOptionsName]?.reasoningEffort ??\n providerMetadata?.['openai-compatible']?.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, toolWarnings } = prepareTools({\n mode,\n structuredOutputs: this.supportsStructuredOutputs,\n });\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.supportsStructuredOutputs === true && mode.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: mode.schema,\n name: mode.name ?? 'response',\n description: mode.description,\n },\n }\n : { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: {\n type: 'function',\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: 'function',\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = responseBody.choices[0];\n\n // provider metadata:\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n }),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n const promptTokenDetails = responseBody.usage?.prompt_tokens_details;\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata[this.providerOptionsName].reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata[this.providerOptionsName].cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n reasoning: choice.message.reasoning_content ?? 
undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: responseBody.usage?.prompt_tokens ?? NaN,\n completionTokens: responseBody.usage?.completion_tokens ?? NaN,\n },\n providerMetadata,\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(responseBody),\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n const simulatedStream = new ReadableStream<LanguageModelV1StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.reasoning) {\n if (Array.isArray(result.reasoning)) {\n for (const part of result.reasoning) {\n if (part.type === 'text') {\n controller.enqueue({\n type: 'reasoning',\n textDelta: part.text,\n });\n }\n }\n } else {\n controller.enqueue({\n type: 'reasoning',\n textDelta: result.reasoning,\n });\n }\n }\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n return {\n stream: simulatedStream,\n rawCall: result.rawCall,\n rawResponse: result.rawResponse,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n };\n let isFirstChunk = true;\n let providerOptionsName = this.providerOptionsName;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? 
undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? 
'',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.reasoningTokens != null) {\n providerMetadata[providerOptionsName].reasoningTokens =\n usage.completionTokensDetails.reasoningTokens;\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n if (usage.promptTokensDetails.cachedTokens != null) {\n providerMetadata[providerOptionsName].cachedPromptTokens =\n usage.promptTokensDetails.cachedTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? NaN,\n },\n providerMetadata,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n 
.array(\n z.object({\n index: z.number().optional(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n APICallError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionSettings,\n} from './openai-compatible-completion-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n includeUsage?: boolean;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV1\n{\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n readonly settings: OpenAICompatibleCompletionSettings;\n\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n settings: OpenAICompatibleCompletionSettings,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerMetadata,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n };\n\n switch (type) {\n case 'regular': {\n if (mode.tools?.length) {\n throw new UnsupportedFunctionalityError({\n functionality: 'tools',\n });\n }\n\n if (mode.toolChoice) {\n throw new UnsupportedFunctionalityError({\n functionality: 'toolChoice',\n });\n }\n\n return { args: baseArgs, warnings };\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-json mode',\n });\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(response),\n warnings,\n request: { body: JSON.stringify(args) },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends 
z.ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n errorSchema,\n ]);\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingSettings,\n} from './openai-compatible-embedding-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n private readonly settings: OpenAICompatibleEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n settings: OpenAICompatibleEmbeddingSettings,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return this.settings.maxImagesPerCall ?? 10;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly settings: OpenAICompatibleImageSettings,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n ...(this.settings.user ? { user: this.settings.user } : {}),\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import { OpenAICompatibleChatSettings } from '@ai-sdk/openai-compatible';\n\n// https://console.x.ai and see \"View models\"\nexport type XaiChatModelId =\n | 'grok-3'\n | 'grok-3-latest'\n | 'grok-3-fast'\n | 'grok-3-fast-latest'\n | 'grok-3-mini'\n | 'grok-3-mini-latest'\n | 'grok-3-mini-fast'\n | 'grok-3-mini-fast-latest'\n | 'grok-2-vision-1212'\n | 'grok-2-vision'\n | 'grok-2-vision-latest'\n | 'grok-2-image-1212'\n | 'grok-2-image'\n | 'grok-2-image-latest'\n | 'grok-2-1212'\n | 'grok-2'\n | 'grok-2-latest'\n | 'grok-vision-beta'\n | 'grok-beta'\n | (string & {});\n\nexport interface XaiChatSettings extends OpenAICompatibleChatSettings {}\n\n/**\n * https://docs.x.ai/docs/guides/structured-outputs\n */\nexport function supportsStructuredOutputs(modelId: XaiChatModelId) {\n return [\n 'grok-3',\n 'grok-3-beta',\n 'grok-3-latest',\n 'grok-3-fast',\n 'grok-3-fast-beta',\n 'grok-3-fast-latest',\n 'grok-3-mini',\n 'grok-3-mini-beta',\n 'grok-3-mini-latest',\n 'grok-3-mini-fast',\n 'grok-3-mini-fast-beta',\n 'grok-3-mini-fast-latest',\n 'grok-2-1212',\n 'grok-2-vision-1212',\n ].includes(modelId);\n}\n","import { z } from 'zod';\n\n// Add error schema and structure\nexport const xaiErrorSchema = z.object({\n code: z.string(),\n error: z.string(),\n});\n\nexport type XaiErrorData = z.infer<typeof xaiErrorSchema>;\n","import {\n ImageModelV1,\n LanguageModelV1,\n NoSuchModelError,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport {\n OpenAICompatibleChatLanguageModel,\n OpenAICompatibleImageModel,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport {\n XaiChatModelId,\n XaiChatSettings,\n supportsStructuredOutputs,\n} from './xai-chat-settings';\nimport { XaiImageSettings } from './xai-image-settings';\nimport { XaiImageModelId } from './xai-image-settings';\nimport { XaiErrorData, xaiErrorSchema } from './xai-error';\n\nconst xaiErrorStructure: ProviderErrorStructure<XaiErrorData> = {\n errorSchema: xaiErrorSchema,\n errorToMessage: data => data.error,\n};\n\nexport interface XaiProvider extends ProviderV1 {\n /**\nCreates an Xai chat model for text generation.\n */\n (modelId: XaiChatModelId, settings?: XaiChatSettings): LanguageModelV1;\n\n /**\nCreates an Xai language model for text generation.\n */\n languageModel(\n modelId: XaiChatModelId,\n settings?: XaiChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates an Xai chat model for text generation.\n */\n chat: (\n modelId: XaiChatModelId,\n settings?: XaiChatSettings,\n ) => LanguageModelV1;\n\n /**\nCreates an Xai image model for image generation.\n */\n image(modelId: XaiImageModelId, settings?: XaiImageSettings): ImageModelV1;\n\n /**\nCreates an Xai image model for image generation.\n */\n imageModel(\n modelId: XaiImageModelId,\n settings?: XaiImageSettings,\n ): 
ImageModelV1;\n}\n\nexport interface XaiProviderSettings {\n /**\nBase URL for the xAI API calls.\n */\n baseURL?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\nexport function createXai(options: XaiProviderSettings = {}): XaiProvider {\n const baseURL = withoutTrailingSlash(\n options.baseURL ?? 'https://api.x.ai/v1',\n );\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'XAI_API_KEY',\n description: 'xAI API key',\n })}`,\n ...options.headers,\n });\n\n const createLanguageModel = (\n modelId: XaiChatModelId,\n settings: XaiChatSettings = {},\n ) => {\n const structuredOutputs = supportsStructuredOutputs(modelId);\n return new OpenAICompatibleChatLanguageModel(modelId, settings, {\n provider: 'xai.chat',\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n defaultObjectGenerationMode: structuredOutputs ? 'json' : 'tool',\n errorStructure: xaiErrorStructure,\n supportsStructuredOutputs: structuredOutputs,\n includeUsage: true,\n });\n };\n\n const createImageModel = (\n modelId: XaiImageModelId,\n settings: XaiImageSettings = {},\n ) => {\n return new OpenAICompatibleImageModel(modelId, settings, {\n provider: 'xai.image',\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: xaiErrorStructure,\n });\n };\n\n const provider = (modelId: XaiChatModelId, settings?: XaiChatSettings) =>\n createLanguageModel(modelId, settings);\n\n provider.languageModel = createLanguageModel;\n provider.chat = createLanguageModel;\n provider.textEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' });\n };\n provider.imageModel = createImageModel;\n provider.image = createImageModel;\n\n return provider;\n}\n\nexport const xai = createXai();\n"]}
+ {"version":3,"sources":["../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/convert-to-openai-compatible-chat-messages.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/get-response-metadata.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/map-openai-compatible-finish-reason.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-error.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-prepare-tools.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-chat-language-model.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-completion-language-model.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-embedding-model.ts","../../../node_modules/.pnpm/@ai-sdk+openai-compatible@0.2.16_zod@3.25.76/node_modules/@ai-sdk/openai-compatible/src/openai-compatible-image-model.ts","../../../node_modules/.pnpm/@ai-sdk+xai@1.2.18_zod@3.25.76/node_modules/@ai-sdk/xai/src/xai-chat-settings.ts","../../../node_modules/.pnpm/@ai-sdk+xai@1.2.18_zod@3.25.76/node_modules/@ai-sdk/xai/src/xai-error.ts","../../../node_modules/.pnpm/@ai-sdk+xai@1.2.18_zod@3.25.76/node_modules/@ai-sdk/xai/src/xai-provider.ts"],"names":["UnsupportedFunctionalityError","_a","toolCall","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler"],"mappings":";;;AAQA,SAAS,kBAAkB,OAAA,EAExB;AAVH,EAAA,IAAA,EAAA,EAAA,EAAA;AAWE,EAAA,OAAA,CAAO,EAAA,GAAA,CAAA,EAAA,GAAA,OAAA,IAAA,IAAA,GAAA,MAAA,GAAA,OAAA,CAAS,gBAAA,KAAT,IAAA,GAAA,MAAA,GAAA,EAAA,CAA2B,gBAAA,KAA3B,IAAA,GAAA,KAA+C,EAAC;AACzD;AAEO,SAAS,sCACd,MAAA,EAC4B;AAC5B,EAAA,MAAM,WAAuC,EAAC;AAC9C,EAAA,KAAA,MAAW,EAAE,IAAA,EAAM,OAAA,EAAS,GAAG,OAAA,MAAa,MAAA,EAAQ;AAClD,IAAA,MAAM,QAAA,GAAW,iBAAA,CAAkB,EAAE,GAAG,SAAS,CAAA;AACjD,IAAA,QAAQ,IAAA;AACN,MAAA,KAAK,QAAA,EAAU;AACb,QAAA,QAAA,CAAS,KAAK,EAAE,IAAA,EAAM,UAAU,OAAA,EAAS,GAAG,UAAU,CAAA;AACtD,QAAA;AACF,MAAA;AAEA,MAAA,KAAK,MAAA,EAAQ;AACX,QAAA,IAAI,QAAQ,MAAA,KAAW,CAAA,IAAK,QAAQ,CAAC,CAAA,CAAE,SAAS,MAAA,EAAQ;AACtD,UAAA,QAAA,CAAS,IAAA,CAAK;YACZ,IAAA,EAAM,MAAA;YACN,OAAA,EAAS,OAAA,CAAQ,CAAC,CAAA,CAAE,IAAA;YACpB,GAAG,iBAAA,CAAkB,OAAA,CAAQ,CAAC,CAAC;WAChC,CAAA;AACD,UAAA;AACF,QAAA;AAEA,QAAA,QAAA,CAAS,IAAA,CAAK;UACZ,IAAA,EAAM,MAAA;UACN,OAAA,EAAS,OAAA,CAAQ,GAAA,CAAI,CAAA,IAAA,KAAQ;AAtCvC,YAAA,IAAA,EAAA;AAuCY,YAAA,MAAM,YAAA,GAAe,kBAAkB,IAAI,CAAA;AAC3C,YAAA,QAAQ,KAAK,IAAA;AACX,cAAA,KAAK,MAAA,EAAQ;AACX,gBAAA,OAAO,EAAE,IAAA,EAAM,MAAA,EAAQ,MAAM,IAAA,CAAK,IAAA,EAAM,GAAG,YAAA,EAAa;AAC1D,cAAA;AACA,cAAA,KAAK,OAAA,EAAS;AACZ,gBAAA,OAAO;kBACL,IAAA,EAAM,WAAA;kBACN,SAAA,EAAW;AACT,oBAAA,GAAA,EACE,KAAK,KAAA,YAAiB,GAAA,GAClB,KAAK,KAAA,CAAM,QAAA,KACX,CAAA,KAAA,EAAA,CACE,EAAA,GAAA,IAAA,CAAK,QAAA,KAAL,OAAA,EAAA,GAAiB,YACnB,WAAW,yBAAA,CAA0B,IAAA,CAAK,KAAK,CAAC,CAAA;AACxD,mBAAA;kBACA,GAAG;AACL,iBAAA;AACF,cAAA;AACA,cAAA,KAAK,MAAA,EAAQ;AACX,gBAAA,MAAM,IAAI,6BAAA,CAA8B;kBACtC,aAAA,EAAe;iBAChB,CAAA;AACH,cAAA;AACF;UACF,CAAC,CAAA;UACD,GAAG;SACJ,CAAA;AAED,QAAA;AACF,MAAA;AAEA,MAAA,KAAK,WAAA,EAAa;AAChB,QAAA,IAAI,IAAA,GAAO,EAAA;A
ACX,QAAA,MAAM,YAID,EAAC;AAEN,QAAA,KAAA,MAAW,QAAQ,OAAA,EAAS;AAC1B,UAAA,MAAM,YAAA,GAAe,kBAAkB,IAAI,CAAA;AAC3C,UAAA,QAAQ,KAAK,IAAA;AACX,YAAA,KAAK,MAAA,EAAQ;AACX,cAAA,IAAA,IAAQ,IAAA,CAAK,IAAA;AACb,cAAA;AACF,YAAA;AACA,YAAA,KAAK,WAAA,EAAa;AAChB,cAAA,SAAA,CAAU,IAAA,CAAK;AACb,gBAAA,EAAA,EAAI,IAAA,CAAK,UAAA;gBACT,IAAA,EAAM,UAAA;gBACN,QAAA,EAAU;AACR,kBAAA,IAAA,EAAM,IAAA,CAAK,QAAA;kBACX,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,IAAA,CAAK,IAAI;AACrC,iBAAA;gBACA,GAAG;eACJ,CAAA;AACD,cAAA;AACF,YAAA;AACF;AACF,QAAA;AAEA,QAAA,QAAA,CAAS,IAAA,CAAK;UACZ,IAAA,EAAM,WAAA;UACN,OAAA,EAAS,IAAA;UACT,UAAA,EAAY,SAAA,CAAU,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY,MAAA;UAC/C,GAAG;SACJ,CAAA;AAED,QAAA;AACF,MAAA;AAEA,MAAA,KAAK,MAAA,EAAQ;AACX,QAAA,KAAA,MAAW,gBAAgB,OAAA,EAAS;AAClC,UAAA,MAAM,oBAAA,GAAuB,kBAAkB,YAAY,CAAA;AAC3D,UAAA,QAAA,CAAS,IAAA,CAAK;YACZ,IAAA,EAAM,MAAA;AACN,YAAA,YAAA,EAAc,YAAA,CAAa,UAAA;YAC3B,OAAA,EAAS,IAAA,CAAK,SAAA,CAAU,YAAA,CAAa,MAAM,CAAA;YAC3C,GAAG;WACJ,CAAA;AACH,QAAA;AACA,QAAA;AACF,MAAA;MAEA,SAAS;AACP,QAAA,MAAM,gBAAA,GAA0B,IAAA;AAChC,QAAA,MAAM,IAAI,KAAA,CAAM,CAAA,kBAAA,EAAqB,gBAAgB,CAAA,CAAE,CAAA;AACzD,MAAA;AACF;AACF,EAAA;AAEA,EAAA,OAAO,QAAA;AACT;ACpIO,SAAS,mBAAA,CAAoB;AAClC,EAAA,EAAA;AACA,EAAA,KAAA;AACA,EAAA;AACF,CAAA,EAIG;AACD,EAAA,OAAO;IACL,EAAA,EAAI,EAAA,IAAA,OAAA,EAAA,GAAM,MAAA;IACV,OAAA,EAAS,KAAA,IAAA,OAAA,KAAA,GAAS,MAAA;AAClB,IAAA,SAAA,EAAW,WAAW,IAAA,GAAO,IAAI,IAAA,CAAK,OAAA,GAAU,GAAI,CAAA,GAAI;AAC1D,GAAA;AACF;ACZO,SAAS,gCACd,YAAA,EAC6B;AAC7B,EAAA,QAAQ,YAAA;IACN,KAAK,MAAA;AACH,MAAA,OAAO,MAAA;IACT,KAAK,QAAA;AACH,MAAA,OAAO,QAAA;IACT,KAAK,gBAAA;AACH,MAAA,OAAO,gBAAA;IACT,KAAK,eAAA;IACL,KAAK,YAAA;AACH,MAAA,OAAO,YAAA;AACT,IAAA;AACE,MAAA,OAAO,SAAA;AACX;AACF;AChBO,IAAM,+BAAA,GAAkC,EAAE,MAAA,CAAO;AACtD,EAAA,KAAA,EAAO,EAAE,MAAA,CAAO;AACd,IAAA,OAAA,EAAS,EAAE,MAAA,EAAO;;;;IAKlB,IAAA,EAAM,CAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;IACzB,KAAA,EAAO,CAAA,CAAE,GAAA,EAAI,CAAE,OAAA,EAAQ;IACvB,IAAA,EAAM,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,CAAE,MAAA,EAAO,EAAG,CAAA,CAAE,MAAA,EAAQ,CAAC,CAAA,CAAE,OAAA;GACzC;AACH,CAAC,CAAA;AAYM,IAAM,qCAAA,GACX;EACE,WAAA,EAAa,+BAAA;EACb,cAAA,EAAgB,CAAA,IAAA,KAAQ,IAAA,CAAK,KAAA,CAAM;AACrC,CAAA;ACvBK,SAAS,YAAA,CAAa;AAC3B,EAAA,IAAA;AACA,EAAA;AACF,CAAA,EAuBE;AAhCF,EAAA,IAAA,EAAA;AAkCE,EAAA,MAAM,KAAA,GAAA,CAAA,CAAQ,KAAA,IAAA,CAAK,KAAA,KAAL,OAAA,MAAA,GAAA,EAAA,CAAY,MAAA,IAAS,IAAA,CAAK,KAAA,GAAQ,MAAA;AAChD,EAAA,MAAM,eAA6C,EAAC;AAEpD,EAAA,IAAI,SAAS,IAAA,EAAM;AACjB,IAAA,OAAO,EAAE,KAAA,EAAO,MAAA,EAAW,WAAA,EAAa,QAAW,YAAA,EAAa;AAClE,EAAA;AAEA,EAAA,MAAM,aAAa,IAAA,CAAK,UAAA;AAExB,EAAA,MAAM,oBAOD,EAAC;AAEN,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,IAAI,IAAA,CAAK,SAAS,kBAAA,EAAoB;AACpC,MAAA,YAAA,CAAa,IAAA,CAAK,EAAE,IAAA,EAAM,kBAAA,EAAoB,MAAM,CAAA;IACtD,CAAA,MAAO;AACL,MAAA,iBAAA,CAAkB,IAAA,CAAK;QACrB,IAAA,EAAM,UAAA;QACN,QAAA,EAAU;AACR,UAAA,IAAA,EAAM,IAAA,CAAK,IAAA;AACX,UAAA,WAAA,EAAa,IAAA,CAAK,WAAA;AAClB,UAAA,UAAA,EAAY,IAAA,CAAK;AACnB;OACD,CAAA;AACH,IAAA;AACF,EAAA;AAEA,EAAA,IAAI,cAAc,IAAA,EAAM;AACtB,IAAA,OAAO,EAAE,KAAA,EAAO,iBAAA,EAAmB,WAAA,EAAa,QAAW,YAAA,EAAa;AAC1E,EAAA;AAEA,EAAA,MAAM,OAAO,UAAA,CAAW,IAAA;AAExB,EAAA,QAAQ,IAAA;IACN,KAAK,MAAA;IACL,KAAK,MAAA;IACL,KAAK,UAAA;AACH,MAAA,OAAO,EAAE,KAAA,EAAO,iBAAA,EAAmB,WAAA,EAAa,MAAM,YAAA,EAAa;IACrE,KAAK,MAAA;AACH,MAAA,OAAO;QACL,KAAA,EAAO,iBAAA;QACP,WAAA,EAAa;UACX,IAAA,EAAM,UAAA;UACN,QAAA,EAAU;AACR,YAAA,IAAA,EAAM,UAAA,CAAW;AACnB;AACF,SAAA;AACA,QAAA;AACF,OAAA;IACF,SAAS;AACP,MAAA,MAAM,gBAAA,GAA0B,IAAA;AAChC,MAAA,MAAM,IAAIA,6BAAAA,CAA8B;AACtC,QAAA,aAAA,EAAe,iCAAiC,gBAAgB,CAAA;OACjE,CAAA;AACH,IAAA;AACF;AACF;ACrCO,IAAM,oCAAN,MAAmE;;EAYxE,WAAA,CACE,OAAA,EACA,UACA,MAAA,EACA;AAfF,IAAA,IAAA,CAAS,oBAAA,GAAuB,IAAA
;AA5DlC,IAAA,IAAA,EAAA,EAAA,EAAA;AA4EI,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAChB,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AAGd,IAAA,MAAM,cAAA,GAAA,CACJ,EAAA,GAAA,MAAA,CAAO,cAAA,KAAP,OAAA,EAAA,GAAyB,qCAAA;AAC3B,IAAA,IAAA,CAAK,WAAA,GAAc,qCAAA;MACjB,cAAA,CAAe;AACjB,KAAA;AACA,IAAA,IAAA,CAAK,qBAAA,GAAwB,+BAA+B,cAAc,CAAA;AAE1E,IAAA,IAAA,CAAK,yBAAA,GAAA,CAA4B,EAAA,GAAA,MAAA,CAAO,yBAAA,KAAP,OAAA,EAAA,GAAoC,KAAA;AACvE,EAAA;AAEA,EAAA,IAAI,2BAAA,GAA2D;AAC7D,IAAA,OAAO,KAAK,MAAA,CAAO,2BAAA;AACrB,EAAA;AAEA,EAAA,IAAI,QAAA,GAAmB;AACrB,IAAA,OAAO,KAAK,MAAA,CAAO,QAAA;AACrB,EAAA;AAEA,EAAA,IAAY,mBAAA,GAA8B;AACxC,IAAA,OAAO,IAAA,CAAK,OAAO,QAAA,CAAS,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,EAAE,IAAA,EAAK;AACjD,EAAA;EAEQ,OAAA,CAAQ;AACd,IAAA,IAAA;AACA,IAAA,MAAA;AACA,IAAA,SAAA;AACA,IAAA,WAAA;AACA,IAAA,IAAA;AACA,IAAA,IAAA;AACA,IAAA,gBAAA;AACA,IAAA,eAAA;AACA,IAAA,gBAAA;AACA,IAAA,aAAA;AACA,IAAA,cAAA;AACA,IAAA;GACF,EAAiD;AApHnD,IAAA,IAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA;AAqHI,IAAA,MAAM,OAAO,IAAA,CAAK,IAAA;AAElB,IAAA,MAAM,WAAyC,EAAC;AAEhD,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,QAAA,CAAS,IAAA,CAAK;QACZ,IAAA,EAAM,qBAAA;QACN,OAAA,EAAS;OACV,CAAA;AACH,IAAA;AAEA,IAAA,IAAA,CACE,cAAA,IAAA,IAAA,GAAA,MAAA,GAAA,cAAA,CAAgB,IAAA,MAAS,MAAA,IACzB,cAAA,CAAe,MAAA,IAAU,IAAA,IACzB,CAAC,IAAA,CAAK,yBAAA,EACN;AACA,MAAA,QAAA,CAAS,IAAA,CAAK;QACZ,IAAA,EAAM,qBAAA;QACN,OAAA,EAAS,gBAAA;QACT,OAAA,EACE;OACH,CAAA;AACH,IAAA;AAEA,IAAA,MAAM,QAAA,GAAW;;AAEf,MAAA,KAAA,EAAO,IAAA,CAAK,OAAA;;AAGZ,MAAA,IAAA,EAAM,KAAK,QAAA,CAAS,IAAA;;MAGpB,UAAA,EAAY,SAAA;AACZ,MAAA,WAAA;MACA,KAAA,EAAO,IAAA;MACP,iBAAA,EAAmB,gBAAA;MACnB,gBAAA,EAAkB,eAAA;MAClB,eAAA,EAAA,CACE,cAAA,IAAA,IAAA,GAAA,MAAA,GAAA,cAAA,CAAgB,IAAA,MAAS,MAAA,GACrB,IAAA,CAAK,yBAAA,KAA8B,IAAA,IACnC,cAAA,CAAe,MAAA,IAAU,IAAA,GACvB;QACE,IAAA,EAAM,aAAA;QACN,WAAA,EAAa;AACX,UAAA,MAAA,EAAQ,cAAA,CAAe,MAAA;AACvB,UAAA,IAAA,EAAA,CAAM,EAAA,GAAA,cAAA,CAAe,IAAA,KAAf,IAAA,GAAA,EAAA,GAAuB,UAAA;AAC7B,UAAA,WAAA,EAAa,cAAA,CAAe;AAC9B;OACF,GACA,EAAE,IAAA,EAAM,aAAA,EAAc,GACxB,MAAA;MAEN,IAAA,EAAM,aAAA;AACN,MAAA,IAAA;AACA,MAAA,GAAG,gBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,gBAAA,CAAmB,KAAK,mBAAA,CAAA;MAE3B,gBAAA,EAAA,CACE,EAAA,GAAA,CAAA,EAAA,GAAA,gBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,iBAAmB,IAAA,CAAK,mBAAA,CAAA,KAAxB,IAAA,GAAA,MAAA,GAAA,EAAA,CAA8C,oBAA9C,IAAA,GAAA,EAAA,GAAA,CACA,EAAA,GAAA,gBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,iBAAmB,mBAAA,CAAA,KAAnB,IAAA,GAAA,MAAA,GAAA,EAAA,CAAyC,eAAA;;AAG3C,MAAA,QAAA,EAAU,sCAAsC,MAAM;AACxD,KAAA;AAEA,IAAA,QAAQ,IAAA;AACN,MAAA,KAAK,SAAA,EAAW;AACd,QAAA,MAAM,EAAE,KAAA,EAAO,WAAA,EAAa,YAAA,KAAiB,YAAA,CAAa;AACxD,UAAA,IAAA;AACA,UAAA,iBAAA,EAAmB,IAAA,CAAK;SACzB,CAAA;AAED,QAAA,OAAO;AACL,UAAA,IAAA,EAAM,EAAE,GAAG,QAAA,EAAU,KAAA,EAAO,WAAA,EAAY;AACxC,UAAA,QAAA,EAAU,CAAC,GAAG,QAAA,EAAU,GAAG,YAAY;AACzC,SAAA;AACF,MAAA;AAEA,MAAA,KAAK,aAAA,EAAe;AAClB,QAAA,OAAO;UACL,IAAA,EAAM;YACJ,GAAG,QAAA;AACH,YAAA,eAAA,EACE,IAAA,CAAK,yBAAA,KAA8B,IAAA,IAAQ,IAAA,CAAK,UAAU,IAAA,GACtD;cACE,IAAA,EAAM,aAAA;cACN,WAAA,EAAa;AACX,gBAAA,MAAA,EAAQ,IAAA,CAAK,MAAA;AACb,gBAAA,IAAA,EAAA,CAAM,EAAA,GAAA,IAAA,CAAK,IAAA,KAAL,IAAA,GAAA,EAAA,GAAa,UAAA;AACnB,gBAAA,WAAA,EAAa,IAAA,CAAK;AACpB;aACF,GACA,EAAE,MAAM,aAAA;AAChB,WAAA;AACA,UAAA;AACF,SAAA;AACF,MAAA;AAEA,MAAA,KAAK,aAAA,EAAe;AAClB,QAAA,OAAO;UACL,IAAA,EAAM;YACJ,GAAG,QAAA;YACH,WAAA,EAAa;cACX,IAAA,EAAM,UAAA;AACN,cAAA,QAAA,EAAU,EAAE,IAAA,EAAM,IAAA,CAAK,IAAA,CAAK,IAAA;AAC9B,aAAA;YACA,KAAA,EAAO;AACL,cAAA;gBACE,IAAA,EAAM,UAAA;gBACN,QAAA,EAAU;AACR,kBAAA,IAAA,EAAM,KAAK,IAAA,CAAK,IAAA;AAChB,kBAAA,WAAA,EAAa,KAAK,IAAA,CAAK,WAAA;AACvB,kBAAA,UAAA,EAAY,KAAK,IAAA,CAAK;AACxB;AACF;AACF;AACF,WAAA;AACA,UAAA;AACF,SAAA;AACF,MAAA;MAEA,S
AAS;AACP,QAAA,MAAM,gBAAA,GAA0B,IAAA;AAChC,QAAA,MAAM,IAAI,KAAA,CAAM,CAAA,kBAAA,EAAqB,gBAAgB,CAAA,CAAE,CAAA;AACzD,MAAA;AACF;AACF,EAAA;AAEA,EAAA,MAAM,WACJ,OAAA,EAC6D;AAtPjE,IAAA,IAAA,EAAA,EAAA,IAAA,EAAA,EAAA,EAAA,EAAA,IAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA;AAuPI,IAAA,MAAM,EAAE,MAAM,QAAA,EAAS,GAAI,KAAK,OAAA,CAAQ,EAAE,GAAG,OAAA,EAAS,CAAA;AAEtD,IAAA,MAAM,IAAA,GAAO,IAAA,CAAK,SAAA,CAAU,IAAI,CAAA;AAEhC,IAAA,MAAM;AACJ,MAAA,eAAA;MACA,KAAA,EAAO,YAAA;MACP,QAAA,EAAU;AACZ,KAAA,GAAI,MAAM,aAAA,CAAc;MACtB,GAAA,EAAK,IAAA,CAAK,OAAO,GAAA,CAAI;QACnB,IAAA,EAAM,mBAAA;AACN,QAAA,OAAA,EAAS,IAAA,CAAK;OACf,CAAA;AACD,MAAA,OAAA,EAAS,eAAe,IAAA,CAAK,MAAA,CAAO,OAAA,EAAQ,EAAG,QAAQ,OAAO,CAAA;MAC9D,IAAA,EAAM,IAAA;AACN,MAAA,qBAAA,EAAuB,IAAA,CAAK,qBAAA;MAC5B,yBAAA,EAA2B,yBAAA;AACzB,QAAA;AACF,OAAA;AACA,MAAA,WAAA,EAAa,OAAA,CAAQ,WAAA;AACrB,MAAA,KAAA,EAAO,KAAK,MAAA,CAAO;KACpB,CAAA;AAED,IAAA,MAAM,EAAE,QAAA,EAAU,SAAA,EAAW,GAAG,aAAY,GAAI,IAAA;AAChD,IAAA,MAAM,MAAA,GAAS,YAAA,CAAa,OAAA,CAAQ,CAAC,CAAA;AAGrC,IAAA,MAAM,gBAAA,GAAoD;MACxD,CAAC,IAAA,CAAK,mBAAmB,GAAG,EAAC;AAC7B,MAAA,GAAA,CAAG,EAAA,GAAA,CAAA,EAAA,GAAA,IAAA,CAAK,MAAA,CAAO,iBAAA,KAAZ,IAAA,GAAA,MAAA,GAAA,EAAA,CAA+B,eAAA,KAA/B,IAAA,GAAA,MAAA,GAAA,EAAA,CAAA,KAAA,EAAA,EAAiD;QAClD,UAAA,EAAY;OACd;AACF,KAAA;AACA,IAAA,MAAM,0BACJ,EAAA,GAAA,YAAA,CAAa,KAAA,KAAb,IAAA,GAAA,SAAA,EAAA,CAAoB,yBAAA;AACtB,IAAA,MAAM,sBAAqB,EAAA,GAAA,YAAA,CAAa,KAAA,KAAb,IAAA,GAAA,SAAA,EAAA,CAAoB,qBAAA;AAC/C,IAAA,IAAA,CAAI,sBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,sBAAA,CAAwB,qBAAoB,IAAA,EAAM;AACpD,MAAA,gBAAA,CAAiB,KAAK,mBAAmB,CAAA,CAAE,kBACzC,sBAAA,IAAA,IAAA,GAAA,SAAA,sBAAA,CAAwB,gBAAA;AAC5B,IAAA;AACA,IAAA,IAAA,CAAI,sBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,sBAAA,CAAwB,+BAA8B,IAAA,EAAM;AAC9D,MAAA,gBAAA,CAAiB,KAAK,mBAAmB,CAAA,CAAE,2BACzC,sBAAA,IAAA,IAAA,GAAA,SAAA,sBAAA,CAAwB,0BAAA;AAC5B,IAAA;AACA,IAAA,IAAA,CAAI,sBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,sBAAA,CAAwB,+BAA8B,IAAA,EAAM;AAC9D,MAAA,gBAAA,CAAiB,KAAK,mBAAmB,CAAA,CAAE,2BACzC,sBAAA,IAAA,IAAA,GAAA,SAAA,sBAAA,CAAwB,0BAAA;AAC5B,IAAA;AACA,IAAA,IAAA,CAAI,kBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,kBAAA,CAAoB,kBAAiB,IAAA,EAAM;AAC7C,MAAA,gBAAA,CAAiB,KAAK,mBAAmB,CAAA,CAAE,qBACzC,kBAAA,IAAA,IAAA,GAAA,SAAA,kBAAA,CAAoB,aAAA;AACxB,IAAA;AAEA,IAAA,OAAO;AACL,MAAA,IAAA,EAAA,CAAM,EAAA,GAAA,MAAA,CAAO,OAAA,CAAQ,OAAA,KAAf,OAAA,EAAA,GAA0B,MAAA;AAChC,MAAA,SAAA,EAAA,CAAW,EAAA,GAAA,MAAA,CAAO,OAAA,CAAQ,iBAAA,KAAf,OAAA,EAAA,GAAoC,MAAA;MAC/C,SAAA,EAAA,CAAW,EAAA,GAAA,OAAO,OAAA,CAAQ,UAAA,KAAf,OAAA,MAAA,GAAA,EAAA,CAA2B,GAAA,CAAI,CAAA,QAAA,KAAS;AA/SzD,QAAA,IAAAC,GAAAA;AA+S6D,QAAA,OAAA;UACrD,YAAA,EAAc,UAAA;AACd,UAAA,UAAA,EAAA,CAAYA,GAAAA,GAAA,QAAA,CAAS,EAAA,KAAT,IAAA,GAAAA,MAAe,UAAA,EAAW;AACtC,UAAA,QAAA,EAAU,SAAS,QAAA,CAAS,IAAA;AAC5B,UAAA,IAAA,EAAM,SAAS,QAAA,CAAS;AAC1B,SAAA;MAAA,CAAA,CAAA;MACA,YAAA,EAAc,+BAAA,CAAgC,OAAO,aAAa,CAAA;MAClE,KAAA,EAAO;QACL,YAAA,EAAA,CAAc,EAAA,GAAA,CAAA,KAAA,YAAA,CAAa,KAAA,KAAb,OAAA,MAAA,GAAA,EAAA,CAAoB,aAAA,KAApB,IAAA,GAAA,EAAA,GAAqC,GAAA;QACnD,gBAAA,EAAA,CAAkB,EAAA,GAAA,CAAA,KAAA,YAAA,CAAa,KAAA,KAAb,OAAA,MAAA,GAAA,EAAA,CAAoB,iBAAA,KAApB,IAAA,GAAA,EAAA,GAAyC;AAC7D,OAAA;AACA,MAAA,gBAAA;MACA,OAAA,EAAS,EAAE,WAAW,WAAA,EAAY;AAClC,MAAA,WAAA,EAAa,EAAE,OAAA,EAAS,eAAA,EAAiB,IAAA,EAAM,WAAA,EAAY;AAC3D,MAAA,QAAA,EAAU,oBAAoB,YAAY,CAAA;AAC1C,MAAA,QAAA;AACA,MAAA,OAAA,EAAS,EAAE,IAAA;AACb,KAAA;AACF,EAAA;AAEA,EAAA,MAAM,SACJ,OAAA,EAC2D;AArU/D,IAAA,IAAA,EAAA;AAsUI,IAAA,IAAI,IAAA,CAAK,SAAS,iBAAA,EAAmB;AACnC,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,UAAA,CAAW,OAAO,CAAA;AAC5C,MAAA,MAAM,eAAA,GAAkB,IAAI,cAAA,CAA0C;AACpE,QAAA,KAAA,CAAM,UAAA,EAAY;AAChB,UAAA,UAAA,CAAW,QAAQ,EAAE,IAAA,EAAM,qBAAqB,GAAG,MAAA,CAAO,UAAU,CAAA;AACpE
,UAAA,IAAI,OAAO,SAAA,EAAW;AACpB,YAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,MAAA,CAAO,SAAS,CAAA,EAAG;AACnC,cAAA,KAAA,MAAW,IAAA,IAAQ,OAAO,SAAA,EAAW;AACnC,gBAAA,IAAI,IAAA,CAAK,SAAS,MAAA,EAAQ;AACxB,kBAAA,UAAA,CAAW,OAAA,CAAQ;oBACjB,IAAA,EAAM,WAAA;AACN,oBAAA,SAAA,EAAW,IAAA,CAAK;mBACjB,CAAA;AACH,gBAAA;AACF,cAAA;YACF,CAAA,MAAO;AACL,cAAA,UAAA,CAAW,OAAA,CAAQ;gBACjB,IAAA,EAAM,WAAA;AACN,gBAAA,SAAA,EAAW,MAAA,CAAO;eACnB,CAAA;AACH,YAAA;AACF,UAAA;AACA,UAAA,IAAI,OAAO,IAAA,EAAM;AACf,YAAA,UAAA,CAAW,OAAA,CAAQ;cACjB,IAAA,EAAM,YAAA;AACN,cAAA,SAAA,EAAW,MAAA,CAAO;aACnB,CAAA;AACH,UAAA;AACA,UAAA,IAAI,OAAO,SAAA,EAAW;AACpB,YAAA,KAAA,MAAW,QAAA,IAAY,OAAO,SAAA,EAAW;AACvC,cAAA,UAAA,CAAW,OAAA,CAAQ;gBACjB,IAAA,EAAM,WAAA;gBACN,GAAG;eACJ,CAAA;AACH,YAAA;AACF,UAAA;AACA,UAAA,UAAA,CAAW,OAAA,CAAQ;YACjB,IAAA,EAAM,QAAA;AACN,YAAA,YAAA,EAAc,MAAA,CAAO,YAAA;AACrB,YAAA,KAAA,EAAO,MAAA,CAAO,KAAA;AACd,YAAA,QAAA,EAAU,MAAA,CAAO,QAAA;AACjB,YAAA,gBAAA,EAAkB,MAAA,CAAO;WAC1B,CAAA;AACD,UAAA,UAAA,CAAW,KAAA,EAAM;AACnB,QAAA;OACD,CAAA;AACD,MAAA,OAAO;QACL,MAAA,EAAQ,eAAA;AACR,QAAA,OAAA,EAAS,MAAA,CAAO,OAAA;AAChB,QAAA,WAAA,EAAa,MAAA,CAAO,WAAA;AACpB,QAAA,QAAA,EAAU,MAAA,CAAO;AACnB,OAAA;AACF,IAAA;AAEA,IAAA,MAAM,EAAE,MAAM,QAAA,EAAS,GAAI,KAAK,OAAA,CAAQ,EAAE,GAAG,OAAA,EAAS,CAAA;AAEtD,IAAA,MAAM,IAAA,GAAO;MACX,GAAG,IAAA;MACH,MAAA,EAAQ,IAAA;;AAGR,MAAA,cAAA,EAAgB,KAAK,MAAA,CAAO,YAAA,GACxB,EAAE,aAAA,EAAe,MAAK,GACtB;AACN,KAAA;AAEA,IAAA,MAAM,iBAAA,GAAA,CACJ,KAAA,IAAA,CAAK,MAAA,CAAO,sBAAZ,IAAA,GAAA,MAAA,GAAA,GAA+B,qBAAA,EAAA;AAEjC,IAAA,MAAM,EAAE,eAAA,EAAiB,KAAA,EAAO,QAAA,EAAS,GAAI,MAAM,aAAA,CAAc;MAC/D,GAAA,EAAK,IAAA,CAAK,OAAO,GAAA,CAAI;QACnB,IAAA,EAAM,mBAAA;AACN,QAAA,OAAA,EAAS,IAAA,CAAK;OACf,CAAA;AACD,MAAA,OAAA,EAAS,eAAe,IAAA,CAAK,MAAA,CAAO,OAAA,EAAQ,EAAG,QAAQ,OAAO,CAAA;AAC9D,MAAA,IAAA;AACA,MAAA,qBAAA,EAAuB,IAAA,CAAK,qBAAA;MAC5B,yBAAA,EAA2B,gCAAA;QACzB,IAAA,CAAK;AACP,OAAA;AACA,MAAA,WAAA,EAAa,OAAA,CAAQ,WAAA;AACrB,MAAA,KAAA,EAAO,KAAK,MAAA,CAAO;KACpB,CAAA;AAED,IAAA,MAAM,EAAE,QAAA,EAAU,SAAA,EAAW,GAAG,aAAY,GAAI,IAAA;AAEhD,IAAA,MAAM,YAQD,EAAC;AAEN,IAAA,IAAI,YAAA,GAA4C,SAAA;AAChD,IAAA,IAAI,KAAA,GAWA;MACF,gBAAA,EAAkB,MAAA;MAClB,uBAAA,EAAyB;QACvB,eAAA,EAAiB,MAAA;QACjB,wBAAA,EAA0B,MAAA;QAC1B,wBAAA,EAA0B;AAC5B,OAAA;MACA,YAAA,EAAc,MAAA;MACd,mBAAA,EAAqB;QACnB,YAAA,EAAc;AAChB;AACF,KAAA;AACA,IAAA,IAAI,YAAA,GAAe,IAAA;AACnB,IAAA,IAAI,sBAAsB,IAAA,CAAK,mBAAA;AAE/B,IAAA,OAAO;AACL,MAAA,MAAA,EAAQ,QAAA,CAAS,WAAA;AACf,QAAA,IAAI,eAAA,CAGF;;AAEA,UAAA,SAAA,CAAU,OAAO,UAAA,EAAY;AAxcvC,YAAA,IAAAA,GAAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,IAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA;AA0cY,YAAA,IAAI,CAAC,MAAM,OAAA,EAAS;AAClB,cAAA,YAAA,GAAe,OAAA;AACf,cAAA,UAAA,CAAW,QAAQ,EAAE,IAAA,EAAM,SAAS,KAAA,EAAO,KAAA,CAAM,OAAO,CAAA;AACxD,cAAA;AACF,YAAA;AACA,YAAA,MAAM,QAAQ,KAAA,CAAM,KAAA;AAEpB,YAAA,iBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,iBAAA,CAAmB,YAAA,CAAa,MAAM,QAAA,CAAA;AAGtC,YAAA,IAAI,WAAW,KAAA,EAAO;AACpB,cAAA,YAAA,GAAe,OAAA;AACf,cAAA,UAAA,CAAW,OAAA,CAAQ,EAAE,IAAA,EAAM,OAAA,EAAS,OAAO,KAAA,CAAM,KAAA,CAAM,SAAS,CAAA;AAChE,cAAA;AACF,YAAA;AAEA,YAAA,IAAI,YAAA,EAAc;AAChB,cAAA,YAAA,GAAe,KAAA;AAEf,cAAA,UAAA,CAAW,OAAA,CAAQ;gBACjB,IAAA,EAAM,mBAAA;AACN,gBAAA,GAAG,oBAAoB,KAAK;eAC7B,CAAA;AACH,YAAA;AAEA,YAAA,IAAI,KAAA,CAAM,SAAS,IAAA,EAAM;AACvB,cAAA,MAAM;AACJ,gBAAA,aAAA;AACA,gBAAA,iBAAA;AACA,gBAAA,qBAAA;AACA,gBAAA;AACF,eAAA,GAAI,KAAA,CAAM,KAAA;AAEV,cAAA,KAAA,CAAM,YAAA,GAAe,aAAA,IAAA,IAAA,GAAA,aAAA,GAAiB,MAAA;AACtC,cAAA,KAAA,CAAM,gBAAA,GAAmB,iBAAA,IAAA,IAAA,GAAA,iBAAA,GAAqB,MAAA;AAE9C,cAAA,IAAA,CAAI,yBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,yBAAA,CAA2B,qBAAoB,IAAA,EAAM;AACvD,gBAAA,KAAA,CAAM,uBAAA,CAAwB,eAAA,GAC5B,yBAAA,IAAA,IAAA,GAAA,S
AAA,yBAAA,CAA2B,gBAAA;AAC/B,cAAA;AACA,cAAA,IAAA,CACE,yBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,yBAAA,CAA2B,+BAA8B,IAAA,EACzD;AACA,gBAAA,KAAA,CAAM,uBAAA,CAAwB,wBAAA,GAC5B,yBAAA,IAAA,IAAA,GAAA,SAAA,yBAAA,CAA2B,0BAAA;AAC/B,cAAA;AACA,cAAA,IAAA,CACE,yBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,yBAAA,CAA2B,+BAA8B,IAAA,EACzD;AACA,gBAAA,KAAA,CAAM,uBAAA,CAAwB,wBAAA,GAC5B,yBAAA,IAAA,IAAA,GAAA,SAAA,yBAAA,CAA2B,0BAAA;AAC/B,cAAA;AACA,cAAA,IAAA,CAAI,qBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,qBAAA,CAAuB,kBAAiB,IAAA,EAAM;AAChD,gBAAA,KAAA,CAAM,mBAAA,CAAoB,YAAA,GACxB,qBAAA,IAAA,IAAA,GAAA,SAAA,qBAAA,CAAuB,aAAA;AAC3B,cAAA;AACF,YAAA;AAEA,YAAA,MAAM,MAAA,GAAS,KAAA,CAAM,OAAA,CAAQ,CAAC,CAAA;AAE9B,YAAA,IAAA,CAAI,MAAA,IAAA,IAAA,GAAA,MAAA,GAAA,MAAA,CAAQ,kBAAiB,IAAA,EAAM;AACjC,cAAA,YAAA,GAAe,+BAAA;gBACb,MAAA,CAAO;AACT,eAAA;AACF,YAAA;AAEA,YAAA,IAAA,CAAI,MAAA,IAAA,IAAA,GAAA,MAAA,GAAA,MAAA,CAAQ,UAAS,IAAA,EAAM;AACzB,cAAA;AACF,YAAA;AAEA,YAAA,MAAM,QAAQ,MAAA,CAAO,KAAA;AAGrB,YAAA,IAAI,KAAA,CAAM,qBAAqB,IAAA,EAAM;AACnC,cAAA,UAAA,CAAW,OAAA,CAAQ;gBACjB,IAAA,EAAM,WAAA;AACN,gBAAA,SAAA,EAAW,KAAA,CAAM;eAClB,CAAA;AACH,YAAA;AAEA,YAAA,IAAI,KAAA,CAAM,WAAW,IAAA,EAAM;AACzB,cAAA,UAAA,CAAW,OAAA,CAAQ;gBACjB,IAAA,EAAM,YAAA;AACN,gBAAA,SAAA,EAAW,KAAA,CAAM;eAClB,CAAA;AACH,YAAA;AAEA,YAAA,IAAI,KAAA,CAAM,cAAc,IAAA,EAAM;AAC5B,cAAA,KAAA,MAAW,aAAA,IAAiB,MAAM,UAAA,EAAY;AAC5C,gBAAA,MAAM,QAAQ,aAAA,CAAc,KAAA;AAE5B,gBAAA,IAAI,SAAA,CAAU,KAAK,CAAA,IAAK,IAAA,EAAM;AAC5B,kBAAA,IAAI,aAAA,CAAc,SAAS,UAAA,EAAY;AACrC,oBAAA,MAAM,IAAI,wBAAA,CAAyB;sBACjC,IAAA,EAAM,aAAA;sBACN,OAAA,EAAS,CAAA,yBAAA;qBACV,CAAA;AACH,kBAAA;AAEA,kBAAA,IAAI,aAAA,CAAc,MAAM,IAAA,EAAM;AAC5B,oBAAA,MAAM,IAAI,wBAAA,CAAyB;sBACjC,IAAA,EAAM,aAAA;sBACN,OAAA,EAAS,CAAA,6BAAA;qBACV,CAAA;AACH,kBAAA;AAEA,kBAAA,IAAA,CAAA,CAAIA,MAAA,aAAA,CAAc,QAAA,KAAd,OAAA,MAAA,GAAAA,GAAAA,CAAwB,SAAQ,IAAA,EAAM;AACxC,oBAAA,MAAM,IAAI,wBAAA,CAAyB;sBACjC,IAAA,EAAM,aAAA;sBACN,OAAA,EAAS,CAAA,wCAAA;qBACV,CAAA;AACH,kBAAA;AAEA,kBAAA,SAAA,CAAU,KAAK,CAAA,GAAI;AACjB,oBAAA,EAAA,EAAI,aAAA,CAAc,EAAA;oBAClB,IAAA,EAAM,UAAA;oBACN,QAAA,EAAU;AACR,sBAAA,IAAA,EAAM,cAAc,QAAA,CAAS,IAAA;AAC7B,sBAAA,SAAA,EAAA,CAAW,EAAA,GAAA,aAAA,CAAc,QAAA,CAAS,SAAA,KAAvB,OAAA,EAAA,GAAoC;AACjD,qBAAA;oBACA,WAAA,EAAa;AACf,mBAAA;AAEA,kBAAA,MAAMC,SAAAA,GAAW,UAAU,KAAK,CAAA;AAEhC,kBAAA,IAAA,CAAA,CACE,EAAA,GAAAA,SAAAA,CAAS,QAAA,KAAT,IAAA,GAAA,SAAA,EAAA,CAAmB,IAAA,KAAQ,IAAA,IAAA,CAAA,CAC3B,EAAA,GAAAA,UAAS,QAAA,KAAT,IAAA,GAAA,MAAA,GAAA,EAAA,CAAmB,cAAa,IAAA,EAChC;AAEA,oBAAA,IAAIA,SAAAA,CAAS,QAAA,CAAS,SAAA,CAAU,MAAA,GAAS,CAAA,EAAG;AAC1C,sBAAA,UAAA,CAAW,OAAA,CAAQ;wBACjB,IAAA,EAAM,iBAAA;wBACN,YAAA,EAAc,UAAA;AACd,wBAAA,UAAA,EAAYA,SAAAA,CAAS,EAAA;AACrB,wBAAA,QAAA,EAAUA,UAAS,QAAA,CAAS,IAAA;AAC5B,wBAAA,aAAA,EAAeA,UAAS,QAAA,CAAS;uBAClC,CAAA;AACH,oBAAA;AAIA,oBAAA,IAAI,cAAA,CAAeA,SAAAA,CAAS,QAAA,CAAS,SAAS,CAAA,EAAG;AAC/C,sBAAA,UAAA,CAAW,OAAA,CAAQ;wBACjB,IAAA,EAAM,WAAA;wBACN,YAAA,EAAc,UAAA;AACd,wBAAA,UAAA,EAAA,CAAY,EAAA,GAAAA,SAAAA,CAAS,EAAA,KAAT,IAAA,GAAA,KAAe,UAAA,EAAW;AACtC,wBAAA,QAAA,EAAUA,UAAS,QAAA,CAAS,IAAA;AAC5B,wBAAA,IAAA,EAAMA,UAAS,QAAA,CAAS;uBACzB,CAAA;AACDA,sBAAAA,SAAAA,CAAS,WAAA,GAAc,IAAA;AACzB,oBAAA;AACF,kBAAA;AAEA,kBAAA;AACF,gBAAA;AAGA,gBAAA,MAAM,QAAA,GAAW,UAAU,KAAK,CAAA;AAEhC,gBAAA,IAAI,SAAS,WAAA,EAAa;AACxB,kBAAA;AACF,gBAAA;AAEA,gBAAA,IAAA,CAAA,CAAI,KAAA,aAAA,CAAc,QAAA,KAAd,OAAA,MAAA,GAAA,EAAA,CAAwB,cAAa,IAAA,EAAM;AAC7C,kBAAA,QAAA,CAAS,QAAA,CAAU,SAAA,IAAA,CACjB,EAAA,GAAA,CAAA,EAAA,GAAA,aAAA,CAAc,QAAA,KAAd,IAAA,GAAA,MAAA,GAAA,EAAA,CAAwB,SAAA,KAAxB,IAAA,GAAA,EAAA,GAAqC,EAAA;AACzC,gBAAA;AAGA,gBAAA,UAAA,CAAW,OAAA,CAAQ;kBACjB,IAAA,EAAM,iBAAA;kBACN,YAAA,EAAc,UAAA;AACd,kBAAA,UAAA,EAAY,QAAA,CAAS,EAAA;AACrB,kBAAA,QAAA,EA
AU,SAAS,QAAA,CAAS,IAAA;AAC5B,kBAAA,aAAA,EAAA,CAAe,EAAA,GAAA,aAAA,CAAc,QAAA,CAAS,SAAA,KAAvB,OAAA,EAAA,GAAoC;iBACpD,CAAA;AAGD,gBAAA,IAAA,CAAA,CACE,EAAA,GAAA,SAAS,QAAA,KAAT,IAAA,GAAA,SAAA,EAAA,CAAmB,IAAA,KAAQ,UAC3B,EAAA,GAAA,QAAA,CAAS,aAAT,IAAA,GAAA,MAAA,GAAA,GAAmB,SAAA,KAAa,IAAA,IAChC,eAAe,QAAA,CAAS,QAAA,CAAS,SAAS,CAAA,EAC1C;AACA,kBAAA,UAAA,CAAW,OAAA,CAAQ;oBACjB,IAAA,EAAM,WAAA;oBACN,YAAA,EAAc,UAAA;AACd,oBAAA,UAAA,EAAA,CAAY,EAAA,GAAA,QAAA,CAAS,EAAA,KAAT,IAAA,GAAA,KAAe,UAAA,EAAW;AACtC,oBAAA,QAAA,EAAU,SAAS,QAAA,CAAS,IAAA;AAC5B,oBAAA,IAAA,EAAM,SAAS,QAAA,CAAS;mBACzB,CAAA;AACD,kBAAA,QAAA,CAAS,WAAA,GAAc,IAAA;AACzB,gBAAA;AACF,cAAA;AACF,YAAA;AACF,UAAA,CAAA;AAEA,UAAA,KAAA,CAAM,UAAA,EAAY;AA/oB5B,YAAA,IAAAD,GAAAA,EAAA,EAAA;AAgpBY,YAAA,MAAM,gBAAA,GAAoD;cACxD,CAAC,mBAAmB,GAAG,EAAC;AACxB,cAAA,GAAG,iBAAA,IAAA,IAAA,GAAA,MAAA,GAAA,iBAAA,CAAmB,aAAA;AACxB,aAAA;AACA,YAAA,IAAI,KAAA,CAAM,uBAAA,CAAwB,eAAA,IAAmB,IAAA,EAAM;AACzD,cAAA,gBAAA,CAAiB,mBAAmB,CAAA,CAAE,eAAA,GACpC,KAAA,CAAM,uBAAA,CAAwB,eAAA;AAClC,YAAA;AACA,YAAA,IACE,KAAA,CAAM,uBAAA,CAAwB,wBAAA,IAA4B,IAAA,EAC1D;AACA,cAAA,gBAAA,CAAiB,mBAAmB,CAAA,CAAE,wBAAA,GACpC,KAAA,CAAM,uBAAA,CAAwB,wBAAA;AAClC,YAAA;AACA,YAAA,IACE,KAAA,CAAM,uBAAA,CAAwB,wBAAA,IAA4B,IAAA,EAC1D;AACA,cAAA,gBAAA,CAAiB,mBAAmB,CAAA,CAAE,wBAAA,GACpC,KAAA,CAAM,uBAAA,CAAwB,wBAAA;AAClC,YAAA;AACA,YAAA,IAAI,KAAA,CAAM,mBAAA,CAAoB,YAAA,IAAgB,IAAA,EAAM;AAClD,cAAA,gBAAA,CAAiB,mBAAmB,CAAA,CAAE,kBAAA,GACpC,KAAA,CAAM,mBAAA,CAAoB,YAAA;AAC9B,YAAA;AAEA,YAAA,UAAA,CAAW,OAAA,CAAQ;cACjB,IAAA,EAAM,QAAA;AACN,cAAA,YAAA;cACA,KAAA,EAAO;AACL,gBAAA,YAAA,EAAA,CAAcA,GAAAA,GAAA,KAAA,CAAM,YAAA,KAAN,IAAA,GAAAA,GAAAA,GAAsB,GAAA;AACpC,gBAAA,gBAAA,EAAA,CAAkB,EAAA,GAAA,KAAA,CAAM,gBAAA,KAAN,IAAA,GAAA,EAAA,GAA0B;AAC9C,eAAA;AACA,cAAA;aACD,CAAA;AACH,UAAA;SACD;AACH,OAAA;MACA,OAAA,EAAS,EAAE,WAAW,WAAA,EAAY;MAClC,WAAA,EAAa,EAAE,SAAS,eAAA,EAAgB;AACxC,MAAA,QAAA;AACA,MAAA,OAAA,EAAS,EAAE,IAAA,EAAM,IAAA,CAAK,SAAA,CAAU,IAAI,CAAA;AACtC,KAAA;AACF,EAAA;AACF,CAAA;AAEA,IAAM,gCAAA,GAAmCE,EACtC,MAAA,CAAO;EACN,aAAA,EAAeA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;EAClC,iBAAA,EAAmBA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AACtC,EAAA,qBAAA,EAAuBA,EACpB,MAAA,CAAO;IACN,aAAA,EAAeA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA;AAC5B,GAAC,EACA,OAAA,EAAQ;AACX,EAAA,yBAAA,EAA2BA,EACxB,MAAA,CAAO;IACN,gBAAA,EAAkBA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;IACrC,0BAAA,EAA4BA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;IAC/C,0BAAA,EAA4BA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA;AACzC,GAAC,EACA,OAAA;AACL,CAAC,EACA,OAAA,EAAQ;AAIX,IAAM,kCAAA,GAAqCA,EAAE,MAAA,CAAO;EAClD,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;EACvB,OAAA,EAASA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;EAC5B,KAAA,EAAOA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AAC1B,EAAA,OAAA,EAASA,CAAAA,CAAE,KAAA;AACTA,IAAAA,CAAAA,CAAE,MAAA,CAAO;AACP,MAAA,OAAA,EAASA,EAAE,MAAA,CAAO;AAChB,QAAA,IAAA,EAAMA,CAAAA,CAAE,OAAA,CAAQ,WAAW,CAAA,CAAE,OAAA,EAAQ;QACrC,OAAA,EAASA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;QAC5B,iBAAA,EAAmBA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AACtC,QAAA,UAAA,EAAYA,CAAAA,CACT,KAAA;AACCA,UAAAA,CAAAA,CAAE,MAAA,CAAO;YACP,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;YACvB,IAAA,EAAMA,CAAAA,CAAE,QAAQ,UAAU,CAAA;AAC1B,YAAA,QAAA,EAAUA,EAAE,MAAA,CAAO;AACjB,cAAA,IAAA,EAAMA,EAAE,MAAA,EAAO;AACf,cAAA,SAAA,EAAWA,EAAE,MAAA;aACd;WACF;AACH,SAAA,CACC,OAAA;OACJ,CAAA;MACD,aAAA,EAAeA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA;KAC3B;AACH,GAAA;EACA,KAAA,EAAO;AACT,CAAC,CAAA;AAID,IAAM,qCAAA,GAAwC,CAC5C,WAAA,KAEAA,CAAAA,CAAE,KAAA,CAAM;AACNA,EAAAA,CAAAA,CAAE,MAAA,CAAO;IACP,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;IACvB,OAAA,EAASA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;IAC5B,KAAA,EAAOA,CAAAA,CAAE,MAAA,EAA
O,CAAE,OAAA,EAAQ;AAC1B,IAAA,OAAA,EAASA,CAAAA,CAAE,KAAA;AACTA,MAAAA,CAAAA,CAAE,MAAA,CAAO;AACP,QAAA,KAAA,EAAOA,EACJ,MAAA,CAAO;AACN,UAAA,IAAA,EAAMA,EAAE,IAAA,CAAK,CAAC,WAAW,CAAC,EAAE,OAAA,EAAQ;UACpC,OAAA,EAASA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;UAC5B,iBAAA,EAAmBA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AACtC,UAAA,UAAA,EAAYA,CAAAA,CACT,KAAA;AACCA,YAAAA,CAAAA,CAAE,MAAA,CAAO;cACP,KAAA,EAAOA,CAAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;cAC3B,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AACvB,cAAA,IAAA,EAAMA,CAAAA,CAAE,OAAA,CAAQ,UAAU,CAAA,CAAE,OAAA,EAAQ;AACpC,cAAA,QAAA,EAAUA,EAAE,MAAA,CAAO;gBACjB,IAAA,EAAMA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;gBACzB,SAAA,EAAWA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA;eACvB;aACF;AACH,WAAA,CACC,OAAA;AACL,SAAC,EACA,OAAA,EAAQ;QACX,aAAA,EAAeA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA;OAC3B;AACH,KAAA;IACA,KAAA,EAAO;GACR,CAAA;AACD,EAAA;AACF,CAAC,CAAA;ACtc8CA,EAAE,MAAA,CAAO;EACxD,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;EACvB,OAAA,EAASA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;EAC5B,KAAA,EAAOA,CAAAA,CAAE,MAAA,EAAO,CAAE,OAAA,EAAQ;AAC1B,EAAA,OAAA,EAASA,CAAAA,CAAE,KAAA;AACTA,IAAAA,CAAAA,CAAE,MAAA,CAAO;AACP,MAAA,IAAA,EAAMA,EAAE,MAAA,EAAO;AACf,MAAA,aAAA,EAAeA,EAAE,MAAA;KAClB;AACH,GAAA;AACA,EAAA,KAAA,EAAOA,EACJ,MAAA,CAAO;AACN,IAAA,aAAA,EAAeA,EAAE,MAAA,EAAO;AACxB,IAAA,iBAAA,EAAmBA,EAAE,MAAA;AACvB,GAAC,EACA,OAAA;AACL,CAAC;ACtOyCA,EAAE,MAAA,CAAO;AACjD,EAAA,IAAA,EAAMA,CAAAA,CAAE,KAAA,CAAMA,CAAAA,CAAE,MAAA,CAAO,EAAE,SAAA,EAAWA,CAAAA,CAAE,KAAA,CAAMA,CAAAA,CAAE,MAAA,EAAQ,CAAA,EAAG,CAAC,CAAA;EAC1D,KAAA,EAAOA,CAAAA,CAAE,OAAO,EAAE,aAAA,EAAeA,EAAE,MAAA,EAAO,EAAG,CAAA,CAAE,OAAA;AACjD,CAAC;ACjGM,IAAM,6BAAN,MAAyD;EAW9D,WAAA,CACW,OAAA,EACQ,UACA,MAAA,EACjB;AAHS,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AACQ,IAAA,IAAA,CAAA,QAAA,GAAA,QAAA;AACA,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAbnB,IAAA,IAAA,CAAS,oBAAA,GAAuB,IAAA;AAc7B,EAAA;AAZH,EAAA,IAAI,gBAAA,GAA2B;AA9BjC,IAAA,IAAA,EAAA;AA+BI,IAAA,OAAA,CAAO,EAAA,GAAA,IAAA,CAAK,QAAA,CAAS,gBAAA,KAAd,OAAA,EAAA,GAAkC,EAAA;AAC3C,EAAA;AAEA,EAAA,IAAI,QAAA,GAAmB;AACrB,IAAA,OAAO,KAAK,MAAA,CAAO,QAAA;AACrB,EAAA;AAQA,EAAA,MAAM,UAAA,CAAW;AACf,IAAA,MAAA;AACA,IAAA,CAAA;AACA,IAAA,IAAA;AACA,IAAA,WAAA;AACA,IAAA,IAAA;AACA,IAAA,eAAA;AACA,IAAA,OAAA;AACA,IAAA;GACF,EAEE;AAvDJ,IAAA,IAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA,EAAA;AAwDI,IAAA,MAAM,WAA2C,EAAC;AAElD,IAAA,IAAI,eAAe,IAAA,EAAM;AACvB,MAAA,QAAA,CAAS,IAAA,CAAK;QACZ,IAAA,EAAM,qBAAA;QACN,OAAA,EAAS,aAAA;QACT,OAAA,EACE;OACH,CAAA;AACH,IAAA;AAEA,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,QAAA,CAAS,KAAK,EAAE,IAAA,EAAM,qBAAA,EAAuB,OAAA,EAAS,QAAQ,CAAA;AAChE,IAAA;AAEA,IAAA,MAAM,WAAA,GAAA,CAAc,MAAA,EAAA,GAAA,CAAA,EAAA,GAAA,KAAK,MAAA,CAAO,SAAA,KAAZ,OAAA,MAAA,GAAA,EAAA,CAAuB,gBAAvB,IAAA,GAAA,MAAA,GAAA,GAAA,IAAA,CAAA,EAAA,MAAA,IAAA,GAAA,EAAA,uBAA8C,IAAA,EAAK;AACvE,IAAA,MAAM,EAAE,KAAA,EAAO,QAAA,EAAU,eAAA,EAAgB,GAAI,MAAMC,aAAAA,CAAc;MAC/D,GAAA,EAAK,IAAA,CAAK,OAAO,GAAA,CAAI;QACnB,IAAA,EAAM,qBAAA;AACN,QAAA,OAAA,EAAS,IAAA,CAAK;OACf,CAAA;AACD,MAAA,OAAA,EAASC,cAAAA,CAAe,IAAA,CAAK,MAAA,CAAO,OAAA,IAAW,OAAO,CAAA;MACtD,IAAA,EAAM;AACJ,QAAA,KAAA,EAAO,IAAA,CAAK,OAAA;AACZ,QAAA,MAAA;AACA,QAAA,CAAA;AACA,QAAA,IAAA;AACA,QAAA,GAAA,CAAI,EAAA,GAAA,eAAA,CAAgB,MAAA,KAAhB,IAAA,GAAA,KAA0B,EAAC;QAC/B,eAAA,EAAiB,UAAA;QACjB,GAAI,IAAA,CAAK,SAAS,IAAA,GAAO,EAAE,MAAM,IAAA,CAAK,QAAA,CAAS,IAAA,EAAK,GAAI;AAC1D,OAAA;MACA,qBAAA,EAAuBC,8BAAAA;AACrB,QAAA,CAAA,EAAA,GAAA,IAAA,CAAK,MAAA,CAAO,cAAA,KAAZ,IAAA,GAAA,EAAA,GAA8B;AAChC,OAAA;MACA,yBAAA,EAA2BC,yBAAAA;AACzB,QAAA;AACF,OAAA;AACA,MAAA,WAAA;AACA,MAAA,KAAA,EAAO,KAAK,MAAA,CAAO;KACpB,CAAA;AAED,IAAA,OAAO;AACL,MAAA,MAAA,EAAQ,SAAS,IAAA,CAAK,GAAA,CAAI,CAAA,IAAA,KAAQ,KAAK,QAAQ,CAAA;AAC/C,M
AAA,QAAA;MACA,QAAA,EAAU;QACR,SAAA,EAAW,WAAA;AACX,QAAA,OAAA,EAAS,IAAA,CAAK,OAAA;QACd,OAAA,EAAS;AACX;AACF,KAAA;AACF,EAAA;AACF,CAAA;AAIA,IAAM,mCAAA,GAAsCJ,EAAE,MAAA,CAAO;EACnD,IAAA,EAAMA,CAAAA,CAAE,KAAA,CAAMA,CAAAA,CAAE,MAAA,CAAO,EAAE,UAAUA,CAAAA,CAAE,MAAA,EAAO,EAAG,CAAC;AAClD,CAAC,CAAA;ACnFM,SAAS,0BAA0B,OAAA,EAAyB;AACjE,EAAA,OAAO;AACL,IAAA,QAAA;AACA,IAAA,aAAA;AACA,IAAA,eAAA;AACA,IAAA,aAAA;AACA,IAAA,kBAAA;AACA,IAAA,oBAAA;AACA,IAAA,aAAA;AACA,IAAA,kBAAA;AACA,IAAA,oBAAA;AACA,IAAA,kBAAA;AACA,IAAA,uBAAA;AACA,IAAA,yBAAA;AACA,IAAA,aAAA;AACA,IAAA;AACF,GAAA,CAAE,SAAS,OAAO,CAAA;AACpB;AC5CO,IAAM,cAAA,GAAiBA,EAAE,MAAA,CAAO;AACrC,EAAA,IAAA,EAAMA,EAAE,MAAA,EAAO;AACf,EAAA,KAAA,EAAOA,EAAE,MAAA;AACX,CAAC,CAAA;ACmBD,IAAM,iBAAA,GAA0D;EAC9D,WAAA,EAAa,cAAA;EACb,cAAA,EAAgB,CAAA,SAAQ,IAAA,CAAK;AAC/B,CAAA;AA6DO,SAAS,SAAA,CAAU,OAAA,GAA+B,EAAC,EAAgB;AAzF1E,EAAA,IAAA,EAAA;AA0FE,EAAA,MAAM,OAAA,GAAU,oBAAA;KACd,EAAA,GAAA,OAAA,CAAQ,OAAA,KAAR,IAAA,GAAA,EAAA,GAAmB;AACrB,GAAA;AACA,EAAA,MAAM,aAAa,OAAO;AACxB,IAAA,aAAA,EAAe,UAAU,UAAA,CAAW;AAClC,MAAA,MAAA,EAAQ,OAAA,CAAQ,MAAA;MAChB,uBAAA,EAAyB,aAAA;MACzB,WAAA,EAAa;AACf,KAAC,CAAC,CAAA,CAAA;AACF,IAAA,GAAG,OAAA,CAAQ;AACb,GAAA,CAAA;AAEA,EAAA,MAAM,mBAAA,GAAsB,CAC1B,OAAA,EACA,QAAA,GAA4B,EAAC,KAC1B;AACH,IAAA,MAAM,iBAAA,GAAoB,0BAA0B,OAAO,CAAA;AAC3D,IAAA,OAAO,IAAI,iCAAA,CAAkC,OAAA,EAAS,QAAA,EAAU;MAC9D,QAAA,EAAU,UAAA;AACV,MAAA,GAAA,EAAK,CAAC,EAAE,IAAA,OAAW,CAAA,EAAG,OAAO,GAAG,IAAI,CAAA,CAAA;MACpC,OAAA,EAAS,UAAA;AACT,MAAA,KAAA,EAAO,OAAA,CAAQ,KAAA;AACf,MAAA,2BAAA,EAA6B,oBAAoB,MAAA,GAAS,MAAA;MAC1D,cAAA,EAAgB,iBAAA;MAChB,yBAAA,EAA2B,iBAAA;MAC3B,YAAA,EAAc;KACf,CAAA;AACH,EAAA,CAAA;AAEA,EAAA,MAAM,gBAAA,GAAmB,CACvB,OAAA,EACA,QAAA,GAA6B,EAAC,KAC3B;AACH,IAAA,OAAO,IAAI,0BAAA,CAA2B,OAAA,EAAS,QAAA,EAAU;MACvD,QAAA,EAAU,WAAA;AACV,MAAA,GAAA,EAAK,CAAC,EAAE,IAAA,OAAW,CAAA,EAAG,OAAO,GAAG,IAAI,CAAA,CAAA;MACpC,OAAA,EAAS,UAAA;AACT,MAAA,KAAA,EAAO,OAAA,CAAQ,KAAA;MACf,cAAA,EAAgB;KACjB,CAAA;AACH,EAAA,CAAA;AAEA,EAAA,MAAM,WAAW,CAAC,OAAA,EAAyB,QAAA,KACzC,mBAAA,CAAoB,SAAS,QAAQ,CAAA;AAEvC,EAAA,QAAA,CAAS,aAAA,GAAgB,mBAAA;AACzB,EAAA,QAAA,CAAS,IAAA,GAAO,mBAAA;AAChB,EAAA,QAAA,CAAS,kBAAA,GAAqB,CAAC,OAAA,KAAoB;AACjD,IAAA,MAAM,IAAI,gBAAA,CAAiB,EAAE,OAAA,EAAS,SAAA,EAAW,sBAAsB,CAAA;AACzE,EAAA,CAAA;AACA,EAAA,QAAA,CAAS,UAAA,GAAa,gBAAA;AACtB,EAAA,QAAA,CAAS,KAAA,GAAQ,gBAAA;AAEjB,EAAA,OAAO,QAAA;AACT;AAEO,IAAM,MAAM,SAAA","file":"dist-ZOZ6MHJH.js","sourcesContent":["import {\n LanguageModelV1Prompt,\n LanguageModelV1ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerMetadata?: LanguageModelV1ProviderMetadata;\n}) {\n return message?.providerMetadata?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV1Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url: {\n url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n },\n ...partMetadata,\n };\n }\n case 'file': {\n throw new UnsupportedFunctionalityError({\n functionality: 'File content parts in user messages',\n });\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z, ZodSchema } from 'zod';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n mode,\n structuredOutputs,\n}: {\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n };\n structuredOutputs: boolean;\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const toolChoice = mode.toolChoice;\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, tool_choice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n tool_choice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1ObjectGenerationMode,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n OpenAICompatibleChatSettings,\n} from './openai-compatible-chat-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { prepareTools } from './openai-compatible-prepare-tools';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\nDefault object generation mode that should be used with this model when\nno mode is specified. Should be the mode with the best results for this\nmodel. 
`undefined` can be specified if object generation is not supported.\n */\n defaultObjectGenerationMode?: LanguageModelV1ObjectGenerationMode;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n readonly settings: OpenAICompatibleChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n settings: OpenAICompatibleChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' | undefined {\n return this.config.defaultObjectGenerationMode;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerMetadata,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n reasoning_effort:\n providerMetadata?.[this.providerOptionsName]?.reasoningEffort ??\n providerMetadata?.['openai-compatible']?.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, toolWarnings } = prepareTools({\n mode,\n structuredOutputs: this.supportsStructuredOutputs,\n });\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.supportsStructuredOutputs === true && mode.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: mode.schema,\n name: mode.name ?? 'response',\n description: mode.description,\n },\n }\n : { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: {\n type: 'function',\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: 'function',\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = responseBody.choices[0];\n\n // provider metadata:\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n }),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n const promptTokenDetails = responseBody.usage?.prompt_tokens_details;\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata[this.providerOptionsName].reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata[this.providerOptionsName].cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n reasoning: choice.message.reasoning_content ?? 
undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: responseBody.usage?.prompt_tokens ?? NaN,\n completionTokens: responseBody.usage?.completion_tokens ?? NaN,\n },\n providerMetadata,\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(responseBody),\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n const simulatedStream = new ReadableStream<LanguageModelV1StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.reasoning) {\n if (Array.isArray(result.reasoning)) {\n for (const part of result.reasoning) {\n if (part.type === 'text') {\n controller.enqueue({\n type: 'reasoning',\n textDelta: part.text,\n });\n }\n }\n } else {\n controller.enqueue({\n type: 'reasoning',\n textDelta: result.reasoning,\n });\n }\n }\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n return {\n stream: simulatedStream,\n rawCall: result.rawCall,\n rawResponse: result.rawResponse,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n };\n let isFirstChunk = true;\n let providerOptionsName = this.providerOptionsName;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? 
undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? 
'',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.reasoningTokens != null) {\n providerMetadata[providerOptionsName].reasoningTokens =\n usage.completionTokensDetails.reasoningTokens;\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n if (usage.promptTokensDetails.cachedTokens != null) {\n providerMetadata[providerOptionsName].cachedPromptTokens =\n usage.promptTokensDetails.cachedTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? NaN,\n },\n providerMetadata,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n 
.array(\n z.object({\n index: z.number().optional(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n APICallError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionSettings,\n} from './openai-compatible-completion-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n includeUsage?: boolean;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV1\n{\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n readonly settings: OpenAICompatibleCompletionSettings;\n\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n settings: OpenAICompatibleCompletionSettings,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerMetadata,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n };\n\n switch (type) {\n case 'regular': {\n if (mode.tools?.length) {\n throw new UnsupportedFunctionalityError({\n functionality: 'tools',\n });\n }\n\n if (mode.toolChoice) {\n throw new UnsupportedFunctionalityError({\n functionality: 'toolChoice',\n });\n }\n\n return { args: baseArgs, warnings };\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-json mode',\n });\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(response),\n warnings,\n request: { body: JSON.stringify(args) },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends 
z.ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n errorSchema,\n ]);\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingSettings,\n} from './openai-compatible-embedding-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n private readonly settings: OpenAICompatibleEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n settings: OpenAICompatibleEmbeddingSettings,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return this.settings.maxImagesPerCall ?? 10;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly settings: OpenAICompatibleImageSettings,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n ...(this.settings.user ? { user: this.settings.user } : {}),\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import { OpenAICompatibleChatSettings } from '@ai-sdk/openai-compatible';\n\n// https://console.x.ai and see \"View models\"\nexport type XaiChatModelId =\n | 'grok-3'\n | 'grok-3-latest'\n | 'grok-3-fast'\n | 'grok-3-fast-latest'\n | 'grok-3-mini'\n | 'grok-3-mini-latest'\n | 'grok-3-mini-fast'\n | 'grok-3-mini-fast-latest'\n | 'grok-2-vision-1212'\n | 'grok-2-vision'\n | 'grok-2-vision-latest'\n | 'grok-2-image-1212'\n | 'grok-2-image'\n | 'grok-2-image-latest'\n | 'grok-2-1212'\n | 'grok-2'\n | 'grok-2-latest'\n | 'grok-vision-beta'\n | 'grok-beta'\n | (string & {});\n\nexport interface XaiChatSettings extends OpenAICompatibleChatSettings {}\n\n/**\n * https://docs.x.ai/docs/guides/structured-outputs\n */\nexport function supportsStructuredOutputs(modelId: XaiChatModelId) {\n return [\n 'grok-3',\n 'grok-3-beta',\n 'grok-3-latest',\n 'grok-3-fast',\n 'grok-3-fast-beta',\n 'grok-3-fast-latest',\n 'grok-3-mini',\n 'grok-3-mini-beta',\n 'grok-3-mini-latest',\n 'grok-3-mini-fast',\n 'grok-3-mini-fast-beta',\n 'grok-3-mini-fast-latest',\n 'grok-2-1212',\n 'grok-2-vision-1212',\n ].includes(modelId);\n}\n","import { z } from 'zod';\n\n// Add error schema and structure\nexport const xaiErrorSchema = z.object({\n code: z.string(),\n error: z.string(),\n});\n\nexport type XaiErrorData = z.infer<typeof xaiErrorSchema>;\n","import {\n ImageModelV1,\n LanguageModelV1,\n NoSuchModelError,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport {\n OpenAICompatibleChatLanguageModel,\n OpenAICompatibleImageModel,\n ProviderErrorStructure,\n} from '@ai-sdk/openai-compatible';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport {\n XaiChatModelId,\n XaiChatSettings,\n supportsStructuredOutputs,\n} from './xai-chat-settings';\nimport { XaiImageSettings } from './xai-image-settings';\nimport { XaiImageModelId } from './xai-image-settings';\nimport { XaiErrorData, xaiErrorSchema } from './xai-error';\n\nconst xaiErrorStructure: ProviderErrorStructure<XaiErrorData> = {\n errorSchema: xaiErrorSchema,\n errorToMessage: data => data.error,\n};\n\nexport interface XaiProvider extends ProviderV1 {\n /**\nCreates an Xai chat model for text generation.\n */\n (modelId: XaiChatModelId, settings?: XaiChatSettings): LanguageModelV1;\n\n /**\nCreates an Xai language model for text generation.\n */\n languageModel(\n modelId: XaiChatModelId,\n settings?: XaiChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates an Xai chat model for text generation.\n */\n chat: (\n modelId: XaiChatModelId,\n settings?: XaiChatSettings,\n ) => LanguageModelV1;\n\n /**\nCreates an Xai image model for image generation.\n */\n image(modelId: XaiImageModelId, settings?: XaiImageSettings): ImageModelV1;\n\n /**\nCreates an Xai image model for image generation.\n */\n imageModel(\n modelId: XaiImageModelId,\n settings?: XaiImageSettings,\n ): 
ImageModelV1;\n}\n\nexport interface XaiProviderSettings {\n /**\nBase URL for the xAI API calls.\n */\n baseURL?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\nexport function createXai(options: XaiProviderSettings = {}): XaiProvider {\n const baseURL = withoutTrailingSlash(\n options.baseURL ?? 'https://api.x.ai/v1',\n );\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'XAI_API_KEY',\n description: 'xAI API key',\n })}`,\n ...options.headers,\n });\n\n const createLanguageModel = (\n modelId: XaiChatModelId,\n settings: XaiChatSettings = {},\n ) => {\n const structuredOutputs = supportsStructuredOutputs(modelId);\n return new OpenAICompatibleChatLanguageModel(modelId, settings, {\n provider: 'xai.chat',\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n defaultObjectGenerationMode: structuredOutputs ? 'json' : 'tool',\n errorStructure: xaiErrorStructure,\n supportsStructuredOutputs: structuredOutputs,\n includeUsage: true,\n });\n };\n\n const createImageModel = (\n modelId: XaiImageModelId,\n settings: XaiImageSettings = {},\n ) => {\n return new OpenAICompatibleImageModel(modelId, settings, {\n provider: 'xai.image',\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n errorStructure: xaiErrorStructure,\n });\n };\n\n const provider = (modelId: XaiChatModelId, settings?: XaiChatSettings) =>\n createLanguageModel(modelId, settings);\n\n provider.languageModel = createLanguageModel;\n provider.chat = createLanguageModel;\n provider.textEmbeddingModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'textEmbeddingModel' });\n };\n provider.imageModel = createImageModel;\n provider.image = createImageModel;\n\n return provider;\n}\n\nexport const xai = createXai();\n"]}
@@ -0,0 +1,31 @@
+ # @mastra/server Documentation
+
+ > Embedded documentation for coding agents
+
+ ## Quick Start
+
+ ```bash
+ # Read the skill overview
+ cat docs/SKILL.md
+
+ # Get the source map
+ cat docs/SOURCE_MAP.json
+
+ # Read topic documentation
+ cat docs/<topic>/01-overview.md
+ ```
+
+ ## Structure
+
+ ```
+ docs/
+ ├── SKILL.md # Entry point
+ ├── README.md # This file
+ ├── SOURCE_MAP.json # Export index
+ ├── server/ (4 files)
+ ```
+
+ ## Version
+
+ Package: @mastra/server
+ Version: 1.0.0-beta.20
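The Quick Start above reads the embedded docs with `cat`; the same walk can be scripted. A minimal TypeScript sketch, assuming Node.js and a local install under `node_modules/@mastra/server` with the `docs/` layout shown in the Structure block (the install path is an assumption, not stated by the package):

```ts
// Sketch only: list the embedded docs described in the README's Structure block.
// Assumes a local install under node_modules/@mastra/server; adjust as needed.
import { readdirSync, statSync } from "node:fs";
import { join } from "node:path";

const docsRoot = join("node_modules", "@mastra", "server", "docs"); // assumed path

for (const entry of readdirSync(docsRoot)) {
  const full = join(docsRoot, entry);
  if (statSync(full).isDirectory()) {
    // topic folder such as docs/server/ with its 4 files
    for (const file of readdirSync(full)) console.log(join("docs", entry, file));
  } else {
    console.log(join("docs", entry)); // SKILL.md, README.md, SOURCE_MAP.json
  }
}
```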
@@ -0,0 +1,32 @@
+ ---
+ name: mastra-server-docs
+ description: Documentation for @mastra/server. Includes links to type definitions and readable implementation code in dist/.
+ ---
+
+ # @mastra/server Documentation
+
+ > **Version**: 1.0.0-beta.20
+ > **Package**: @mastra/server
+
+ ## Quick Navigation
+
+ Use SOURCE_MAP.json to find any export:
+
+ ```bash
+ cat docs/SOURCE_MAP.json
+ ```
+
+ Each export maps to:
+ - **types**: `.d.ts` file with JSDoc and API signatures
+ - **implementation**: `.js` chunk file with readable source
+ - **docs**: Conceptual documentation in `docs/`
+
+ ## Top Exports
+
+
+
+ See SOURCE_MAP.json for the complete list.
+
+ ## Available Topics
+
+ - [Server](server/) - 4 file(s)
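SKILL.md above says each SOURCE_MAP.json entry points to type definitions, an implementation chunk, and conceptual docs, but the map shipped in this release (next hunk) has empty `exports` and `modules` objects, so no concrete entry appears in the diff. A hypothetical TypeScript model of that mapping, with every field name and example value assumed rather than taken from the package:

```ts
// Hypothetical sketch: one possible shape for a populated SOURCE_MAP.json,
// modeled on the types / implementation / docs bullets in SKILL.md.
// All names and example values here are assumptions.
interface SourceMapEntry {
  types: string;          // e.g. a .d.ts file under dist/ (assumed)
  implementation: string; // e.g. a readable chunk-*.js file under dist/ (assumed)
  docs?: string;          // e.g. a markdown file under docs/server/ (assumed)
}

interface SourceMapFile {
  version: string;                         // "1.0.0-beta.20"
  package: string;                         // "@mastra/server"
  exports: Record<string, SourceMapEntry>; // empty in this release
  modules: Record<string, unknown>;        // empty in this release
}
```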
@@ -0,0 +1,6 @@
+ {
+ "version": "1.0.0-beta.20",
+ "package": "@mastra/server",
+ "exports": {},
+ "modules": {}
+ }
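Given a populated map of that shape, the Quick Navigation flow from SKILL.md could be scripted as below; a minimal sketch assuming Node.js, a local install under `node_modules/@mastra/server`, and the hypothetical entry shape sketched above (the export name used is a placeholder):

```ts
// Sketch only: load SOURCE_MAP.json and look up one export.
// Path, entry shape, and export name are assumptions, not package API.
import { readFileSync } from "node:fs";
import { join } from "node:path";

const mapPath = join("node_modules", "@mastra", "server", "docs", "SOURCE_MAP.json");
const map = JSON.parse(readFileSync(mapPath, "utf8")) as {
  exports: Record<string, { types?: string; implementation?: string; docs?: string }>;
};

// "MastraServer" is a placeholder; `exports` is empty in 1.0.0-beta.20,
// so this currently prints the fallback message.
const entry = map.exports["MastraServer"];
console.log(entry ?? "export not listed in SOURCE_MAP.json");
```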