@bratsos/workflow-engine 0.0.11 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. package/README.md +270 -513
  2. package/dist/chunk-D7RVRRM2.js +3 -0
  3. package/dist/chunk-D7RVRRM2.js.map +1 -0
  4. package/dist/chunk-HL3OJG7W.js +1033 -0
  5. package/dist/chunk-HL3OJG7W.js.map +1 -0
  6. package/dist/chunk-MUWP5SF2.js +33 -0
  7. package/dist/chunk-MUWP5SF2.js.map +1 -0
  8. package/dist/chunk-NYKMT46J.js +1143 -0
  9. package/dist/chunk-NYKMT46J.js.map +1 -0
  10. package/dist/chunk-P4KMGCT3.js +2292 -0
  11. package/dist/chunk-P4KMGCT3.js.map +1 -0
  12. package/dist/chunk-SPXBCZLB.js +17 -0
  13. package/dist/chunk-SPXBCZLB.js.map +1 -0
  14. package/dist/cli/sync-models.d.ts +1 -0
  15. package/dist/cli/sync-models.js +210 -0
  16. package/dist/cli/sync-models.js.map +1 -0
  17. package/dist/client-D4PoxADF.d.ts +798 -0
  18. package/dist/client.d.ts +5 -0
  19. package/dist/client.js +4 -0
  20. package/dist/client.js.map +1 -0
  21. package/dist/index-DAzCfO1R.d.ts +217 -0
  22. package/dist/index.d.ts +569 -0
  23. package/dist/index.js +399 -0
  24. package/dist/index.js.map +1 -0
  25. package/dist/interface-MMqhfQQK.d.ts +411 -0
  26. package/dist/kernel/index.d.ts +26 -0
  27. package/dist/kernel/index.js +3 -0
  28. package/dist/kernel/index.js.map +1 -0
  29. package/dist/kernel/testing/index.d.ts +44 -0
  30. package/dist/kernel/testing/index.js +85 -0
  31. package/dist/kernel/testing/index.js.map +1 -0
  32. package/dist/persistence/index.d.ts +2 -0
  33. package/dist/persistence/index.js +6 -0
  34. package/dist/persistence/index.js.map +1 -0
  35. package/dist/persistence/prisma/index.d.ts +37 -0
  36. package/dist/persistence/prisma/index.js +5 -0
  37. package/dist/persistence/prisma/index.js.map +1 -0
  38. package/dist/plugins-BCnDUwIc.d.ts +415 -0
  39. package/dist/ports-tU3rzPXJ.d.ts +245 -0
  40. package/dist/stage-BPw7m9Wx.d.ts +144 -0
  41. package/dist/testing/index.d.ts +264 -0
  42. package/dist/testing/index.js +920 -0
  43. package/dist/testing/index.js.map +1 -0
  44. package/package.json +11 -1
  45. package/skills/workflow-engine/SKILL.md +234 -348
  46. package/skills/workflow-engine/references/03-runtime-setup.md +111 -426
  47. package/skills/workflow-engine/references/05-persistence-setup.md +32 -0
  48. package/skills/workflow-engine/references/07-testing-patterns.md +141 -474
  49. package/skills/workflow-engine/references/08-common-patterns.md +118 -431
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/core/schema-helpers.ts","../src/core/stage-factory.ts","../src/ai/model-helper.ts","../src/utils/batch/model-mapping.ts","../src/utils/batch/providers/anthropic-batch.ts","../src/utils/batch/providers/google-batch.ts","../src/utils/batch/providers/openai-batch.ts","../src/ai/ai-helper.ts"],"names":["z","logger","jsonSchema","result"],"mappings":";;;;;;;;;AAuBO,IAAM,aAAA,GAAgB,CAAA,CAAE,MAAA,CAAO,EAAE;AA6BjC,SAAS,kBAAA,CACd,eAAA,EACA,OAAA,EACA,KAAA,EACG;AACH,EAAA,MAAM,WAAA,GAAc,gBAAgB,OAAO,CAAA;AAE3C,EAAA,IAAI,CAAC,WAAA,EAAa;AAChB,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,oCAAA,EAAuC,OAAO,CAAA,sFAAA,EAEvB,MAAA,CAAO,KAAK,eAAe,CAAA,CAAE,IAAA,CAAK,IAAI,CAAC,CAAA;AAAA,KAChE;AAAA,EACF;AAEA,EAAA,IAAI,KAAA,EAAO;AACT,IAAA,IAAI,OAAO,WAAA,KAAgB,QAAA,IAAY,WAAA,KAAgB,IAAA,EAAM;AAC3D,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,SAAS,OAAO,CAAA,+CAAA,EAAkD,KAAK,CAAA,aAAA,EACxD,OAAO,WAAW,CAAA;AAAA,OACnC;AAAA,IACF;AACA,IAAA,IAAI,EAAE,SAAS,WAAA,CAAA,EAAc;AAC3B,MAAA,MAAM,eAAA,GAAkB,MAAA,CAAO,IAAA,CAAK,WAAW,CAAA;AAC/C,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,CAAA,wBAAA,EAA2B,KAAK,CAAA,KAAA,EAAQ,OAAO,8BACxB,eAAA,CAAgB,IAAA,CAAK,IAAI,CAAC,CAAA;AAAA,OACnD;AAAA,IACF;AACA,IAAA,OAAQ,YAAoB,KAAK,CAAA;AAAA,EACnC;AAEA,EAAA,OAAO,WAAA;AACT;;;AC4KO,SAAS,YAMd,UAAA,EAQA;AAEA,EAAA,MAAM,cACJ,UAAA,CAAW,OAAA,CAAQ,UAAU,MAAA,GACzB,aAAA,GACC,WAAW,OAAA,CAAQ,KAAA;AAE1B,EAAA,MAAM,YAAA,GACJ,MAAA,IAAU,UAAA,IAAc,UAAA,CAAW,IAAA,KAAS,aAAA;AAG9C,EAAA,MAAM,KAAA,GAKF;AAAA,IACF,IAAI,UAAA,CAAW,EAAA;AAAA,IACf,MAAM,UAAA,CAAW,IAAA;AAAA,IACjB,aAAa,UAAA,CAAW,WAAA;AAAA,IACxB,IAAA,EAAM,eAAe,aAAA,GAAgB,MAAA;AAAA,IACrC,cAAc,UAAA,CAAW,YAAA;AAAA,IAEzB,WAAA;AAAA,IACA,YAAA,EAAc,WAAW,OAAA,CAAQ,MAAA;AAAA,IACjC,YAAA,EAAc,WAAW,OAAA,CAAQ,MAAA;AAAA,IAEjC,MAAM,QAAQ,OAAA,EAAS;AAErB,MAAA,MAAM,eAAA,GAAkB,qBAAA;AAAA,QACtB;AAAA,OACF;AAGA,MAAA,MAAM,MAAA,GAAS,MAAM,UAAA,CAAW,OAAA,CAAQ,eAAe,CAAA;AAGvD,MAAA,IAAI,WAAA,IAAe,MAAA,IAAU,MAAA,CAAO,SAAA,KAAc,IAAA,EAAM;AACtD,QAAA,MAAM,eAAA,GAAkB,MAAA;AACxB,QAAA,OAAO;AAAA,UACL,SAAA,EAAW,IAAA;AAAA,UACX,OAAO,e
AAA,CAAgB,KAAA;AAAA,UACvB,YAAY,eAAA,CAAgB,UAAA;AAAA,UAC5B,OAAA,EAAS;AAAA,YACP,SAAA,EAAW,CAAA;AAAA,YACX,OAAA,EAAS,CAAA;AAAA,YACT,QAAA,EAAU,CAAA;AAAA,YACV,GAAG,eAAA,CAAgB;AAAA;AACrB,SACF;AAAA,MACF;AAGA,MAAA,MAAM,YAAA,GAAe,MAAA;AAGrB,MAAA,MAAM,WAAA,GAA6C;AAAA,QACjD,QAAQ,YAAA,CAAa,MAAA;AAAA,QACrB,OAAA,EAAS;AAAA;AAAA,UAEP,SAAA,EAAW,CAAA;AAAA,UACX,OAAA,EAAS,CAAA;AAAA,UACT,QAAA,EAAU,CAAA;AAAA;AAAA,UAEV,GAAG,YAAA,CAAa;AAAA,SAClB;AAAA,QACA,WAAW,YAAA,CAAa;AAAA,OAC1B;AAEA,MAAA,OAAO,WAAA;AAAA,IACT,CAAA;AAAA,IAEA,cAAc,UAAA,CAAW;AAAA,GAC3B;AAGA,EAAA,IAAI,YAAA,EAAc;AAChB,IAAA,MAAM,QAAA,GAAW,UAAA;AAMjB,IAAA,KAAA,CAAM,kBAAkB,QAAA,CAAS,eAAA;AAAA,EACnC;AAEA,EAAA,OAAO,KAAA;AACT;AASA,SAAS,sBAKP,OAAA,EACiD;AACjD,EAAA,OAAO;AAAA,IACL,GAAG,OAAA;AAAA,IAEH,QAAkC,OAAA,EAAyB;AACzD,MAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,eAAA,CAAgB,OAAiB,CAAA;AACxD,MAAA,IAAI,WAAW,MAAA,EAAW;AACxB,QAAA,MAAM,eAAA,GAAkB,MAAA,CAAO,IAAA,CAAK,OAAA,CAAQ,eAAe,CAAA;AAC3D,QAAA,MAAM,IAAI,KAAA;AAAA,UACR,CAAA,gCAAA,EAAmC,MAAA,CAAO,OAAO,CAAC,CAAA,qBAAA,EAC3B,eAAA,CAAgB,MAAA,GAAS,CAAA,GAAI,eAAA,CAAgB,IAAA,CAAK,IAAI,CAAA,GAAI,QAAQ,CAAA;AAAA,SAC3F;AAAA,MACF;AACA,MAAA,OAAO,MAAA;AAAA,IACT,CAAA;AAAA,IAEA,SAAmC,OAAA,EAAqC;AACtE,MAAA,OAAO,OAAA,CAAQ,gBAAgB,OAAiB,CAAA;AAAA,IAGlD;AAAA,GACF;AACF;AAoDO,SAAS,sBAMd,UAAA,EAMA;AACA,EAAA,OAAO,YAAY,UAAU,CAAA;AAC/B;ACtcA,IAAI,eAAA,GAGQ,IAAA;AAEZ,eAAe,WAAA,GAAc;AAC3B,EAAA,IAAI,CAAC,eAAA,EAAiB;AACpB,IAAA,eAAA,GAAkB,QAAQ,GAAA,CAAI;AAAA,MAC5B,OAAO,kBAAkB,CAAA;AAAA,MACzB,OAAO,+BAA+B;AAAA,KACvC,EAAE,IAAA,CAAK,CAAC,CAAC,EAAE,QAAA,EAAS,EAAG,YAAY,CAAA,MAAO;AAAA,MACzC,QAAA;AAAA,MACA,aAAa,YAAA,CAAa;AAAA,KAC5B,CAAE,CAAA;AAAA,EACJ;AACA,EAAA,OAAO,eAAA;AACT;AAuDA,IAAM,iBAA8C,EAAC;AAK9C,SAAS,eAAe,MAAA,EAA2C;AACxE,EAAA,MAAA,CAAO,MAAA,CAAO,gBAAgB,MAAM,CAAA;AACtC;AAKO,SAAS,mBAAmB,GAAA,EAAsC;AACvE,EAAA,OAAO,eAAe,GAAG,CAAA;AAC3B;AAKO,SAAS,oBAAA,GAGb;AACD,EAAA,OAAO,MAAA,CAAO,QAAQ,cAAc,CAAA,CAAE,IAAI,CAAC,CAAC,GAAA,EAAK,MAAM,CAAA,MAAO;AAAA,IAC5D,GAAA;AAAA,IACA;AAAA,GACF,CAAE,CAAA;AACJ;AAiBO,IAAM,YAAA,GAAeA,EAAAA,CAAE,IAAA,CAAK,C
AAC,kBAAkB,CAAC,CAAA;AAYhD,IAAM,QAAA,GAAWA,EAAAA,CACrB,MAAA,EAAO,CACP,MAAA;AAAA,EACC,CAAC,GAAA,KAAQ;AAEP,IAAA,IAAI,YAAA,CAAa,SAAA,CAAU,GAAG,CAAA,CAAE,OAAA,EAAS;AACvC,MAAA,OAAO,IAAA;AAAA,IACT;AAEA,IAAA,OAAO,cAAA,CAAe,GAAG,CAAA,KAAM,MAAA;AAAA,EACjC,CAAA;AAAA,EACA;AAAA,IACE,OAAA,EACE;AAAA;AAEN,CAAA,CACC,SAAA,CAAU,CAAC,GAAA,KAAQ,GAAe;AAM9B,IAAM,gBAAA,GAAgD;AAAA,EAC3D,CAAC,YAAA,CAAa,IAAA,CAAK,kBAAkB,CAAC,GAAG;AAAA,IACvC,EAAA,EAAI,yCAAA;AAAA,IACJ,IAAA,EAAM,0BAAA;AAAA,IACN,mBAAA,EAAqB,GAAA;AAAA,IACrB,oBAAA,EAAsB,GAAA;AAAA,IACtB,QAAA,EAAU,YAAA;AAAA,IACV,WAAA,EAAa,yCAAA;AAAA,IACb,kBAAA,EAAoB,IAAA;AAAA,IACpB,oBAAA,EAAsB;AAAA;AAE1B;AAMO,IAAM,iBAAA,GAA8B;AAMpC,SAAS,SAAS,GAAA,EAA4B;AAEnD,EAAA,MAAM,YAAA,GAAe,iBAAiB,GAAoC,CAAA;AAC1E,EAAA,IAAI,YAAA,EAAc;AAChB,IAAA,OAAO,YAAA;AAAA,EACT;AAGA,EAAA,MAAM,eAAA,GAAkB,eAAe,GAAa,CAAA;AACpD,EAAA,IAAI,eAAA,EAAiB;AACnB,IAAA,OAAO,eAAA;AAAA,EACT;AAEA,EAAA,MAAM,OAAA,GAAU;AAAA,IACd,GAAG,MAAA,CAAO,IAAA,CAAK,gBAAgB,CAAA;AAAA,IAC/B,GAAG,MAAA,CAAO,IAAA,CAAK,cAAc;AAAA,GAC/B;AACA,EAAA,MAAM,IAAI,KAAA;AAAA,IACR,UAAU,GAAG,CAAA,+BAAA,EAAkC,OAAA,CAAQ,IAAA,CAAK,IAAI,CAAC,CAAA;AAAA,GACnE;AACF;AAKO,SAAS,eAAA,GAA+B;AAC7C,EAAA,OAAO,SAAS,iBAAiB,CAAA;AACnC;AAMO,SAAS,WACd,MAAA,EAC6C;AAE7C,EAAA,MAAM,OAAA,GAAU,MAAA,CAAO,OAAA,CAAQ,gBAAgB,CAAA,CAAE,IAAI,CAAC,CAAC,GAAA,EAAK,MAAM,CAAA,MAAO;AAAA,IACvE,GAAA;AAAA,IACA;AAAA,GACF,CAAE,CAAA;AAEF,EAAA,MAAM,UAAA,GAAa,MAAA,CAAO,OAAA,CAAQ,cAAc,CAAA,CAAE,IAAI,CAAC,CAAC,GAAA,EAAK,MAAM,CAAA,MAAO;AAAA,IACxE,GAAA;AAAA,IACA;AAAA,GACF,CAAE,CAAA;AAGF,EAAA,MAAM,MAAA,uBAAa,GAAA,EAAkD;AACrE,EAAA,KAAA,MAAW,QAAQ,OAAA,EAAS;AAC1B,IAAA,MAAA,CAAO,GAAA,CAAI,IAAA,CAAK,GAAA,EAAK,IAAI,CAAA;AAAA,EAC3B;AACA,EAAA,KAAA,MAAW,QAAQ,UAAA,EAAY;AAC7B,IAAA,MAAA,CAAO,GAAA,CAAI,IAAA,CAAK,GAAA,EAAK,IAAI,CAAA;AAAA,EAC3B;AAEA,EAAA,IAAI,MAAA,GAAS,KAAA,CAAM,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA;AAGvC,EAAA,IAAI,MAAA,EAAQ;AACV,IAAA,MAAA,GAAS,MAAA,CAAO,MAAA,CAAO,CAAC,IAAA,KAAS;AAC/B,MAAA,MAAM,EAAE,QAAO,GAAI,IAAA;AAGnB,MAAA,IAAI,MAAA,CAAO,qBAAqB,MAAA,EAAW;AACzC,QAAA,IAAI,MAAA,CAAO,g
BAAA,IAAoB,CAAC,MAAA,CAAO,kBAAkB,OAAO,KAAA;AAChE,QAAA,IAAI,CAAC,MAAA,CAAO,gBAAA,IAAoB,MAAA,CAAO,kBAAkB,OAAO,KAAA;AAAA,MAClE;AAGA,MAAA,IAAI,MAAA,CAAO,kBAAkB,MAAA,EAAW;AACtC,QAAA,IAAI,MAAA,CAAO,aAAA,IAAiB,CAAC,MAAA,CAAO,eAAe,OAAO,KAAA;AAC1D,QAAA,IAAI,CAAC,MAAA,CAAO,aAAA,IAAiB,MAAA,CAAO,eAAe,OAAO,KAAA;AAAA,MAC5D;AAGA,MAAA,IAAI,MAAA,CAAO,8BAA8B,MAAA,EAAW;AAClD,QAAA,IACE,MAAA,CAAO,yBAAA,IACP,CAAC,MAAA,CAAO,yBAAA;AAER,UAAA,OAAO,KAAA;AACT,QAAA,IACE,CAAC,MAAA,CAAO,yBAAA,IACR,MAAA,CAAO,yBAAA;AAEP,UAAA,OAAO,KAAA;AAAA,MACX;AAGA,MAAA,IAAI,MAAA,CAAO,uBAAuB,MAAA,EAAW;AAC3C,QAAA,IAAI,MAAA,CAAO,kBAAA,IAAsB,CAAC,MAAA,CAAO,kBAAA;AACvC,UAAA,OAAO,KAAA;AACT,QAAA,IAAI,CAAC,MAAA,CAAO,kBAAA,IAAsB,MAAA,CAAO,kBAAA;AACvC,UAAA,OAAO,KAAA;AAAA,MACX;AAEA,MAAA,OAAO,IAAA;AAAA,IACT,CAAC,CAAA;AAAA,EACH;AAEA,EAAA,OAAO,MAAA,CAAO,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM,EAAE,GAAA,CAAI,aAAA,CAAc,CAAA,CAAE,GAAG,CAAC,CAAA;AACzD;AAKO,SAAS,mBAAmB,QAAA,EAA6B;AAC9D,EAAA,MAAM,KAAA,GAAQ,SAAS,QAAQ,CAAA;AAC/B,EAAA,OAAO,MAAM,kBAAA,KAAuB,IAAA;AACtC;AAwBO,SAAS,YAAA,CACd,UACA,OAAA,EACmB;AACnB,EAAA,MAAM,WAAA,GAAc,SAAS,QAAQ,CAAA;AACrC,EAAA,OAAO;AAAA,IACL,IAAI,WAAA,CAAY,EAAA;AAAA,IAChB,MAAM,WAAA,CAAY,IAAA;AAAA,IAClB,UAAA,EAAY,CAAC,WAAA,EAAqB,YAAA,KAAyB;AACzD,MAAA,IAAI,CAAC,OAAA,EAAS;AACZ,QAAA,MAAM,IAAI,MAAM,gDAAgD,CAAA;AAAA,MAClE;AACA,MAAA,OAAA,CAAQ,UAAA,CAAW,WAAA,EAAa,YAAA,EAAc,QAAQ,CAAA;AAAA,IACxD;AAAA,GACF;AACF;AAKO,SAAS,aAAA,CACd,QAAA,EACA,WAAA,EACA,YAAA,EAKA;AACA,EAAA,MAAM,KAAA,GAAQ,SAAS,QAAQ,CAAA;AAE/B,EAAA,MAAM,SAAA,GAAa,WAAA,GAAc,GAAA,GAAa,KAAA,CAAM,mBAAA;AACpD,EAAA,MAAM,UAAA,GAAc,YAAA,GAAe,GAAA,GAAa,KAAA,CAAM,oBAAA;AACtD,EAAA,MAAM,YAAY,SAAA,GAAY,UAAA;AAE9B,EAAA,OAAO;AAAA,IACL,SAAA;AAAA,IACA,UAAA;AAAA,IACA;AAAA,GACF;AACF;AAKO,IAAM,iBAAA,GAAN,MAAM,kBAAA,CAAkB;AAAA,EACrB,QAAA;AAAA,EACA,WAAA;AAAA,EACA,KAAA;AAAA,EAKA,aAAA,uBAA6C,GAAA,EAAI;AAAA,EACjD,aAAA,GAAyB,KAAA;AAAA,EAEjC,WAAA,CAAY,WAAqB,iBAAA,EAAmB;AAClD,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAChB,IAAA,IAAA,CAAK,WAAA,GAAc,SAAS,QAAQ,CAAA;AACpC,IAAA,IAAA,CAAK,KAAA,GAAQ;AAAA,MAC
X,QAAA,EAAU,CAAA;AAAA,MACV,WAAA,EAAa,CAAA;AAAA,MACb,YAAA,EAAc;AAAA,KAChB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,iBAAA,GAAuC;AAC5C,IAAA,MAAM,OAAA,GAAU,IAAI,kBAAA,EAAkB;AACtC,IAAA,OAAA,CAAQ,aAAA,GAAgB,IAAA;AACxB,IAAA,OAAA,CAAQ,QAAA,GAAW,MAAA;AACnB,IAAA,OAAA,CAAQ,WAAA,GAAc,MAAA;AACtB,IAAA,OAAO,OAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,UAAA,GAAqB;AACnB,IAAA,IAAI,CAAC,KAAK,WAAA,EAAa;AACrB,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AACA,IAAA,OAAO,KAAK,WAAA,CAAY,EAAA;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,cAAA,GAA8B;AAC5B,IAAA,IAAI,CAAC,KAAK,WAAA,EAAa;AACrB,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AACA,IAAA,OAAO,IAAA,CAAK,WAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,YAAY,QAAA,EAA0B;AACpC,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAChB,IAAA,IAAA,CAAK,WAAA,GAAc,SAAS,QAAQ,CAAA;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBA,aAAa,QAAA,EAIX;AACA,IAAA,MAAM,WAAA,GAAc,SAAS,QAAQ,CAAA;AACrC,IAAA,OAAO;AAAA,MACL,IAAI,WAAA,CAAY,EAAA;AAAA,MAChB,MAAM,WAAA,CAAY,IAAA;AAAA,MAClB,UAAA,EAAY,CAAC,WAAA,EAAqB,YAAA,KAAyB;AACzD,QAAA,IAAA,CAAK,UAAA,CAAW,WAAA,EAAa,YAAA,EAAc,QAAQ,CAAA;AAAA,MACrD;AAAA,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,UAAA,CACE,WAAA,GAAsB,CAAA,EACtB,YAAA,GAAuB,GACvB,gBAAA,EACM;AAEN,IAAA,MAAM,aAAA,GAAgB,oBAAoB,IAAA,CAAK,QAAA;AAC/C,IAAA,IAAI,CAAC,aAAA,EAAe;AAClB,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAEA,IAAA,MAAM,WAAA,GAAc,SAAS,aAAa,CAAA;AAE1C,IAAA,IAAA,CAAK,MAAM,QAAA,IAAY,CAAA;AACvB,IAAA,IAAA,CAAK,MAAM,WAAA,IAAe,WAAA;AAC1B,IAAA,IAAA,CAAK,MAAM,YAAA,IAAgB,YAAA;AAG3B,IAAA,IAAI,KAAK,aAAA,EAAe;AACtB,MAAA,MAAM,UAAU,WAAA,CAAY,EAAA;AAC5B,MAAA,MAAM,QAAA,GAAW,IAAA,CAAK,aAAA,CAAc,GAAA,CAAI,OAAO,CAAA,IAAK;AAAA,QAElD,WAAW,WAAA,CAAY,IAAA;AAAA,QACvB,QAAA,EAAU,CAAA;AAAA,QACV,WAAA,EAAa,CAAA;AAAA,QACb,YAAA,EAAc,CAAA;AAAA,QACd,WAAA,EAAa,CAAA;AAAA,QACb,SAAA,EAAW,CAAA;AAAA,QACX,UAAA,EAAY,CAAA;AAAA,QACZ,SAAA,EAAW;AAAA,OACb;AAEA,MAAA,MAAM,KAAA,GAAQ,aAAA,CAAc,aAAA,EAAe,WAAA,EAAa,YAA
Y,CAAA;AAEpE,MAAA,IAAA,CAAK,aAAA,CAAc,IAAI,OAAA,EAAS;AAAA,QAC9B,OAAA;AAAA,QACA,WAAW,WAAA,CAAY,IAAA;AAAA,QACvB,QAAA,EAAU,SAAS,QAAA,GAAW,CAAA;AAAA,QAC9B,WAAA,EAAa,SAAS,WAAA,GAAc,WAAA;AAAA,QACpC,YAAA,EAAc,SAAS,YAAA,GAAe,YAAA;AAAA,QACtC,WAAA,EAAa,QAAA,CAAS,WAAA,GAAc,WAAA,GAAc,YAAA;AAAA,QAClD,SAAA,EAAW,QAAA,CAAS,SAAA,GAAY,KAAA,CAAM,SAAA;AAAA,QACtC,UAAA,EAAY,QAAA,CAAS,UAAA,GAAa,KAAA,CAAM,UAAA;AAAA,QACxC,SAAA,EAAW,QAAA,CAAS,SAAA,GAAY,KAAA,CAAM;AAAA,OACvC,CAAA;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,YAAA,CACJ,MAAA,EACA,qBAAA,GAAgC,GAAA,EAQ/B;AACD,IAAA,IAAI,CAAC,KAAK,QAAA,EAAU;AAClB,MAAA,MAAM,IAAI,MAAM,8BAA8B,CAAA;AAAA,IAChD;AAGA,IAAA,MAAM,EAAE,QAAA,EAAU,WAAA,EAAY,GAAI,MAAM,WAAA,EAAY;AAGpD,IAAA,MAAM,QAAA,GAAW,IAAI,QAAA,CAAS,WAAW,CAAA;AACzC,IAAA,MAAM,WAAA,GAAc,QAAA,CAAS,MAAA,CAAO,MAAM,CAAA,CAAE,MAAA;AAE5C,IAAA,MAAM,KAAA,GAAQ,aAAA;AAAA,MACZ,IAAA,CAAK,QAAA;AAAA,MACL,WAAA;AAAA,MACA;AAAA,KACF;AAEA,IAAA,OAAO;AAAA,MACL,WAAA;AAAA,MACA,YAAA,EAAc,qBAAA;AAAA,MACd,aAAa,WAAA,GAAc,qBAAA;AAAA,MAC3B,WAAW,KAAA,CAAM,SAAA;AAAA,MACjB,YAAY,KAAA,CAAM,UAAA;AAAA,MAClB,WAAW,KAAA,CAAM;AAAA,KACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,QAAA,GAA8B;AAC5B,IAAA,IAAI,KAAK,aAAA,EAAe;AACtB,MAAA,OAAO,IAAA;AAAA,IACT;AAEA,IAAA,IAAI,CAAC,IAAA,CAAK,QAAA,IAAY,CAAC,KAAK,WAAA,EAAa;AACvC,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AAEA,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,KAAA,CAAM,WAAA,GAAc,KAAK,KAAA,CAAM,YAAA;AACxD,IAAA,MAAM,KAAA,GAAQ,aAAA;AAAA,MACZ,IAAA,CAAK,QAAA;AAAA,MACL,KAAK,KAAA,CAAM,WAAA;AAAA,MACX,KAAK,KAAA,CAAM;AAAA,KACb;AAEA,IAAA,OAAO;AAAA,MACL,OAAA,EAAS,KAAK,WAAA,CAAY,EAAA;AAAA,MAC1B,SAAA,EAAW,KAAK,WAAA,CAAY,IAAA;AAAA,MAC5B,QAAA,EAAU,KAAK,KAAA,CAAM,QAAA;AAAA,MACrB,WAAA,EAAa,KAAK,KAAA,CAAM,WAAA;AAAA,MACxB,YAAA,EAAc,KAAK,KAAA,CAAM,YAAA;AAAA,MACzB,WAAA;AAAA,MACA,WAAW,KAAA,CAAM,SAAA;AAAA,MACjB,YAAY,KAAA,CAAM,UAAA;AAAA,MAClB,WAAW,KAAA,CAAM;AAAA,KACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAA,GAWE;AACA,IAAA,MAAM,WAAW,KAAA,CAAM,IAAA,CAAK,IAAA,CAAK,aAAA,CAAc,QAAQ,CAAA;AAEvD
,IAAA,MAAM,MAAA,GAAS;AAAA,MACb,aAAA,EAAe,SAAS,MAAA,CAAO,CAAC,KAAK,CAAA,KAAM,GAAA,GAAM,CAAA,CAAE,QAAA,EAAU,CAAC,CAAA;AAAA,MAC9D,gBAAA,EAAkB,SAAS,MAAA,CAAO,CAAC,KAAK,CAAA,KAAM,GAAA,GAAM,CAAA,CAAE,WAAA,EAAa,CAAC,CAAA;AAAA,MACpE,iBAAA,EAAmB,SAAS,MAAA,CAAO,CAAC,KAAK,CAAA,KAAM,GAAA,GAAM,CAAA,CAAE,YAAA,EAAc,CAAC,CAAA;AAAA,MACtE,WAAA,EAAa,SAAS,MAAA,CAAO,CAAC,KAAK,CAAA,KAAM,GAAA,GAAM,CAAA,CAAE,WAAA,EAAa,CAAC,CAAA;AAAA,MAC/D,cAAA,EAAgB,SAAS,MAAA,CAAO,CAAC,KAAK,CAAA,KAAM,GAAA,GAAM,CAAA,CAAE,SAAA,EAAW,CAAC,CAAA;AAAA,MAChE,eAAA,EAAiB,SAAS,MAAA,CAAO,CAAC,KAAK,CAAA,KAAM,GAAA,GAAM,CAAA,CAAE,UAAA,EAAY,CAAC,CAAA;AAAA,MAClE,SAAA,EAAW,SAAS,MAAA,CAAO,CAAC,KAAK,CAAA,KAAM,GAAA,GAAM,CAAA,CAAE,SAAA,EAAW,CAAC;AAAA,KAC7D;AAEA,IAAA,OAAO,EAAE,UAAU,MAAA,EAAO;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,UAAA,GAAmB;AACjB,IAAA,IAAI,KAAK,aAAA,EAAe;AACtB,MAAA,IAAA,CAAK,oBAAA,EAAqB;AAAA,IAC5B,CAAA,MAAO;AACL,MAAA,MAAM,KAAA,GAAQ,KAAK,QAAA,EAAS;AAC5B,MAAA,IAAI,CAAC,KAAA,EAAO;AACV,QAAA,OAAA,CAAQ,IAAI,yBAAyB,CAAA;AACrC,QAAA;AAAA,MACF;AAEA,MAAA,OAAA,CAAQ,IAAI,mCAA4B,CAAA;AACxC,MAAA,OAAA,CAAQ,IAAI,CAAA,SAAA,EAAY,KAAA,CAAM,SAAS,CAAA,EAAA,EAAK,KAAA,CAAM,OAAO,CAAA,CAAA,CAAG,CAAA;AAC5D,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,aAAA,EAAgB,KAAA,CAAM,QAAQ,CAAA,CAAE,CAAA;AAC5C,MAAA,OAAA,CAAQ,IAAI,CAAA,gBAAA,EAAmB,KAAA,CAAM,WAAA,CAAY,cAAA,EAAgB,CAAA,CAAE,CAAA;AACnE,MAAA,OAAA,CAAQ,IAAI,CAAA,iBAAA,EAAoB,KAAA,CAAM,YAAA,CAAa,cAAA,EAAgB,CAAA,CAAE,CAAA;AACrE,MAAA,OAAA,CAAQ,IAAI,CAAA,gBAAA,EAAmB,KAAA,CAAM,WAAA,CAAY,cAAA,EAAgB,CAAA,CAAE,CAAA;AAEnE,MAAA,IAAI,KAAA,CAAM,YAAY,CAAA,EAAG;AACvB,QAAA,OAAA,CAAQ,IAAI,CAAA,eAAA,EAAkB,KAAA,CAAM,UAAU,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAE,CAAA;AAC1D,QAAA,OAAA,CAAQ,IAAI,CAAA,gBAAA,EAAmB,KAAA,CAAM,WAAW,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAE,CAAA;AAC5D,QAAA,OAAA,CAAQ,IAAI,CAAA,eAAA,EAAkB,KAAA,CAAM,UAAU,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAE,CAAA;AAAA,MAC5D,CAAA,MAAO;AACL,QAAA,OAAA,CAAQ,IAAI,+BAA+B,CAAA;AAAA,MAC7C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAA,GAA6B;AAC3B,IAAA,MAAM,EAAE,QAAA,EAAU,MAAA,EAAO,GAAI,KAAK,kBAAA,EAAmB;AAErD,
IAAA,OAAA,CAAQ,IAAI,8CAAuC,CAAA;AACnD,IAAA,OAAA,CAAQ,IAAI,0PAA6C,CAAA;AAEzD,IAAA,OAAA,CAAQ,IAAI,sBAAsB,CAAA;AAClC,IAAA,KAAA,MAAW,SAAS,QAAA,EAAU;AAC5B,MAAA,OAAA,CAAQ,GAAA,CAAI;AAAA,EAAA,EAAO,KAAA,CAAM,SAAS,CAAA,CAAE,CAAA;AACpC,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,eAAA,EAAkB,KAAA,CAAM,QAAQ,CAAA,CAAE,CAAA;AAC9C,MAAA,OAAA,CAAQ,IAAI,CAAA,kBAAA,EAAqB,KAAA,CAAM,WAAA,CAAY,cAAA,EAAgB,CAAA,CAAE,CAAA;AACrE,MAAA,OAAA,CAAQ,IAAI,CAAA,mBAAA,EAAsB,KAAA,CAAM,YAAA,CAAa,cAAA,EAAgB,CAAA,CAAE,CAAA;AACvE,MAAA,OAAA,CAAQ,IAAI,CAAA,kBAAA,EAAqB,KAAA,CAAM,WAAA,CAAY,cAAA,EAAgB,CAAA,CAAE,CAAA;AACrE,MAAA,IAAI,KAAA,CAAM,YAAY,CAAA,EAAG;AACvB,QAAA,OAAA,CAAQ,IAAI,CAAA,iBAAA,EAAoB,KAAA,CAAM,UAAU,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAE,CAAA;AAC5D,QAAA,OAAA,CAAQ,IAAI,CAAA,kBAAA,EAAqB,KAAA,CAAM,WAAW,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAE,CAAA;AAC9D,QAAA,OAAA,CAAQ,IAAI,CAAA,iBAAA,EAAoB,KAAA,CAAM,UAAU,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAE,CAAA;AAAA,MAC9D,CAAA,MAAO;AACL,QAAA,OAAA,CAAQ,IAAI,CAAA,cAAA,CAAgB,CAAA;AAAA,MAC9B;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,IAAI,0PAA6C,CAAA;AACzD,IAAA,OAAA,CAAQ,IAAI,2BAA2B,CAAA;AACvC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,mBAAA,EAAsB,MAAA,CAAO,aAAa,CAAA,CAAE,CAAA;AACxD,IAAA,OAAA,CAAQ,GAAA;AAAA,MACN,CAAA,sBAAA,EAAyB,MAAA,CAAO,gBAAA,CAAiB,cAAA,EAAgB,CAAA;AAAA,KACnE;AACA,IAAA,OAAA,CAAQ,GAAA;AAAA,MACN,CAAA,uBAAA,EAA0B,MAAA,CAAO,iBAAA,CAAkB,cAAA,EAAgB,CAAA;AAAA,KACrE;AACA,IAAA,OAAA,CAAQ,IAAI,CAAA,gBAAA,EAAmB,MAAA,CAAO,WAAA,CAAY,cAAA,EAAgB,CAAA,CAAE,CAAA;AACpE,IAAA,OAAA,CAAQ,IAAI,CAAA,qBAAA,EAAwB,MAAA,CAAO,eAAe,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAE,CAAA;AACtE,IAAA,OAAA,CAAQ,IAAI,CAAA,sBAAA,EAAyB,MAAA,CAAO,gBAAgB,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAE,CAAA;AACxE,IAAA,OAAA,CAAQ,IAAI,CAAA,eAAA,EAAkB,MAAA,CAAO,UAAU,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAE,CAAA;AAC3D,IAAA,OAAA,CAAQ,IAAI,0PAA6C,CAAA;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA,EAKA,KAAA,GAAc;AACZ,IAAA,IAAA,CAAK,KAAA,GAAQ;AAAA,MACX,QAAA,EAAU,CAAA;AAAA,MACV,WAAA,EAAa,CAAA;AAAA,MACb,YAAA,EAAc;AAAA,KAChB;AACA,IAAA,IAAA,CAAK,cAAc,KAAA,EAAM;AAAA,EAC3B;AACF;AAKO,SAAS,oBAAA,GAA6B;AAC3C,EAAA,OAAA,CAAQ,IAAI,+BAAwB,CAA
A;AACpC,EAAA,MAAM,SAAS,UAAA,EAAW;AAE1B,EAAA,KAAA,MAAW,EAAE,GAAA,EAAK,MAAA,EAAO,IAAK,MAAA,EAAQ;AACpC,IAAA,MAAM,YAAY,GAAA,KAAQ,iBAAA;AAC1B,IAAA,MAAM,aAAA,GAAgB,YAAY,YAAA,GAAe,EAAA;AACjD,IAAA,MAAM,QAAA,GACJ,MAAA,CAAO,mBAAA,KAAwB,CAAA,IAAK,MAAA,CAAO,oBAAA,KAAyB,CAAA,GAChE,MAAA,GACA,CAAA,CAAA,EAAI,MAAA,CAAO,mBAAmB,CAAA,SAAA,EAAY,OAAO,oBAAoB,CAAA,OAAA,CAAA;AAE3E,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,EAAA,EAAK,GAAG,CAAA,EAAG,aAAa,CAAA,CAAE,CAAA;AACtC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAa,MAAA,CAAO,IAAI,CAAA,CAAE,CAAA;AACtC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,QAAA,EAAW,MAAA,CAAO,EAAE,CAAA,CAAE,CAAA;AAClC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAa,QAAQ,CAAA,CAAE,CAAA;AACnC,IAAA,IAAI,OAAO,WAAA,EAAa;AACtB,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,iBAAA,EAAoB,MAAA,CAAO,WAAW,CAAA,CAAE,CAAA;AAAA,IACtD;AACA,IAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AAAA,EAChB;AACF;AC9sBiCA,EAAAA,CAAE,IAAA,CAAK,CAAC,QAAA,EAAU,WAAA,EAAa,QAAQ,CAAC;AAazE,SAAS,oBAAA,CACP,cACA,QAAA,EACQ;AACR,EAAA,MAAM,MAAA,GAAS,GAAG,QAAQ,CAAA,CAAA,CAAA;AAC1B,EAAA,IAAI,YAAA,CAAa,UAAA,CAAW,MAAM,CAAA,EAAG;AACnC,IAAA,OAAO,YAAA,CAAa,KAAA,CAAM,MAAA,CAAO,MAAM,CAAA;AAAA,EACzC;AAEA,EAAA,IAAI,QAAA,KAAa,QAAA,IAAY,YAAA,CAAa,UAAA,CAAW,SAAS,CAAA,EAAG;AAC/D,IAAA,OAAO,YAAA,CAAa,KAAA,CAAM,SAAA,CAAU,MAAM,CAAA;AAAA,EAC5C;AACA,EAAA,OAAO,YAAA;AACT;AAKA,SAAS,4BACP,YAAA,EAC+B;AAC/B,EAAA,IAAI,aAAa,UAAA,CAAW,SAAS,KAAK,YAAA,CAAa,QAAA,CAAS,QAAQ,CAAA,EAAG;AACzE,IAAA,OAAO,QAAA;AAAA,EACT;AACA,EAAA,IACE,aAAa,UAAA,CAAW,YAAY,KACpC,YAAA,CAAa,QAAA,CAAS,QAAQ,CAAA,EAC9B;AACA,IAAA,OAAO,WAAA;AAAA,EACT;AACA,EAAA,IAAI,aAAa,UAAA,CAAW,SAAS,KAAK,YAAA,CAAa,QAAA,CAAS,KAAK,CAAA,EAAG;AACtE,IAAA,OAAO,QAAA;AAAA,EACT;AACA,EAAA,OAAO,MAAA;AACT;AAsCA,SAAS,2BAA2B,QAAA,EAAqC;AACvE,EAAA,MAAM,SAAS,UAAA,CAAW;AAAA,IACxB,kBAAA,EAAoB,IAAA;AAAA,IACpB,gBAAA,EAAkB;AAAA,GACnB,CAAA;AAED,EAAA,KAAA,MAAW,EAAE,MAAA,EAAO,IAAK,MAAA,EAAQ;AAC/B,IAAA,MAAM,aAAA,GAAgB,2BAAA,CAA4B,MAAA,CAAO,EAAE,CAAA;AAC3D,IAAA,IAAI,kBAAkB,QAAA,EAAU;AAC9B,MAAA,OAAO,oBAAA,CAAqB,MAAA,CAAO,EAAA,EAAI,QAAQ,CAAA;AAAA,IACjD;AAAA,EACF;AAGA,EAAA,MAAM,IAAI,KAAA;
AAAA,IACR,wCAAwC,QAAQ,CAAA,qFAAA;AAAA,GAElD;AACF;AAUO,SAAS,uBAAA,CACd,UACA,QAAA,EACQ;AAER,EAAA,IAAI,CAAC,QAAA,EAAU;AACb,IAAA,OAAO,2BAA2B,QAAQ,CAAA;AAAA,EAC5C;AAGA,EAAA,MAAM,WAAA,GAAc,SAAS,QAAQ,CAAA;AAGrC,EAAA,IAAI,CAAC,YAAY,kBAAA,EAAoB;AACnC,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,UAAU,QAAQ,CAAA,0CAAA;AAAA,KACpB;AAAA,EACF;AAGA,EAAA,MAAM,aAAA,GAAgB,2BAAA,CAA4B,WAAA,CAAY,EAAE,CAAA;AAChE,EAAA,IAAI,kBAAkB,QAAA,EAAU;AAC9B,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,OAAA,EAAU,QAAQ,CAAA,aAAA,EAAgB,aAAA,IAAiB,SAAS,CAAA,eAAA,EACnD,QAAQ,WAAW,QAAQ,CAAA,oCAAA;AAAA,KACtC;AAAA,EACF;AAEA,EAAA,OAAO,oBAAA,CAAqB,WAAA,CAAY,EAAA,EAAI,QAAQ,CAAA;AACtD;AA+BO,SAAS,wBACd,QAAA,EAC+B;AAC/B,EAAA,MAAM,WAAA,GAAc,SAAS,QAAQ,CAAA;AAErC,EAAA,IAAI,CAAC,YAAY,kBAAA,EAAoB;AACnC,IAAA,OAAO,MAAA;AAAA,EACT;AAEA,EAAA,OAAO,2BAAA,CAA4B,YAAY,EAAE,CAAA;AACnD;AChLO,IAAM,yBAAN,MAEP;AAAA,EACW,IAAA,GAAO,WAAA;AAAA,EACP,gBAAA,GAAmB,IAAA;AAAA,EAEpB,MAAA;AAAA,EACA,MAAA;AAAA,EAER,WAAA,CAAY,MAAA,GAAuC,EAAC,EAAGC,OAAAA,EAAsB;AAC3E,IAAA,MAAM,MAAA,GAAS,MAAA,CAAO,MAAA,IAAU,OAAA,CAAQ,GAAA,CAAI,iBAAA;AAC5C,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAI,SAAA,CAAU,EAAE,QAAQ,CAAA;AACtC,IAAA,IAAA,CAAK,MAAA,GAASA,OAAAA;AAAA,EAChB;AAAA,EAEA,MAAM,MAAA,CACJ,QAAA,EACA,OAAA,EACsB;AACtB,IAAA,IAAI,QAAA,CAAS,WAAW,CAAA,EAAG;AACzB,MAAA,MAAM,IAAI,MAAM,2BAA2B,CAAA;AAAA,IAC7C;AAGA,IAAA,MAAM,QAAA,GAAW,QAAA,CAAS,CAAC,CAAA,EAAG,KAAA;AAC9B,IAAA,MAAM,KAAA,GAAQ,uBAAA,CAAwB,QAAA,EAAU,WAAW,CAAA;AAE3D,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,4BAAA,EAA8B;AAAA,MACrD,cAAc,QAAA,CAAS,MAAA;AAAA,MACvB,UAAU,QAAA,IAAY,SAAA;AAAA,MACtB;AAAA,KACD,CAAA;AAGD,IAAA,MAAM,aAAA,GAAgB,QAAA,CAAS,GAAA,CAAI,CAAC,KAAK,CAAA,KAAM;AAE7C,MAAA,MAAM,WAAW,GAAA,CAAI,KAAA,GACjB,wBAAwB,GAAA,CAAI,KAAA,EAAO,WAAW,CAAA,GAC9C,KAAA;AAGJ,MAAA,MAAM,cAAA,GACJ,IAAI,KAAA,IAAS,MAAA,CAAO,KAAK,GAAA,CAAI,KAAK,EAAE,MAAA,GAAS,CAAA,GACzC,OAAO,OAAA,CAAQ,GAAA,CAAI,KAAK,CAAA,CAAE,GAAA,CAAI,CAAC,CAAC,IAAA,EAAM,IAAI,CAAA,MAAO;AAAA,QAC/C,IAAA;AAAA,QACA,WAAA,E
AAa,KAAK,WAAA,IAAe,EAAA;AAAA,QACjC,cAAe,IAAA,CAAK,WAAA,GAChB,UAAA,CAAW,IAAA,CAAK,WAAW,CAAA,GAC3B;AAAA,UACE,IAAA,EAAM,QAAA;AAAA,UACN,YAAY;AAAC;AACf,QACJ,CAAA,GACF,MAAA;AAGN,MAAA,IAAI,UAAA;AACJ,MAAA,IAAI,IAAI,UAAA,EAAY;AAClB,QAAA,IAAI,GAAA,CAAI,eAAe,MAAA,EAAQ;AAC7B,UAAA,UAAA,GAAa,EAAE,MAAM,MAAA,EAAO;AAAA,QAC9B,CAAA,MAAA,IAAW,GAAA,CAAI,UAAA,KAAe,UAAA,EAAY;AACxC,UAAA,UAAA,GAAa,EAAE,MAAM,KAAA,EAAM;AAAA,QAC7B,CAAA,MAAA,IACE,OAAO,GAAA,CAAI,UAAA,KAAe,YAC1B,GAAA,CAAI,UAAA,CAAW,SAAS,MAAA,EACxB;AACA,UAAA,UAAA,GAAa,EAAE,IAAA,EAAM,MAAA,EAAQ,IAAA,EAAM,GAAA,CAAI,WAAW,QAAA,EAAS;AAAA,QAC7D;AAAA,MAEF;AAEA,MAAA,OAAO;AAAA,QACL,SAAA,EAAW,GAAA,CAAI,QAAA,IAAY,CAAA,QAAA,EAAW,CAAC,CAAA,CAAA;AAAA,QACvC,MAAA,EAAQ;AAAA,UACN,KAAA,EAAO,QAAA;AAAA,UACP,UAAA,EAAY,IAAI,SAAA,IAAa,IAAA;AAAA,UAC7B,QAAA,EAAU,CAAC,EAAE,IAAA,EAAM,QAAiB,OAAA,EAAS,GAAA,CAAI,QAAQ,CAAA;AAAA,UACzD,GAAI,GAAA,CAAI,MAAA,IAAU,EAAE,MAAA,EAAQ,IAAI,MAAA,EAAO;AAAA,UACvC,GAAI,GAAA,CAAI,WAAA,KAAgB,MAAA,IAAa;AAAA,YACnC,aAAa,GAAA,CAAI;AAAA,WACnB;AAAA,UACA,GAAI,cAAA,IACF,GAAA,CAAI,eAAe,MAAA,IAAU,EAAE,OAAO,cAAA,EAAe;AAAA,UACvD,GAAI,UAAA,IAAc,EAAE,WAAA,EAAa,UAAA;AAAW;AAC9C,OACF;AAAA,IACF,CAAC,CAAA;AAED,IAAA,MAAM,WAAW,MAAM,IAAA,CAAK,MAAA,CAAO,QAAA,CAAS,QAAQ,MAAA,CAAO;AAAA,MACzD,QAAA,EAAU;AAAA,KACX,CAAA;AAED,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,2BAAA,EAA6B;AAAA,MACpD,SAAS,QAAA,CAAS,EAAA;AAAA,MAClB,cAAc,QAAA,CAAS,MAAA;AAAA,MACvB,kBAAkB,QAAA,CAAS;AAAA,KAC5B,CAAA;AAED,IAAA,OAAO;AAAA,MACL,IAAI,QAAA,CAAS,EAAA;AAAA,MACb,UAAU,IAAA,CAAK,IAAA;AAAA,MACf,cAAc,QAAA,CAAS,MAAA;AAAA,MACvB,SAAA,EAAW,IAAI,IAAA,CAAK,QAAA,CAAS,UAAU,CAAA;AAAA,MACvC,QAAA,EAAU;AAAA,QACR,KAAA;AAAA,QACA,kBAAkB,QAAA,CAAS,iBAAA;AAAA,QAC3B,GAAG,OAAA,EAAS;AAAA;AACd,KACF;AAAA,EACF;AAAA,EAEA,MAAM,YAAY,MAAA,EAA2C;AAC3D,IAAA,MAAM,KAAA,GAAQ,MAAM,IAAA,CAAK,MAAA,CAAO,SAAS,OAAA,CAAQ,QAAA,CAAS,OAAO,EAAE,CAAA;AAEnE,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,cAAA,EAAgB,SAAA,IAAa,CAAA;AAC1D,IAAA,MAAM,YAAA,GAAe,KAAA,CAAM,cAAA,EAAgB,OAAA,IAAW,CAAA;AACtD,IAAA,MAAM,aAAA,GAAgB,KAAA,CAAM,cAAA,EAAgB,QAAA,IAAY,
CAAA;AACxD,IAAA,MAAM,YAAA,GAAe,KAAA,CAAM,cAAA,EAAgB,OAAA,IAAW,CAAA;AACtD,IAAA,MAAM,eAAA,GAAkB,KAAA,CAAM,cAAA,EAAgB,UAAA,IAAc,CAAA;AAE5D,IAAA,MAAM,cAAA,GACJ,cAAA,GAAiB,YAAA,GAAe,aAAA,GAAgB,YAAA;AAClD,IAAA,MAAM,aAAa,cAAA,GAAiB,eAAA;AAEpC,IAAA,MAAM,MAAA,GAAsB;AAAA,MAC1B,KAAA,EAAO,IAAA,CAAK,SAAA,CAAU,KAAA,CAAM,iBAAiB,CAAA;AAAA,MAC7C,cAAA;AAAA,MACA,UAAA,EAAY,cAAc,MAAA,CAAO,YAAA;AAAA,MACjC,cAAA;AAAA,MACA,WAAA,EAAa,eAAe,aAAA,GAAgB;AAAA,KAC9C;AAEA,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,OAAA,EAAS,wBAAA,EAA0B;AAAA,MAClD,SAAS,MAAA,CAAO,EAAA;AAAA,MAChB,OAAO,MAAA,CAAO,KAAA;AAAA,MACd,WAAW,MAAA,CAAO,cAAA;AAAA,MAClB,OAAO,MAAA,CAAO;AAAA,KACf,CAAA;AAED,IAAA,OAAO,MAAA;AAAA,EACT;AAAA,EAEA,MAAM,WAAW,MAAA,EAAgD;AAE/D,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,WAAA,CAAY,MAAM,CAAA;AAC5C,IAAA,IAAI,MAAA,CAAO,KAAA,KAAU,WAAA,IAAe,MAAA,CAAO,UAAU,QAAA,EAAU;AAC7D,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,0BAAA,EAA6B,MAAA,CAAO,KAAK,CAAA,CAAE,CAAA;AAAA,IAC7D;AAEA,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,oCAAA,EAAsC;AAAA,MAC7D,SAAS,MAAA,CAAO;AAAA,KACjB,CAAA;AAED,IAAA,MAAM,UAA4B,EAAC;AACnC,IAAA,IAAI,KAAA,GAAQ,CAAA;AAGZ,IAAA,MAAM,eAAA,GAAkB,MAAM,IAAA,CAAK,MAAA,CAAO,SAAS,OAAA,CAAQ,OAAA;AAAA,MACzD,MAAA,CAAO;AAAA,KACT;AACA,IAAA,WAAA,MAAiB,SAAS,eAAA,EAAiB;AACzC,MAAA,IAAI,KAAA,CAAM,MAAA,CAAO,IAAA,KAAS,WAAA,EAAa;AACrC,QAAA,MAAM,OAAA,GAAU,MAAM,MAAA,CAAO,OAAA;AAC7B,QAAA,MAAM,WAAA,GAAc,QAAQ,OAAA,CAAQ,IAAA;AAAA,UAClC,CAAC,CAAA,KACC,CAAA,CAAE,IAAA,KAAS;AAAA,SACf;AAEA,QAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,UACX,KAAA;AAAA,UACA,UAAU,KAAA,CAAM,SAAA;AAAA,UAChB,IAAA,EAAM,aAAa,IAAA,IAAQ,EAAA;AAAA,UAC3B,WAAA,EAAa,OAAA,CAAQ,KAAA,EAAO,YAAA,IAAgB,CAAA;AAAA,UAC5C,YAAA,EAAc,OAAA,CAAQ,KAAA,EAAO,aAAA,IAAiB;AAAA,SAC/C,CAAA;AAAA,MACH,CAAA,MAAO;AAEL,QAAA,IAAI,QAAA;AACJ,QAAA,QAAQ,KAAA,CAAM,OAAO,IAAA;AAAM,UACzB,KAAK,SAAA;AAEH,YAAA,QAAA,GACG,KAAA,CAAM,OAAO,KAAA,EAAgC,OAAA,IAC9C,eAAe,KAAA,CAAM,MAAA,CAAO,KAAA,EAAO,IAAI,CAAA,CAAA,IACvC,iBAAA;AACF,YAAA;AAAA,UACF,KAAK,UAAA;AACH,YAAA,QAAA,GAAW,sBAAA;AACX,YAAA;AAAA,UACF,KAAK,SAAA;AACH,YAAA,QAAA,GAAW,iBAAA;AACX,YAAA;AA
AA,UACF;AACE,YAAA,QAAA,GAAW,CAAA,qBAAA,EAAyB,KAAA,CAAM,MAAA,CAA4B,IAAI,CAAA,CAAA;AAAA;AAG9E,QAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,UACX,KAAA;AAAA,UACA,UAAU,KAAA,CAAM,SAAA;AAAA,UAChB,IAAA,EAAM,EAAA;AAAA,UACN,WAAA,EAAa,CAAA;AAAA,UACb,YAAA,EAAc,CAAA;AAAA,UACd,KAAA,EAAO;AAAA,SACR,CAAA;AAAA,MACH;AACA,MAAA,KAAA,EAAA;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,mCAAA,EAAqC;AAAA,MAC5D,SAAS,MAAA,CAAO,EAAA;AAAA,MAChB,aAAa,OAAA,CAAQ,MAAA;AAAA,MACrB,YAAA,EAAc,QAAQ,MAAA,CAAO,CAAC,MAAM,CAAC,CAAA,CAAE,KAAK,CAAA,CAAE,MAAA;AAAA,MAC9C,YAAY,OAAA,CAAQ,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,KAAK,CAAA,CAAE;AAAA,KAC5C,CAAA;AAED,IAAA,OAAO,OAAA;AAAA,EACT;AAAA,EAEA,MAAM,OAAO,MAAA,EAAoC;AAC/C,IAAA,MAAM,KAAK,MAAA,CAAO,QAAA,CAAS,OAAA,CAAQ,MAAA,CAAO,OAAO,EAAE,CAAA;AACnD,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,2BAAA,EAA6B;AAAA,MACpD,SAAS,MAAA,CAAO;AAAA,KACjB,CAAA;AAAA,EACH;AAAA,EAEQ,UAAU,MAAA,EAA4B;AAC5C,IAAA,QAAQ,MAAA;AAAQ,MACd,KAAK,OAAA;AACH,QAAA,OAAO,WAAA;AAAA,MACT,KAAK,WAAA;AAAA,MACL,KAAK,UAAA;AACH,QAAA,OAAO,WAAA;AAAA,MACT,KAAK,aAAA;AACH,QAAA,OAAO,YAAA;AAAA,MACT;AACE,QAAA,OAAO,SAAA;AAAA;AACX,EACF;AACF;AC9OO,IAAM,sBAAN,MAEP;AAAA,EACW,IAAA,GAAO,QAAA;AAAA,EACP,gBAAA,GAAmB,IAAA;AAAA,EAEpB,EAAA;AAAA,EACA,MAAA;AAAA,EAER,WAAA,CAAY,MAAA,GAAoC,EAAC,EAAGA,OAAAA,EAAsB;AACxE,IAAA,MAAM,MAAA,GAAS,MAAA,CAAO,MAAA,IAAU,OAAA,CAAQ,GAAA,CAAI,4BAAA;AAC5C,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAA,CAAK,EAAA,GAAK,IAAI,WAAA,CAAY,EAAE,QAAQ,CAAA;AACpC,IAAA,IAAA,CAAK,MAAA,GAASA,OAAAA;AAAA,EAChB;AAAA,EAEA,MAAM,MAAA,CACJ,QAAA,EACA,OAAA,EACsB;AACtB,IAAA,IAAI,QAAA,CAAS,WAAW,CAAA,EAAG;AACzB,MAAA,MAAM,IAAI,MAAM,2BAA2B,CAAA;AAAA,IAC7C;AAGA,IAAA,MAAM,QAAA,GAAW,QAAA,CAAS,CAAC,CAAA,EAAG,KAAA;AAC9B,IAAA,MAAM,KAAA,GAAQ,uBAAA,CAAwB,QAAA,EAAU,QAAQ,CAAA;AAGxD,IAAA,MAAM,YAAY,QAAA,CAAS,GAAA;AAAA,MACzB,CAAC,GAAA,EAAK,GAAA,KAAS,GAAA,CAAwB,EAAA,IAAM,WAAW,GAAG,CAAA;AAAA,KAC7D;AAEA,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,yBAAA,EAA2B;AAAA,MAClD,cAAc,QAAA,CAAS,MAAA;AAAA,MACvB,UAA
U,QAAA,IAAY,SAAA;AAAA,MACtB;AAAA,KACD,CAAA;AAGD,IAAA,MAAM,eAAA,GAAoC,QAAA,CAAS,GAAA,CAAI,CAAC,GAAA,KAAQ;AAC9D,MAAA,MAAM,QAAiC,CAAC,EAAE,IAAA,EAAM,GAAA,CAAI,QAAQ,CAAA;AAE5D,MAAA,IAAI,IAAI,MAAA,EAAQ;AAEd,QAAA,KAAA,CAAM,IAAA,CAAK;AAAA,UACT,IAAA,EAAM,CAAA,yEAAA;AAAA,SACP,CAAA;AAAA,MACH;AAEA,MAAA,IAAI,IAAI,SAAA,EAAW;AACjB,QAAA,KAAA,CAAM,IAAA,CAAK;AAAA,UACT,IAAA,EAAM,CAAA,oCAAA,EAAuC,GAAA,CAAI,SAAS,CAAA,QAAA;AAAA,SAC3D,CAAA;AAAA,MACH;AAEA,MAAA,IAAI,GAAA,CAAI,gBAAgB,MAAA,EAAW;AACjC,QAAA,KAAA,CAAM,IAAA,CAAK;AAAA,UACT,IAAA,EAAM,CAAA,6BAAA,EAAgC,GAAA,CAAI,WAAW,CAAA,mBAAA;AAAA,SACtD,CAAA;AAAA,MACH;AAGA,MAAA,MAAM,WACH,GAAA,CAAwB,EAAA,IAAM,WAAW,QAAA,CAAS,OAAA,CAAQ,GAAG,CAAC,CAAA,CAAA;AAGjE,MAAA,MAAM,QAAA,GAAoC;AAAA,QACxC,QAAA,EAAU;AAAA,UACR;AAAA,YACE,IAAA,EAAM,MAAA;AAAA,YACN;AAAA;AACF,SACF;AAAA;AAAA,QAEA,QAAA,EAAU,EAAE,QAAA;AAAS,OACvB;AAGA,MAAA,IAAI,GAAA,CAAI,SAAS,MAAA,CAAO,IAAA,CAAK,IAAI,KAAK,CAAA,CAAE,SAAS,CAAA,EAAG;AAClD,QAAA,MAAM,MAAA,GAAkC;AAAA,UACtC,KAAA,EAAO;AAAA,YACL;AAAA,cACE,oBAAA,EAAsB,MAAA,CAAO,OAAA,CAAQ,GAAA,CAAI,KAAK,CAAA,CAAE,GAAA;AAAA,gBAC9C,CAAC,CAAC,IAAA,EAAM,IAAI,CAAA,MAAO;AAAA,kBACjB,IAAA;AAAA,kBACA,aAAa,IAAA,CAAK,WAAA;AAAA;AAAA,kBAElB,YAAY,IAAA,CAAK,WAAA,GACbC,UAAAA,CAAW,IAAA,CAAK,WAAW,CAAA,GAC3B;AAAA,iBACN;AAAA;AACF;AACF;AACF,SACF;AAGA,QAAA,IAAI,IAAI,UAAA,EAAY;AAClB,UAAA,IAAI,GAAA,CAAI,eAAe,UAAA,EAAY;AACjC,YAAA,MAAA,CAAO,UAAA,GAAa;AAAA,cAClB,qBAAA,EAAuB,EAAE,IAAA,EAAM,yBAAA,CAA0B,GAAA;AAAI,aAC/D;AAAA,UACF,CAAA,MAAA,IAAW,GAAA,CAAI,UAAA,KAAe,MAAA,EAAQ;AACpC,YAAA,MAAA,CAAO,UAAA,GAAa;AAAA,cAClB,qBAAA,EAAuB,EAAE,IAAA,EAAM,yBAAA,CAA0B,IAAA;AAAK,aAChE;AAAA,UACF,CAAA,MAAA,IACE,OAAO,GAAA,CAAI,UAAA,KAAe,YAC1B,GAAA,CAAI,UAAA,CAAW,SAAS,MAAA,EACxB;AACA,YAAA,MAAA,CAAO,UAAA,GAAa;AAAA,cAClB,qBAAA,EAAuB;AAAA,gBACrB,MAAM,yBAAA,CAA0B,GAAA;AAAA,gBAChC,oBAAA,EAAsB,CAAC,GAAA,CAAI,UAAA,CAAW,QAAQ;AAAA;AAChD,aACF;AAAA,UACF;AAAA,QAEF;AAEA,QAAA,QAAA,CAAS,MAAA,GAAS,MAAA;AAAA,MACpB;AAEA,MAAA,OAAO,QAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,MAAM,QAAA,GAAW,MAAM,IAAA,CAAK,EAAA,CAAG,QAAQ,MAAA,CAAO;
AAAA,MAC5C,KAAA;AAAA,MACA,GAAA,EAAK,EAAE,eAAA,EAAgB;AAAA,MACvB,MAAA,EAAQ;AAAA,QACN,WAAA,EACE,SAAS,WAAA,IAAe,CAAA,MAAA,EAAS,KAAK,GAAA,EAAK,CAAA,CAAA,EAAI,QAAA,CAAS,MAAM,CAAA;AAAA;AAClE,KACD,CAAA;AAED,IAAA,IAAI,CAAC,SAAS,IAAA,EAAM;AAClB,MAAA,MAAM,IAAI,MAAM,wCAAwC,CAAA;AAAA,IAC1D;AAEA,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,wBAAA,EAA0B;AAAA,MACjD,WAAW,QAAA,CAAS,IAAA;AAAA,MACpB,cAAc,QAAA,CAAS;AAAA,KACxB,CAAA;AAED,IAAA,OAAO;AAAA,MACL,IAAI,QAAA,CAAS,IAAA;AAAA,MACb,UAAU,IAAA,CAAK,IAAA;AAAA,MACf,cAAc,QAAA,CAAS,MAAA;AAAA,MACvB,SAAA,sBAAe,IAAA,EAAK;AAAA,MACpB,QAAA,EAAU;AAAA,QACR,KAAA;AAAA,QACA,aAAa,OAAA,EAAS,WAAA;AAAA,QACtB;AAAA;AAAA;AACF,KACF;AAAA,EACF;AAAA,EAEA,MAAM,YAAY,MAAA,EAA2C;AAC3D,IAAA,MAAM,KAAA,GAAQ,MAAM,IAAA,CAAK,EAAA,CAAG,OAAA,CAAQ,IAAI,EAAE,IAAA,EAAM,MAAA,CAAO,EAAA,EAAI,CAAA;AAE3D,IAAA,MAAM,MAAA,GAAsB;AAAA,MAC1B,KAAA,EAAO,IAAA,CAAK,QAAA,CAAS,KAAA,CAAM,KAAK,CAAA;AAAA,MAChC,cAAA,EAAgB,MAAA,CAAO,KAAA,CAAM,eAAA,EAAiB,eAAe,CAAA,IAAK,CAAA;AAAA,MAClE,YAAY,MAAA,CAAO,YAAA;AAAA,MACnB,cAAA,EAAgB,MAAM,eAAA,EAAiB,eAAA,GACnC,OAAO,KAAA,CAAM,eAAA,CAAgB,eAAe,CAAA,GAC5C,MAAA;AAAA,MACJ,WAAA,EAAa,MAAM,eAAA,EAAiB,WAAA,GAChC,OAAO,KAAA,CAAM,eAAA,CAAgB,WAAW,CAAA,GACxC,MAAA;AAAA,MACJ,OAAQ,KAAA,CAAkC,KAAA,GACtC,MAAA,CAAQ,KAAA,CAAkC,KAAK,CAAA,GAC/C;AAAA,KACN;AAIA,IAAA,IACE,MAAM,KAAA,KAAU,QAAA,CAAS,uBACzB,KAAA,CAAM,KAAA,KAAU,SAAS,gBAAA,EACzB;AACA,MAAA,MAAM,gBAAA,GAAmB,MAAM,IAAA,EAAM,gBAAA;AACrC,MAAA,IAAI,gBAAA,IAAoB,KAAA,CAAM,OAAA,CAAQ,gBAAgB,CAAA,EAAG;AACvD,QAAA,IAAI,gBAAA,GAAmB,CAAA;AACvB,QAAA,IAAI,iBAAA,GAAoB,CAAA;AAExB,QAAA,KAAA,MAAW,mBAAmB,gBAAA,EAAkB;AAC9C,UAAA,MAAM,aAAA,GAAgB,gBAAgB,QAAA,EAAU,aAAA;AAChD,UAAA,IAAI,aAAA,EAAe;AACjB,YAAA,gBAAA,IAAoB,cAAc,gBAAA,IAAoB,CAAA;AACtD,YAAA,iBAAA,IAAqB,cAAc,oBAAA,IAAwB,CAAA;AAAA,UAC7D;AAAA,QACF;AAEA,QAAA,MAAA,CAAO,gBAAA,GAAmB,gBAAA;AAC1B,QAAA,MAAA,CAAO,iBAAA,GAAoB,iBAAA;AAE3B,QAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,0BAAA,EAA4B;AAAA,UACnD,SAAS,MAAA,CAAO,EAAA;AAAA,UAChB,gBAAA;AAAA,UACA,iBAAA;AAAA,UACA,eAAe,gBAAA,CAAiB;AAAA,SACjC,CAAA;AAAA
,MACH;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,QAAQ,GAAA,CAAI,OAAA,EAAS,cAAA,EAAgB,EAAE,OAAO,CAAA;AAEnD,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,OAAA,EAAS,qBAAA,EAAuB;AAAA,MAC/C,SAAS,MAAA,CAAO,EAAA;AAAA,MAChB,OAAO,MAAA,CAAO,KAAA;AAAA,MACd,WAAW,MAAA,CAAO,cAAA;AAAA,MAClB,OAAO,MAAA,CAAO,UAAA;AAAA,MACd,WAAW,KAAA,CAAM,SAAA;AAAA,MACjB,SAAS,KAAA,CAAM,OAAA;AAAA,MACf,kBAAkB,MAAA,CAAO,gBAAA;AAAA,MACzB,mBAAmB,MAAA,CAAO;AAAA,KAC3B,CAAA;AAED,IAAA,OAAO,MAAA;AAAA,EACT;AAAA,EAEA,MAAM,UAAA,CACJ,MAAA,EACA,SAAA,EAC2B;AAE3B,IAAA,MAAM,UAAA,GACJ,SAAA,IAAc,MAAA,CAAO,QAAA,EAAU,SAAA;AAEjC,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,OAAA,EAAS,4CAAA,EAA8C;AAAA,MACtE,SAAS,MAAA,CAAO,EAAA;AAAA,MAChB,iBAAA,EAAmB,CAAC,CAAC,SAAA;AAAA,MACrB,iBAAA,EAAmB,CAAC,CAAC,MAAA,CAAO,QAAA;AAAA,MAC5B,oBAAA,EAAsB,CAAC,CAAC,MAAA,CAAO,QAAA,EAAU,SAAA;AAAA,MACzC,eAAA,EAAiB,YAAY,MAAA,IAAU;AAAA,KACxC,CAAA;AAED,IAAA,MAAM,KAAA,GAAQ,MAAM,IAAA,CAAK,EAAA,CAAG,OAAA,CAAQ,IAAI,EAAE,IAAA,EAAM,MAAA,CAAO,EAAA,EAAI,CAAA;AAE3D,IAAA,IACE,MAAM,KAAA,KAAU,QAAA,CAAS,uBACzB,KAAA,CAAM,KAAA,KAAU,SAAS,gBAAA,EACzB;AACA,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,0BAAA,EAA6B,KAAA,CAAM,KAAK,CAAA,CAAE,CAAA;AAAA,IAC5D;AAEA,IAAA,IAAI,KAAA,CAAM,KAAA,KAAU,QAAA,CAAS,gBAAA,EAAkB;AAC7C,MAAA,MAAM,QAAA,GACH,MAAkC,KAAA,IAAS,eAAA;AAC9C,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,cAAA,EAAiB,QAAQ,CAAA,CAAE,CAAA;AAAA,IAC7C;AAEA,IAAA,MAAM,qBAAA,GAAwB,MAAM,IAAA,EAAM,gBAAA;AAC1C,IAAA,IAAI,CAAC,qBAAA,EAAuB;AAC1B,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAEA,IAAA,IAAI,CAAC,qBAAA,IAAyB,CAAC,KAAA,CAAM,OAAA,CAAQ,qBAAqB,CAAA,EAAG;AACnE,MAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,OAAA,EAAS,kCAAA,EAAoC;AAAA,QAC5D,SAAS,MAAA,CAAO,EAAA;AAAA,QAChB,WAAA,EAAa,CAAC,CAAC,qBAAA;AAAA,QACf,QAAA,EAAU,MAAM,IAAA,GAAO,MAAA,CAAO,KAAK,KAAA,CAAM,IAAI,IAAI;AAAC,OACnD,CAAA;AACD,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,iCAAA,EAAmC;AAAA,MAC1D,SAAS,MAAA,CAAO,EAAA;AAAA,MAChB,eAAe,qBAAA,CAAsB,MAAA;AAAA,MACrC,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,qBAAA,CAAsB,CAAC,CAAC;AAAA;AAAA,KACnD,CAAA;AAED,IAA
A,OAAO,qBAAA,CAAsB,GAAA,CAAI,CAAC,eAAA,EAAiB,KAAA,KAAU;AAC3D,MAAA,IAAI;AAEF,QAAA,MAAM,mBACJ,eAAA,CACA,QAAA;AACF,QAAA,MAAM,WACJ,gBAAA,EAAkB,QAAA,IAClB,aAAa,KAAK,CAAA,IAClB,WAAW,KAAK,CAAA,CAAA;AAElB,QAAA,IAAI,CAAC,UAAA,GAAa,KAAK,CAAA,IAAK,CAAC,kBAAkB,QAAA,EAAU;AACvD,UAAA,IAAA,CAAK,MAAA,EAAQ,GAAA;AAAA,YACX,MAAA;AAAA,YACA,+BAA+B,KAAK,CAAA,gBAAA,CAAA;AAAA,YACpC,EAAE,KAAA;AAAM,WACV;AAAA,QACF;AAEA,QAAA,IAAI,gBAAgB,KAAA,EAAO;AACzB,UAAA,MAAMC,OAAAA,GAAS;AAAA,YACb,KAAA;AAAA,YACA,QAAA;AAAA,YACA,IAAA,EAAM,EAAA;AAAA,YACN,WAAA,EAAa,CAAA;AAAA,YACb,YAAA,EAAc,CAAA;AAAA,YACd,KAAA,EAAO,eAAA,CAAgB,KAAA,CAAM,OAAA,IAAW;AAAA,WAC1C;AACA,UAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,OAAA,EAAS,CAAA,SAAA,EAAY,KAAK,CAAA,UAAA,CAAA,EAAc;AAAA,YACvD,QAAA;AAAA,YACA,OAAOA,OAAAA,CAAO;AAAA,WACf,CAAA;AACD,UAAA,OAAOA,OAAAA;AAAA,QACT;AAEA,QAAA,MAAM,WAAW,eAAA,CAAgB,QAAA;AACjC,QAAA,IAAI,CAAC,QAAA,EAAU;AACb,UAAA,MAAMA,OAAAA,GAAS;AAAA,YACb,KAAA;AAAA,YACA,IAAA,EAAM,EAAA;AAAA,YACN,WAAA,EAAa,CAAA;AAAA,YACb,YAAA,EAAc,CAAA;AAAA,YACd,KAAA,EAAO;AAAA,WACT;AACA,UAAA,IAAA,CAAK,MAAA,EAAQ,GAAA;AAAA,YACX,OAAA;AAAA,YACA,YAAY,KAAK,CAAA,uBAAA,CAAA;AAAA,YACjB,EAAE,QAAA;AAAS,WACb;AACA,UAAA,OAAOA,OAAAA;AAAA,QACT;AAEA,QAAA,MAAM,IAAA,GACJ,QAAA,CAAS,IAAA,IACT,QAAA,CAAS,UAAA,GAAa,CAAC,CAAA,EAAG,OAAA,EAAS,KAAA,GAAQ,CAAC,CAAA,EAAG,IAAA,IAC/C,EAAA;AACF,QAAA,MAAM,gBAAgB,QAAA,CAAS,aAAA;AAE/B,QAAA,MAAM,MAAA,GAAS;AAAA,UACb,KAAA;AAAA,UACA,QAAA;AAAA;AAAA,UACA,IAAA;AAAA,UACA,WAAA,EAAa,eAAe,gBAAA,IAAoB,CAAA;AAAA,UAChD,YAAA,EAAc,eAAe,oBAAA,IAAwB;AAAA,SACvD;AACA,QAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,OAAA,EAAS,CAAA,SAAA,EAAY,KAAK,CAAA,oBAAA,CAAA,EAAwB;AAAA,UACjE,QAAA;AAAA,UACA,YAAY,IAAA,CAAK,MAAA;AAAA,UACjB,aAAa,MAAA,CAAO,WAAA;AAAA,UACpB,cAAc,MAAA,CAAO;AAAA,SACtB,CAAA;AACD,QAAA,OAAO,MAAA;AAAA,MACT,SAAS,KAAA,EAAO;AACd,QAAA,MAAM,MAAA,GAAS;AAAA,UACb,KAAA;AAAA,UACA,IAAA,EAAM,EAAA;AAAA,UACN,WAAA,EAAa,CAAA;AAAA,UACb,YAAA,EAAc,CAAA;AAAA,UACd,OAAO,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK;AAAA,SAC9D;AACA,QAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,OAAA,EAAS,CAAA,SAA
A,EAAY,KAAK,CAAA,gBAAA,CAAA,EAAoB;AAAA,UAC7D,OAAO,MAAA,CAAO;AAAA,SACf,CAAA;AACD,QAAA,OAAO,MAAA;AAAA,MACT;AAAA,IACF,CAAC,CAAA;AAAA,EACH;AAAA,EAEA,MAAM,OAAO,MAAA,EAAoC;AAE/C,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,CAAK,GAAG,OAAA,CAAQ,MAAA,CAAO,EAAE,IAAA,EAAM,MAAA,CAAO,IAAI,CAAA;AAChD,MAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,wBAAA,EAA0B;AAAA,QACjD,SAAS,MAAA,CAAO;AAAA,OACjB,CAAA;AAAA,IACH,SAAS,KAAA,EAAO;AACd,MAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,+BAAA,EAAiC;AAAA,QACxD,SAAS,MAAA,CAAO,EAAA;AAAA,QAChB,OAAO,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK;AAAA,OAC7D,CAAA;AACD,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AAAA,EAEQ,SAAS,KAAA,EAA8B;AAC7C,IAAA,QAAQ,KAAA;AAAO,MACb,KAAK,QAAA,CAAS,mBAAA;AACZ,QAAA,OAAO,WAAA;AAAA,MACT,KAAK,QAAA,CAAS,gBAAA;AACZ,QAAA,OAAO,QAAA;AAAA,MACT,KAAK,QAAA,CAAS,mBAAA;AACZ,QAAA,OAAO,WAAA;AAAA,MACT,KAAK,QAAA,CAAS,iBAAA;AACZ,QAAA,OAAO,SAAA;AAAA,MACT,KAAK,QAAA,CAAS,iBAAA;AACZ,QAAA,OAAO,YAAA;AAAA,MACT;AACE,QAAA,OAAO,YAAA;AAAA;AACX,EACF;AACF;ACtZO,IAAM,sBAAN,MAEP;AAAA,EACW,IAAA,GAAO,QAAA;AAAA,EACP,gBAAA,GAAmB,IAAA;AAAA,EAEpB,MAAA;AAAA,EACA,MAAA;AAAA,EAER,WAAA,CAAY,MAAA,GAAoC,EAAC,EAAGF,OAAAA,EAAsB;AACxE,IAAA,MAAM,MAAA,GAAS,MAAA,CAAO,MAAA,IAAU,OAAA,CAAQ,GAAA,CAAI,cAAA;AAC5C,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAI,MAAA,CAAO,EAAE,QAAQ,CAAA;AACnC,IAAA,IAAA,CAAK,MAAA,GAASA,OAAAA;AAAA,EAChB;AAAA,EAEA,MAAM,MAAA,CACJ,QAAA,EACA,OAAA,EACsB;AACtB,IAAA,IAAI,QAAA,CAAS,WAAW,CAAA,EAAG;AACzB,MAAA,MAAM,IAAI,MAAM,2BAA2B,CAAA;AAAA,IAC7C;AAGA,IAAA,MAAM,QAAA,GAAW,QAAA,CAAS,CAAC,CAAA,EAAG,KAAA;AAC9B,IAAA,MAAM,KAAA,GAAQ,uBAAA,CAAwB,QAAA,EAAU,QAAQ,CAAA;AAExD,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,yBAAA,EAA2B;AAAA,MAClD,cAAc,QAAA,CAAS,MAAA;AAAA,MACvB,UAAU,QAAA,IAAY,SAAA;AAAA,MACtB;AAAA,KACD,CAAA;AAGD,IAAA,MAAM,YAAA,GAAe,QAAA,CAClB,GAAA,CAAI,CAAC,KAAK,CAAA,KAAM;AAEf,MAAA,MAAM,WAAW,GAAA,CAAI,KAAA,GACjB,wBAAwB,GAAA,CAAI,KAAA,EAAO,QAAQ,CAAA,GAC3C,KAAA;AAGJ,MAAA,MAAM,WAAA,GACJ,IAAI,KAAA,IAAS,MAAA,CAAO,KAAK,
GAAA,CAAI,KAAK,EAAE,MAAA,GAAS,CAAA,GACzC,OAAO,OAAA,CAAQ,GAAA,CAAI,KAAK,CAAA,CAAE,GAAA,CAAI,CAAC,CAAC,IAAA,EAAM,IAAI,CAAA,MAAO;AAAA,QAC/C,IAAA,EAAM,UAAA;AAAA,QACN,QAAA,EAAU;AAAA,UACR,IAAA;AAAA,UACA,aAAa,IAAA,CAAK,WAAA;AAAA,UAClB,YAAY,IAAA,CAAK,WAAA,GACbC,UAAAA,CAAW,IAAA,CAAK,WAAW,CAAA,GAC3B;AAAA;AACN,QACA,CAAA,GACF,MAAA;AAGN,MAAA,IAAI,UAAA;AAMJ,MAAA,IAAI,IAAI,UAAA,EAAY;AAClB,QAAA,IAAI,GAAA,CAAI,eAAe,MAAA,EAAQ;AAC7B,UAAA,UAAA,GAAa,MAAA;AAAA,QACf,CAAA,MAAA,IAAW,GAAA,CAAI,UAAA,KAAe,UAAA,EAAY;AACxC,UAAA,UAAA,GAAa,UAAA;AAAA,QACf,CAAA,MAAA,IAAW,GAAA,CAAI,UAAA,KAAe,MAAA,EAAQ;AACpC,UAAA,UAAA,GAAa,MAAA;AAAA,QACf,CAAA,MAAA,IACE,OAAO,GAAA,CAAI,UAAA,KAAe,YAC1B,GAAA,CAAI,UAAA,CAAW,SAAS,MAAA,EACxB;AACA,UAAA,UAAA,GAAa;AAAA,YACX,IAAA,EAAM,UAAA;AAAA,YACN,QAAA,EAAU,EAAE,IAAA,EAAM,GAAA,CAAI,WAAW,QAAA;AAAS,WAC5C;AAAA,QACF;AAAA,MACF;AAEA,MAAA,OAAO,KAAK,SAAA,CAAU;AAAA,QACpB,SAAA,EAAW,GAAA,CAAI,QAAA,IAAY,CAAA,QAAA,EAAW,CAAC,CAAA,CAAA;AAAA,QACvC,MAAA,EAAQ,MAAA;AAAA,QACR,GAAA,EAAK,sBAAA;AAAA,QACL,IAAA,EAAM;AAAA,UACJ,KAAA,EAAO,QAAA;AAAA,UACP,QAAA,EAAU;AAAA,YACR,GAAI,GAAA,CAAI,MAAA,GACJ,CAAC,EAAE,IAAA,EAAM,QAAA,EAAmB,OAAA,EAAS,GAAA,CAAI,MAAA,EAAQ,CAAA,GACjD,EAAC;AAAA,YACL,EAAE,IAAA,EAAM,MAAA,EAAiB,OAAA,EAAS,IAAI,MAAA;AAAO,WAC/C;AAAA,UACA,UAAA,EAAY,IAAI,SAAA,IAAa,IAAA;AAAA,UAC7B,GAAI,GAAA,CAAI,WAAA,KAAgB,MAAA,IAAa;AAAA,YACnC,aAAa,GAAA,CAAI;AAAA,WACnB;AAAA,UACA,GAAI,WAAA,IAAe,EAAE,KAAA,EAAO,WAAA,EAAY;AAAA,UACxC,GAAI,UAAA,IAAc,EAAE,WAAA,EAAa,UAAA;AAAW;AAC9C,OACD,CAAA;AAAA,IACH,CAAC,CAAA,CACA,IAAA,CAAK,IAAI,CAAA;AAIZ,IAAA,MAAM,IAAA,GAAO,MAAM,IAAA,CAAK,MAAA,CAAO,MAAM,MAAA,CAAO;AAAA,MAC1C,MAAM,IAAI,IAAA,CAAK,CAAC,YAAY,GAAG,sBAAA,EAAwB;AAAA,QACrD,IAAA,EAAM;AAAA,OACP,CAAA;AAAA,MACD,OAAA,EAAS;AAAA,KACV,CAAA;AAED,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,OAAA,EAAS,4BAAA,EAA8B;AAAA,MACtD,QAAQ,IAAA,CAAK,EAAA;AAAA,MACb,UAAU,IAAA,CAAK;AAAA,KAChB,CAAA;AAGD,IAAA,MAAM,KAAA,GAAQ,MAAM,IAAA,CAAK,MAAA,CAAO,QAAQ,MAAA,CAAO;AAAA,MAC7C,eAAe,IAAA,CAAK,EAAA;AAAA,MACpB,QAAA,EAAU,sBAAA;AAAA,MACV,iBAAA,EAAmB,KAAA;AAAA;AAAA,MACnB,
UAAU,OAAA,EAAS;AAAA,KACpB,CAAA;AAED,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,wBAAA,EAA0B;AAAA,MACjD,SAAS,KAAA,CAAM,EAAA;AAAA,MACf,cAAc,QAAA,CAAS,MAAA;AAAA,MACvB,QAAQ,KAAA,CAAM;AAAA,KACf,CAAA;AAED,IAAA,OAAO;AAAA,MACL,IAAI,KAAA,CAAM,EAAA;AAAA,MACV,UAAU,IAAA,CAAK,IAAA;AAAA,MACf,cAAc,QAAA,CAAS,MAAA;AAAA,MACvB,SAAA,EAAW,IAAI,IAAA,CAAK,KAAA,CAAM,aAAa,GAAI,CAAA;AAAA,MAC3C,QAAA,EAAU;AAAA,QACR,KAAA;AAAA,QACA,aAAa,IAAA,CAAK,EAAA;AAAA,QAClB,cAAc,KAAA,CAAM,cAAA;AAAA,QACpB,aAAa,KAAA,CAAM,aAAA;AAAA,QACnB,GAAG,OAAA,EAAS;AAAA;AACd,KACF;AAAA,EACF;AAAA,EAEA,MAAM,YAAY,MAAA,EAA2C;AAC3D,IAAA,MAAM,QAAQ,MAAM,IAAA,CAAK,OAAO,OAAA,CAAQ,QAAA,CAAS,OAAO,EAAE,CAAA;AAE1D,IAAA,MAAM,gBAAgB,KAAA,CAAM,cAAA;AAC5B,IAAA,MAAM,cAAA,GAAiB,eAAe,SAAA,IAAa,CAAA;AACnD,IAAA,MAAM,WAAA,GAAc,eAAe,MAAA,IAAU,CAAA;AAC7C,IAAA,MAAM,UAAA,GAAa,aAAA,EAAe,KAAA,IAAS,MAAA,CAAO,YAAA;AAIlD,IAAA,IAAI,KAAA,GAAQ,IAAA,CAAK,SAAA,CAAU,KAAA,CAAM,MAAM,CAAA;AACvC,IAAA,IAAI,KAAA,KAAU,WAAA,IAAe,CAAC,KAAA,CAAM,cAAA,EAAgB;AAClD,MAAA,IAAA,CAAK,MAAA,EAAQ,GAAA;AAAA,QACX,MAAA;AAAA,QACA,qDAAA;AAAA,QACA;AAAA,UACE,SAAS,MAAA,CAAO,EAAA;AAAA,UAChB,QAAQ,KAAA,CAAM;AAAA;AAChB,OACF;AACA,MAAA,KAAA,GAAQ,YAAA;AAAA,IACV;AAEA,IAAA,MAAM,MAAA,GAAsB;AAAA,MAC1B,KAAA;AAAA,MACA,gBAAgB,cAAA,GAAiB,WAAA;AAAA,MACjC,UAAA;AAAA,MACA,cAAA,EAAgB,cAAA;AAAA,MAChB,WAAA;AAAA,MACA,KAAA,EAAO,KAAA,CAAM,MAAA,EAAQ,IAAA,GAAO,CAAC,CAAA,EAAG;AAAA,KAClC;AAEA,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,2BAAA,EAA6B;AAAA,MACpD,SAAS,MAAA,CAAO,EAAA;AAAA,MAChB,WAAW,KAAA,CAAM,MAAA;AAAA,MACjB,aAAa,MAAA,CAAO,KAAA;AAAA,MACpB,WAAW,MAAA,CAAO,cAAA;AAAA,MAClB,OAAO,MAAA,CAAO,UAAA;AAAA,MACd,cAAc,KAAA,CAAM;AAAA,KACrB,CAAA;AAED,IAAA,OAAO,MAAA;AAAA,EACT;AAAA,EAEA,MAAM,WAAW,MAAA,EAAgD;AAC/D,IAAA,MAAM,QAAQ,MAAM,IAAA,CAAK,OAAO,OAAA,CAAQ,QAAA,CAAS,OAAO,EAAE,CAAA;AAE1D,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,mCAAA,EAAqC;AAAA,MAC5D,SAAS,MAAA,CAAO,EAAA;AAAA,MAChB,QAAQ,KAAA,CAAM,MAAA;AAAA,MACd,cAAc,KAAA,CAAM,cAAA;AAAA,MACpB,aAAa,KAAA,CAAM;AAAA,KACpB,CAAA;AAED,IAAA,IAAI,KAAA,CAAM,MAAA,KAAW,WAAA,IAAe,KAA
A,CAAM,WAAW,QAAA,EAAU;AAC7D,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,2BAAA,EAA8B,KAAA,CAAM,MAAM,CAAA,CAAE,CAAA;AAAA,IAC9D;AAGA,IAAA,IAAI,KAAA,CAAM,WAAW,QAAA,EAAU;AAC7B,MAAA,MAAM,eACJ,KAAA,CAAM,MAAA,EAAQ,IAAA,GAAO,CAAC,GAAG,OAAA,IAAW,qBAAA;AACtC,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,cAAA,EAAiB,YAAY,CAAA,CAAE,CAAA;AAAA,IACjD;AAEA,IAAA,IAAI,CAAC,MAAM,cAAA,EAAgB;AAEzB,MAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,OAAA,EAAS,oCAAA,EAAsC;AAAA,QAC9D,SAAS,MAAA,CAAO,EAAA;AAAA,QAChB,QAAQ,KAAA,CAAM,MAAA;AAAA,QACd,eAAe,KAAA,CAAM,cAAA;AAAA,QACrB,QAAQ,KAAA,CAAM;AAAA,OACf,CAAA;AACD,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,CAAA,yCAAA,EAA4C,MAAM,MAAM,CAAA,kBAAA,EAAqB,KAAK,SAAA,CAAU,KAAA,CAAM,cAAc,CAAC,CAAA;AAAA,OACnH;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,iCAAA,EAAmC;AAAA,MAC1D,SAAS,MAAA,CAAO,EAAA;AAAA,MAChB,cAAc,KAAA,CAAM;AAAA,KACrB,CAAA;AAGD,IAAA,MAAM,cAAc,MAAM,IAAA,CAAK,OAAO,KAAA,CAAM,OAAA,CAAQ,MAAM,cAAc,CAAA;AACxE,IAAA,MAAM,IAAA,GAAO,MAAM,WAAA,CAAY,IAAA,EAAK;AACpC,IAAA,MAAM,KAAA,GAAQ,KAAK,IAAA,EAAK,CAAE,MAAM,IAAI,CAAA,CAAE,OAAO,OAAO,CAAA;AAEpD,IAAA,MAAM,UAA4B,KAAA,CAAM,GAAA;AAAA,MACtC,CAAC,MAAc,KAAA,KAAkB;AAC/B,QAAA,IAAI;AACF,UAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,IAAI,CAAA;AAC9B,UAAA,MAAM,QAAA,GAAW,OAAO,QAAA,EAAU,IAAA;AAClC,UAAA,MAAM,MAAA,GAAS,QAAA,EAAU,OAAA,GAAU,CAAC,CAAA;AAEpC,UAAA,IAAI,OAAO,KAAA,EAAO;AAChB,YAAA,OAAO;AAAA,cACL,KAAA;AAAA,cACA,UAAU,MAAA,CAAO,SAAA;AAAA,cACjB,IAAA,EAAM,EAAA;AAAA,cACN,WAAA,EAAa,CAAA;AAAA,cACb,YAAA,EAAc,CAAA;AAAA,cACd,KAAA,EAAO,MAAA,CAAO,KAAA,CAAM,OAAA,IAAW;AAAA,aACjC;AAAA,UACF;AAEA,UAAA,OAAO;AAAA,YACL,KAAA;AAAA,YACA,UAAU,MAAA,CAAO,SAAA;AAAA,YACjB,IAAA,EAAM,MAAA,EAAQ,OAAA,EAAS,OAAA,IAAW,EAAA;AAAA,YAClC,WAAA,EAAa,QAAA,EAAU,KAAA,EAAO,aAAA,IAAiB,CAAA;AAAA,YAC/C,YAAA,EAAc,QAAA,EAAU,KAAA,EAAO,iBAAA,IAAqB;AAAA,WACtD;AAAA,QACF,SAAS,KAAA,EAAO;AACd,UAAA,OAAO;AAAA,YACL,KAAA;AAAA,YACA,QAAA,EAAU,MAAA;AAAA,YACV,IAAA,EAAM,EAAA;AAAA,YACN,WAAA,EAAa,CAAA;AAAA,YACb,YAAA,EAAc,CAAA;AAAA,YACd,OAAO,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK;AAAA,WAC9D;AAAA,QACF;AAAA,MACF;AAAA,KACF;
AAEA,IAAA,IAAA,CAAK,MAAA,EAAQ,GAAA,CAAI,MAAA,EAAQ,gCAAA,EAAkC;AAAA,MACzD,SAAS,MAAA,CAAO,EAAA;AAAA,MAChB,aAAa,OAAA,CAAQ,MAAA;AAAA,MACrB,YAAA,EAAc,QAAQ,MAAA,CAAO,CAAC,MAAM,CAAC,CAAA,CAAE,KAAK,CAAA,CAAE,MAAA;AAAA,MAC9C,YAAY,OAAA,CAAQ,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,KAAK,CAAA,CAAE;AAAA,KAC5C,CAAA;AAED,IAAA,OAAO,OAAA;AAAA,EACT;AAAA,EAEA,MAAM,OAAO,MAAA,EAAoC;AAC/C,IAAA,MAAM,IAAA,CAAK,MAAA,CAAO,OAAA,CAAQ,MAAA,CAAO,OAAO,EAAE,CAAA;AAC1C,IAAA,IAAA,CAAK,MAAA,EAAQ,IAAI,MAAA,EAAQ,wBAAA,EAA0B,EAAE,OAAA,EAAS,MAAA,CAAO,IAAI,CAAA;AAAA,EAC3E;AAAA,EAEQ,UAAU,MAAA,EAA4B;AAC5C,IAAA,QAAQ,MAAA;AAAQ,MACd,KAAK,WAAA;AACH,QAAA,OAAO,WAAA;AAAA,MACT,KAAK,QAAA;AAAA,MACL,KAAK,SAAA;AACH,QAAA,OAAO,QAAA;AAAA,MACT,KAAK,YAAA;AAAA,MACL,KAAK,WAAA;AACH,QAAA,OAAO,WAAA;AAAA,MACT,KAAK,YAAA;AAAA,MACL,KAAK,aAAA;AAAA,MACL,KAAK,YAAA;AACH,QAAA,OAAO,YAAA;AAAA,MACT;AACE,QAAA,OAAO,SAAA;AAAA;AACX,EACF;AACF;AC/SA,IAAM,MAAA,GAAS,aAAa,UAAU,CAAA;AAGtC,IAAM,4BAAA,GAA+B,GAAA;AAgTrC,SAAS,iBAAiB,WAAA,EAA0B;AAClD,EAAA,IAAI,WAAA,CAAY,aAAa,YAAA,EAAc;AAKzC,IAAA,OAAO,UAAA,CAAW,YAAY,EAAA,EAAI;AAAA,MAChC,SAAA,EAAW;AAAA,QACT,QAAA,EAAU;AAAA,UACR,IAAA,EAAM,YAAA;AAAA,UACN,kBAAA,EAAoB,IAAA;AAAA,UACpB,SAAA,EAAW;AAAA,YACT,QAAQ,WAAA,CAAY,mBAAA;AAAA,YACpB,YAAY,WAAA,CAAY;AAAA;AAC1B;AACF;AACF,KACD,CAAA;AAAA,EACH;AACA,EAAA,OAAO,MAAA,CAAO,YAAY,EAAE,CAAA;AAC9B;AAEA,SAAS,yBAAA,CACP,QAAA,EACA,WAAA,EACA,YAAA,EACA,UAAmB,KAAA,EACX;AACR,EAAA,MAAM,KAAA,GAAQ,SAAS,QAAQ,CAAA;AAC/B,EAAA,MAAM,QAAA,GAAW,aAAA,CAAc,QAAA,EAAU,WAAA,EAAa,YAAY,CAAA;AAElE,EAAA,IAAI,OAAA,IAAW,MAAM,oBAAA,EAAsB;AACzC,IAAA,OAAO,QAAA,CAAS,SAAA,IAAa,CAAA,GAAI,KAAA,CAAM,oBAAA,GAAuB,GAAA,CAAA;AAAA,EAChE;AAEA,EAAA,OAAO,QAAA,CAAS,SAAA;AAClB;AAMA,IAAM,YAAA,GAAN,MAAM,aAAA,CAAiC;AAAA,EAC5B,KAAA;AAAA,EACQ,YAAA;AAAA,EACA,UAAA;AAAA,EACA,UAAA;AAAA,EAEjB,WAAA,CACE,KAAA,EACA,YAAA,EACA,UAAA,EACA;AACA,IAAA,IAAI,CAAC,YAAA,EAAc;AACjB,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAA,CAAK,KAAA,GAAQ,KAAA;AACb,IAAA,IAAA,CAAK,YAAA,GAAe,YAAA;AACpB,IAAA,IAAA,CAAK,UAAA,GAAa,UAAA;AAGlB,IAAA,IAAI
,UAAA,EAAY;AACd,MAAA,IAAA,CAAK,UAAA,GAAa,CAAC,KAAA,EAAO,OAAA,EAAS,IAAA,KAAS;AAE1C,QAAA,UAAA,CACG,SAAA,CAAU;AAAA,UACT,eAAe,UAAA,CAAW,aAAA;AAAA,UAC1B,iBAAiB,UAAA,CAAW,aAAA;AAAA,UAC5B,KAAA;AAAA,UACA,OAAA;AAAA,UACA,QAAA,EAAU;AAAA,SACX,EACA,KAAA,CAAM,CAAC,QAAQ,MAAA,CAAO,KAAA,CAAM,wBAAA,EAA0B,GAAG,CAAC,CAAA;AAE7D,QAAA,MAAA,CAAO,KAAA,CAAM,CAAA,CAAA,EAAI,KAAK,CAAA,EAAA,EAAK,OAAO,CAAA,CAAA,EAAI,IAAA,GAAO,IAAA,CAAK,SAAA,CAAU,IAAI,CAAA,GAAI,EAAE,CAAA;AAAA,MACxE,CAAA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,YAAA,CACJ,QAAA,EACA,MAAA,EACA,OAAA,GAA+B,EAAC,EACT;AACvB,IAAA,MAAM,WAAA,GAAc,SAAS,QAAQ,CAAA;AACrC,IAAA,MAAM,KAAA,GAAQ,iBAAiB,WAAW,CAAA;AAC1C,IAAA,MAAM,SAAA,GAAY,KAAK,GAAA,EAAI;AAG3B,IAAA,MAAM,YAAA,GAAe,KAAA,CAAM,OAAA,CAAQ,MAAM,CAAA;AACzC,IAAA,MAAM,QAAA,GAAW,QAAQ,KAAA,KAAU,MAAA;AACnC,IAAA,MAAM,eAAA,GAAkB,QAAQ,mBAAA,KAAwB,MAAA;AAGxD,IAAA,MAAM,eAAe,YAAA,GAChB,MAAA,CACE,OAAO,CAAC,CAAA,KAAqB,EAAE,IAAA,KAAS,MAAM,EAC9C,GAAA,CAAI,CAAC,MAAM,CAAA,CAAE,IAAI,EACjB,IAAA,CAAK,IAAI,KAAK,sBAAA,GAChB,MAAA;AAGL,IAAA,IAAI,YAAY,eAAA,EAAiB;AAC/B,MAAA,MAAA,CAAO,KAAA;AAAA,QACL,iCAAiC,QAAQ,CAAA,kBAAA,EAAqB,eAAe,CAAA,YAAA,EAAe,WAAW,MAAA,CAAO,IAAA,CAAK,OAAA,CAAQ,KAAA,IAAS,EAAE,CAAA,CAAE,IAAA,CAAK,IAAI,IAAI,MAAM,CAAA;AAAA,OAC7J;AAAA,IACF;AAGA,IAAA,MAAM,mBAAA,GAAsB,OAAA,CAAQ,YAAA,GAChC,OAAO,UAAA,KAAmC;AAExC,MAAA,IAAI,WAAW,WAAA,IAAe,KAAA,CAAM,OAAA,CAAQ,UAAA,CAAW,WAAW,CAAA,EAAG;AACnE,QAAA,KAAA,MAAW,UAAA,IAAc,WAAW,WAAA,EAAa;AAC/C,UAAA,MAAM,MAAA,GAAS,UAAA;AAMf,UAAA,IAAI,OAAO,QAAA,EAAU;AACnB,YAAA,MAAM,aAAa,CAAA,EAAG,IAAA,CAAK,KAAK,CAAA,MAAA,EAAS,OAAO,QAAQ,CAAA,CAAA;AACxD,YAAA,IAAA,CAAK,aAAa,OAAA,CAAQ;AAAA,cACxB,KAAA,EAAO,UAAA;AAAA,cACP,QAAA,EAAU,MAAA;AAAA,cACV,QAAA;AAAA,cACA,SAAS,WAAA,CAAY,EAAA;AAAA,cACrB,MAAA,EAAQ,KAAK,SAAA,CAAU,MAAA,CAAO,SAAS,EAAC,EAAG,MAAM,CAAC,CAAA;AAAA,cAClD,QAAA,EAAU,KAAK,SAAA,CAAU,MAAA,CAAO,UAAU,EAAC,EAAG,MAAM,CAAC,CAAA;AAAA,cACrD,WAAA,EAAa,UAAA,CAAW,KAAA,CAAM,WAAA,IAAe,CAAA;AAAA,cAC7C,YAAA,EAAc,UAAA,CAAW,KAAA,CAAM,YAAA,IAAgB,CAAA;AAAA,cAC/C,IAAA,EAAM,yBAAA;AAAA,gBACJ,QAAA;AAAA,gBACA,UAAA,
CAAW,MAAM,WAAA,IAAe,CAAA;AAAA,gBAChC,UAAA,CAAW,MAAM,YAAA,IAAgB;AAAA,eACnC;AAAA,cACA,QAAA,EAAU;AAAA,gBACR,UAAU,MAAA,CAAO,QAAA;AAAA,gBACjB,YAAY,MAAA,CAAO,UAAA;AAAA,gBACnB,cAAc,UAAA,CAAW;AAAA;AAC3B,aACD,CAAA;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,OAAA,CAAQ,eAAe,UAAU,CAAA;AAAA,IACzC,CAAA,GACA,MAAA;AAGJ,IAAA,MAAM,WAAA,GAAc;AAAA,MAClB,KAAA;AAAA,MACA,WAAA,EAAa,QAAQ,WAAA,IAAe,GAAA;AAAA,MACpC,iBAAiB,OAAA,CAAQ,SAAA;AAAA;AAAA,MAEzB,GAAI,QAAA,IAAY;AAAA,QACd,OAAO,OAAA,CAAQ,KAAA;AAAA;AAAA,QAEf,YAAY,OAAA,CAAQ,UAAA;AAAA,QAGpB,UAAU,OAAA,CAAQ,QAAA;AAAA,QAClB,YAAA,EAAc;AAAA,OAGhB;AAAA;AAAA,MAEA,GAAI,eAAA,IAAmB;AAAA,QACrB,qBAAqB,OAAA,CAAQ;AAAA;AAC/B,KACF;AAGA,IAAA,MAAA,CAAO,MAAM,CAAA,oBAAA,CAAA,EAAwB;AAAA,MACnC,KAAA,EAAO,QAAA;AAAA,MACP,SAAS,WAAA,CAAY,EAAA;AAAA,MACrB,MAAA,EACE,aAAa,SAAA,CAAU,CAAA,EAAG,GAAG,CAAA,IAC5B,YAAA,CAAa,MAAA,GAAS,GAAA,GAAM,KAAA,GAAQ,EAAA,CAAA;AAAA,MACvC,WAAA,EAAa,QAAQ,WAAA,IAAe,GAAA;AAAA,MACpC,WAAW,OAAA,CAAQ,SAAA;AAAA,MACnB,QAAA;AAAA,MACA,eAAA;AAAA,MACA;AAAA,KACD,CAAA;AAED,IAAA,IAAI;AAGF,MAAA,MAAM,MAAA,GAAS,YAAA,GACX,MAAM,YAAA,CAAa;AAAA,QACjB,GAAG,WAAA;AAAA,QACH,QAAA,EAAU;AAAA,UACR;AAAA,YACE,IAAA,EAAM,MAAA;AAAA,YACN,SAAU,MAAA,CAAyB,GAAA;AAAA,cAAI,CAAC,IAAA,KACtC,IAAA,CAAK,IAAA,KAAS,MAAA,GACV,EAAE,IAAA,EAAM,MAAA,EAAiB,IAAA,EAAM,IAAA,CAAK,IAAA,EAAK,GACzC;AAAA,gBACE,IAAA,EAAM,MAAA;AAAA,gBACN,MAAM,IAAA,CAAK,IAAA;AAAA,gBACX,WAAW,IAAA,CAAK,SAAA;AAAA,gBAChB,GAAI,IAAA,CAAK,QAAA,IAAY,EAAE,QAAA,EAAU,KAAK,QAAA;AAAS;AACjD;AACN;AACF;AACF,OACM,CAAA,GACR,MAAM,YAAA,CAAa;AAAA,QACjB,GAAG,WAAA;AAAA,QACH;AAAA,OACM,CAAA;AAGZ,MAAA,IAAI,YAAY,eAAA,EAAiB;AAC/B,QAAA,MAAM,SAAA,GAAY,MAAA;AAClB,QAAA,MAAA,CAAO,KAAA;AAAA,UACL,CAAA,gCAAA,EAAmC,SAAA,CAAU,KAAA,EAAO,MAAA,IAAU,CAAC,CAAA,YAAA,EAAe,SAAA,CAAU,MAAA,KAAW,KAAA,CAAS,CAAA,eAAA,EAAkB,MAAA,CAAO,YAAY,CAAA;AAAA,SACnJ;AAAA,MACF;AAEA,MAAA,MAAM,WAAA,GAAc,MAAA,CAAO,KAAA,EAAO,WAAA,IAAe,CAAA;AACjD,MAAA,MAAM,YAAA,GAAe,MAAA,CAAO,KAAA,EAAO,YAAA,IAAgB,CAAA;AACnD,MAAA,MAAM,IAAA,GAAO,yBAAA;AAAA,QACX,QAAA;AAAA,QACA,WAAA;AAAA,QACA;AAAA,OACF;AACA,MAAA,
MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,SAAA;AAGhC,MAAA,IAAA,CAAK,aAAa,OAAA,CAAQ;AAAA,QACxB,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,QAAA,EAAU,MAAA;AAAA,QACV,QAAA;AAAA,QACA,SAAS,WAAA,CAAY,EAAA;AAAA,QACrB,MAAA,EAAQ,YAAA;AAAA,QACR,UAAU,MAAA,CAAO,IAAA;AAAA,QACjB,WAAA;AAAA,QACA,YAAA;AAAA,QACA,IAAA;AAAA,QACA,QAAA,EAAU;AAAA,UACR,aAAa,OAAA,CAAQ,WAAA;AAAA,UACrB,WAAW,OAAA,CAAQ,SAAA;AAAA,UACnB,cAAc,MAAA,CAAO,YAAA;AAAA,UACrB,UAAA;AAAA,UACA,YAAA;AAAA,UACA,GAAI,MAAA,CAAO,YAAA,KAAiB,OAAA,IAAW,EAAE,QAAQ,OAAA,EAAQ;AAAA,UACzD,GAAI,YAAA,IAAgB;AAAA,YAClB,UAAA,EAAa,MAAA,CACV,MAAA,CAAO,CAAC,CAAA,KAAsB,CAAA,CAAE,IAAA,KAAS,MAAM,CAAA,CAC/C,GAAA,CAAI,CAAC,CAAA,KAAM,EAAE,SAAS;AAAA;AAC3B;AACF,OACD,CAAA;AAGD,MAAA,MAAA,CAAO,MAAM,CAAA,qBAAA,CAAA,EAAyB;AAAA,QACpC,KAAA,EAAO,QAAA;AAAA,QACP,QAAA,EACE,MAAA,CAAO,IAAA,CAAK,SAAA,CAAU,CAAA,EAAG,GAAG,CAAA,IAC3B,MAAA,CAAO,IAAA,CAAK,MAAA,GAAS,GAAA,GAAM,KAAA,GAAQ,EAAA,CAAA;AAAA,QACtC,WAAA;AAAA,QACA,YAAA;AAAA,QACA,IAAA,EAAM,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAAA,QACpB,UAAA;AAAA,QACA,cAAc,MAAA,CAAO;AAAA,OACtB,CAAA;AAED,MAAA,OAAO;AAAA,QACL,MAAM,MAAA,CAAO,IAAA;AAAA,QACb,WAAA;AAAA,QACA,YAAA;AAAA,QACA,IAAA;AAAA;AAAA,QAEA,GAAI,eAAA,IAAmB;AAAA,UACrB,QAAS,MAAA,CAAgC;AAAA;AAC3C,OACF;AAAA,IACF,SAAS,KAAA,EAAO;AAEd,MAAA,MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,SAAA;AAChC,MAAA,MAAM,eACJ,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AAGvD,MAAA,MAAA,CAAO,MAAM,CAAA,kBAAA,CAAA,EAAsB;AAAA,QACjC,KAAA,EAAO,QAAA;AAAA,QACP,KAAA,EAAO,YAAA;AAAA,QACP;AAAA,OACD,CAAA;AAED,MAAA,IAAA,CAAK,aAAa,OAAA,CAAQ;AAAA,QACxB,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,QAAA,EAAU,MAAA;AAAA,QACV,QAAA;AAAA,QACA,SAAS,WAAA,CAAY,EAAA;AAAA,QACrB,MAAA,EAAQ,YAAA;AAAA,QACR,QAAA,EAAU,EAAA;AAAA,QACV,WAAA,EAAa,CAAA;AAAA,QACb,YAAA,EAAc,CAAA;AAAA,QACd,IAAA,EAAM,CAAA;AAAA,QACN,QAAA,EAAU;AAAA,UACR,aAAa,OAAA,CAAQ,WAAA;AAAA,UACrB,WAAW,OAAA,CAAQ,SAAA;AAAA,UACnB,YAAA,EAAc,OAAA;AAAA,UACd,UAAA;AAAA,UACA,YAAA;AAAA,UACA,MAAA,EAAQ,OAAA;AAAA,UACR,KAAA,EAAO;AAAA;AACT,OACD,CAAA;AAED,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,cAAA,CACJ,QAAA,
EACA,QACA,MAAA,EACA,OAAA,GAAyB,EAAC,EACiB;AAC3C,IAAA,MAAM,WAAA,GAAc,SAAS,QAAQ,CAAA;AACrC,IAAA,MAAM,KAAA,GAAQ,iBAAiB,WAAW,CAAA;AAC1C,IAAA,MAAM,SAAA,GAAY,KAAK,GAAA,EAAI;AAG3B,IAAA,MAAM,YAAA,GAAe,KAAA,CAAM,OAAA,CAAQ,MAAM,CAAA;AACzC,IAAA,MAAM,QAAA,GAAW,QAAQ,KAAA,KAAU,MAAA;AAGnC,IAAA,MAAM,eAAe,YAAA,GAChB,MAAA,CACE,OAAO,CAAC,CAAA,KAAqB,EAAE,IAAA,KAAS,MAAM,EAC9C,GAAA,CAAI,CAAC,MAAM,CAAA,CAAE,IAAI,EACjB,IAAA,CAAK,IAAI,KAAK,sBAAA,GAChB,MAAA;AAIL,IAAA,MAAM,WAAA,GAAc;AAAA,MAClB,KAAA;AAAA,MACA,MAAA,EAAQ,MAAA,CAAO,MAAA,CAAO,EAAE,QAAQ,CAAA;AAAA,MAChC,WAAA,EAAa,QAAQ,WAAA,IAAe,CAAA;AAAA,MACpC,iBAAiB,OAAA,CAAQ,SAAA;AAAA;AAAA,MAEzB,GAAI,QAAA,IAAY;AAAA,QACd,OAAO,OAAA,CAAQ,KAAA;AAAA,QACf,UAAU,OAAA,CAAQ,QAAA;AAAA,QAClB,cAAc,OAAA,CAAQ;AAAA;AACxB,KACF;AAGA,IAAA,MAAA,CAAO,MAAM,CAAA,sBAAA,CAAA,EAA0B;AAAA,MACrC,KAAA,EAAO,QAAA;AAAA,MACP,SAAS,WAAA,CAAY,EAAA;AAAA,MACrB,MAAA,EACE,aAAa,SAAA,CAAU,CAAA,EAAG,GAAG,CAAA,IAC5B,YAAA,CAAa,MAAA,GAAS,GAAA,GAAM,KAAA,GAAQ,EAAA,CAAA;AAAA,MACvC,WAAA,EAAa,QAAQ,WAAA,IAAe,CAAA;AAAA,MACpC,WAAW,OAAA,CAAQ,SAAA;AAAA,MACnB,QAAA;AAAA,MACA;AAAA,KACD,CAAA;AAED,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,YAAA,GACX,MAAM,YAAA,CAAa;AAAA,QACjB,GAAG,WAAA;AAAA,QACH,QAAA,EAAU;AAAA,UACR;AAAA,YACE,IAAA,EAAM,MAAA;AAAA,YACN,SAAU,MAAA,CAAyB,GAAA;AAAA,cAAI,CAAC,IAAA,KACtC,IAAA,CAAK,IAAA,KAAS,MAAA,GACV,EAAE,IAAA,EAAM,MAAA,EAAiB,IAAA,EAAM,IAAA,CAAK,IAAA,EAAK,GACzC;AAAA,gBACE,IAAA,EAAM,MAAA;AAAA,gBACN,MAAM,IAAA,CAAK,IAAA;AAAA,gBACX,WAAW,IAAA,CAAK,SAAA;AAAA,gBAChB,GAAI,IAAA,CAAK,QAAA,IAAY,EAAE,QAAA,EAAU,KAAK,QAAA;AAAS;AACjD;AACN;AACF;AACF,OACD,CAAA,GACD,MAAM,YAAA,CAAa;AAAA,QACjB,GAAG,WAAA;AAAA,QACH;AAAA,OACD,CAAA;AAEL,MAAA,MAAM,WAAA,GAAc,MAAA,CAAO,KAAA,EAAO,WAAA,IAAe,CAAA;AACjD,MAAA,MAAM,YAAA,GAAe,MAAA,CAAO,KAAA,EAAO,YAAA,IAAgB,CAAA;AACnD,MAAA,MAAM,IAAA,GAAO,yBAAA;AAAA,QACX,QAAA;AAAA,QACA,WAAA;AAAA,QACA;AAAA,OACF;AACA,MAAA,MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,SAAA;AAGhC,MAAA,IAAA,CAAK,aAAa,OAAA,CAAQ;AAAA,QACxB,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,QAAA,EAAU,QAAA;AAAA,QACV,QAAA;AAAA,QACA,SAAS,WAAA,CAAY,
EAAA;AAAA,QACrB,MAAA,EAAQ,YAAA;AAAA,QACR,UAAU,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,MAAA,EAAQ,MAAM,CAAC,CAAA;AAAA,QAC/C,WAAA;AAAA,QACA,YAAA;AAAA,QACA,IAAA;AAAA,QACA,QAAA,EAAU;AAAA,UACR,aAAa,OAAA,CAAQ,WAAA;AAAA,UACrB,WAAW,OAAA,CAAQ,SAAA;AAAA,UACnB,cAAc,MAAA,CAAO,YAAA;AAAA,UACrB,UAAA;AAAA,UACA,YAAA;AAAA,UACA,GAAI,MAAA,CAAO,YAAA,KAAiB,OAAA,IAAW,EAAE,QAAQ,OAAA,EAAQ;AAAA,UACzD,GAAI,YAAA,IAAgB;AAAA,YAClB,UAAA,EAAa,MAAA,CACV,MAAA,CAAO,CAAC,CAAA,KAAsB,CAAA,CAAE,IAAA,KAAS,MAAM,CAAA,CAC/C,GAAA,CAAI,CAAC,CAAA,KAAM,EAAE,SAAS;AAAA;AAC3B;AACF,OACD,CAAA;AAGD,MAAA,MAAM,WAAA,GAAc,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,MAAM,CAAA;AAChD,MAAA,MAAA,CAAO,MAAM,CAAA,uBAAA,CAAA,EAA2B;AAAA,QACtC,KAAA,EAAO,QAAA;AAAA,QACP,QAAA,EACE,YAAY,SAAA,CAAU,CAAA,EAAG,GAAG,CAAA,IAC3B,WAAA,CAAY,MAAA,GAAS,GAAA,GAAM,KAAA,GAAQ,EAAA,CAAA;AAAA,QACtC,WAAA;AAAA,QACA,YAAA;AAAA,QACA,IAAA,EAAM,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAAA,QACpB,UAAA;AAAA,QACA,cAAc,MAAA,CAAO;AAAA,OACtB,CAAA;AAED,MAAA,OAAO;AAAA,QACL,QAAQ,MAAA,CAAO,MAAA;AAAA,QACf,WAAA;AAAA,QACA,YAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF,SAAS,KAAA,EAAO;AAEd,MAAA,MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,SAAA;AAChC,MAAA,MAAM,eACJ,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AAGvD,MAAA,MAAA,CAAO,MAAM,CAAA,oBAAA,CAAA,EAAwB;AAAA,QACnC,KAAA,EAAO,QAAA;AAAA,QACP,KAAA,EAAO,YAAA;AAAA,QACP;AAAA,OACD,CAAA;AAED,MAAA,IAAA,CAAK,aAAa,OAAA,CAAQ;AAAA,QACxB,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,QAAA,EAAU,QAAA;AAAA,QACV,QAAA;AAAA,QACA,SAAS,WAAA,CAAY,EAAA;AAAA,QACrB,MAAA,EAAQ,YAAA;AAAA,QACR,QAAA,EAAU,EAAA;AAAA,QACV,WAAA,EAAa,CAAA;AAAA,QACb,YAAA,EAAc,CAAA;AAAA,QACd,IAAA,EAAM,CAAA;AAAA,QACN,QAAA,EAAU;AAAA,UACR,aAAa,OAAA,CAAQ,WAAA;AAAA,UACrB,WAAW,OAAA,CAAQ,SAAA;AAAA,UACnB,YAAA,EAAc,OAAA;AAAA,UACd,UAAA;AAAA,UACA,YAAA;AAAA,UACA,MAAA,EAAQ,OAAA;AAAA,UACR,KAAA,EAAO;AAAA;AACT,OACD,CAAA;AAED,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,KAAA,CACJ,QAAA,EACA,IAAA,EACA,OAAA,GAAwB,EAAC,EACD;AACxB,IAAA,MAAM,WAAA,GAAc,SAAS,QAAQ,CAAA;AACrC,IAAA,MAAM,QAAQ,KAAA,CAAM,OAAA,CAAQ,IAAI,CAAA,GAAI,IAAA,GAAO,CAAC,IAAI,CAAA;AAChD,I
AAA,MAAM,SAAA,GAAY,KAAK,GAAA,EAAI;AAG3B,IAAA,MAAM,UAAA,GAAa,QAAQ,UAAA,IAAc,4BAAA;AAGzC,IAAA,MAAM,WAAA,GACJ,MAAM,MAAA,KAAW,CAAA,GACb,MAAM,CAAC,CAAA,CAAE,UAAU,CAAA,EAAG,GAAG,KAAK,KAAA,CAAM,CAAC,EAAE,MAAA,GAAS,GAAA,GAAM,QAAQ,EAAA,CAAA,GAC9D,CAAA,CAAA,EAAI,MAAM,MAAM,CAAA,OAAA,CAAA;AACtB,IAAA,MAAA,CAAO,MAAM,CAAA,aAAA,CAAA,EAAiB;AAAA,MAC5B,KAAA,EAAO,QAAA;AAAA,MACP,SAAS,WAAA,CAAY,EAAA;AAAA,MACrB,WAAW,KAAA,CAAM,MAAA;AAAA,MACjB,WAAA;AAAA,MACA,UAAA;AAAA,MACA,QAAA,EAAU,QAAQ,QAAA,IAAY;AAAA,KAC/B,CAAA;AAED,IAAA,IAAI;AAGF,MAAA,MAAM,aAAyB,EAAC;AAChC,MAAA,IAAI,gBAAA,GAAmB,CAAA;AAEvB,MAAA,KAAA,MAAW,KAAK,KAAA,EAAO;AAErB,QAAA,MAAM,gBAAA,GAAmB,WAAA,CAAY,EAAA,CAAG,OAAA,CAAQ,aAAa,EAAE,CAAA;AAE/D,QAAA,MAAM,MAAA,GAAS,MAAM,KAAA,CAAM;AAAA,UACzB,KAAA,EAAO,MAAA,CAAO,cAAA,CAAe,gBAAgB,CAAA;AAAA,UAC7C,KAAA,EAAO,CAAA;AAAA,UACP,eAAA,EAAiB;AAAA,YACf,MAAA,EAAQ;AAAA,cACN,oBAAA,EAAsB,UAAA;AAAA,cACtB,QAAA,EAAU,QAAQ,QAAA,IAAY;AAAA;AAChC;AACF,SACD,CAAA;AAED,QAAA,UAAA,CAAW,IAAA,CAAK,OAAO,SAAS,CAAA;AAChC,QAAA,gBAAA,IAAoB,MAAA,CAAO,OAAO,MAAA,IAAU,CAAA;AAAA,MAC9C;AAEA,MAAA,MAAM,YAAA,GAAe,CAAA;AACrB,MAAA,MAAM,IAAA,GAAO,yBAAA;AAAA,QACX,QAAA;AAAA,QACA,gBAAA;AAAA,QACA;AAAA,OACF;AACA,MAAA,MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,SAAA;AAEhC,MAAA,IAAA,CAAK,aAAa,OAAA,CAAQ;AAAA,QACxB,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,QAAA,EAAU,OAAA;AAAA,QACV,QAAA;AAAA,QACA,SAAS,WAAA,CAAY,EAAA;AAAA,QACrB,MAAA,EAAQ,MAAM,MAAA,KAAW,CAAA,GAAI,MAAM,CAAC,CAAA,GAAI,CAAA,CAAA,EAAI,KAAA,CAAM,MAAM,CAAA,OAAA,CAAA;AAAA,QACxD,QAAA,EAAU,CAAA,CAAA,EAAI,UAAA,CAAW,MAAM,gBAAgB,UAAU,CAAA,MAAA,CAAA;AAAA,QACzD,WAAA,EAAa,gBAAA;AAAA,QACb,YAAA;AAAA,QACA,IAAA;AAAA,QACA,QAAA,EAAU;AAAA,UACR,UAAU,OAAA,CAAQ,QAAA;AAAA,UAClB,WAAW,KAAA,CAAM,MAAA;AAAA,UACjB,UAAA;AAAA,UACA;AAAA;AACF,OACD,CAAA;AAGD,MAAA,MAAA,CAAO,MAAM,CAAA,cAAA,CAAA,EAAkB;AAAA,QAC7B,KAAA,EAAO,QAAA;AAAA,QACP,iBAAiB,UAAA,CAAW,MAAA;AAAA,QAC5B,UAAA;AAAA,QACA,WAAA,EAAa,gBAAA;AAAA,QACb,IAAA,EAAM,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAAA,QACpB;AAAA,OACD,CAAA;AAED,MAAA,OAAO;AAAA,QACL,SAAA,EAAW,WAAW,CAAC,CAAA;AAAA;AAAA,QACv
B,UAAA;AAAA;AAAA,QACA,UAAA;AAAA;AAAA,QACA,WAAA,EAAa,gBAAA;AAAA,QACb;AAAA,OACF;AAAA,IACF,SAAS,KAAA,EAAO;AAEd,MAAA,MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,SAAA;AAChC,MAAA,MAAM,eACJ,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AAGvD,MAAA,MAAA,CAAO,MAAM,CAAA,WAAA,CAAA,EAAe;AAAA,QAC1B,KAAA,EAAO,QAAA;AAAA,QACP,KAAA,EAAO,YAAA;AAAA,QACP;AAAA,OACD,CAAA;AAED,MAAA,IAAA,CAAK,aAAa,OAAA,CAAQ;AAAA,QACxB,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,QAAA,EAAU,OAAA;AAAA,QACV,QAAA;AAAA,QACA,SAAS,WAAA,CAAY,EAAA;AAAA,QACrB,MAAA,EAAQ,MAAM,MAAA,KAAW,CAAA,GAAI,MAAM,CAAC,CAAA,GAAI,CAAA,CAAA,EAAI,KAAA,CAAM,MAAM,CAAA,OAAA,CAAA;AAAA,QACxD,QAAA,EAAU,EAAA;AAAA,QACV,WAAA,EAAa,CAAA;AAAA,QACb,YAAA,EAAc,CAAA;AAAA,QACd,IAAA,EAAM,CAAA;AAAA,QACN,QAAA,EAAU;AAAA,UACR,UAAU,OAAA,CAAQ,QAAA;AAAA,UAClB,WAAW,KAAA,CAAM,MAAA;AAAA,UACjB,UAAA;AAAA,UACA,UAAA;AAAA,UACA,MAAA,EAAQ,OAAA;AAAA,UACR,KAAA,EAAO;AAAA;AACT,OACD,CAAA;AAED,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AAAA,EAEA,UAAA,CACE,QAAA,EACA,KAAA,EACA,OAAA,GAAyB,EAAC,EACV;AAChB,IAAA,MAAM,WAAA,GAAc,SAAS,QAAQ,CAAA;AACrC,IAAA,MAAM,KAAA,GAAQ,iBAAiB,WAAW,CAAA;AAC1C,IAAA,MAAM,SAAA,GAAY,KAAK,GAAA,EAAI;AAC3B,IAAA,MAAM,QAAA,GAAW,QAAQ,KAAA,KAAU,MAAA;AAGnC,IAAA,MAAM,YAAA,GACJ,QAAA,IAAY,KAAA,IAAS,KAAA,CAAM,MAAA,GACvB,MAAM,MAAA,GACN,IAAA,CAAK,SAAA,CAAU,KAAA,CAAM,QAAQ,CAAA;AAGnC,IAAA,IAAI,WAAA,GAAc,KAAA;AAGlB,IAAA,MAAM,QAAA,GAAW,CAAC,KAAA,KAAmB;AACnC,MAAA,IAAI,WAAA,EAAa;AACjB,MAAA,WAAA,GAAc,IAAA;AAEd,MAAA,MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,SAAA;AAChC,MAAA,MAAM,eACJ,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AAEvD,MAAA,IAAA,CAAK,aAAa,OAAA,CAAQ;AAAA,QACxB,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,QAAA,EAAU,QAAA;AAAA,QACV,QAAA;AAAA,QACA,SAAS,WAAA,CAAY,EAAA;AAAA,QACrB,MAAA,EAAQ,YAAA;AAAA,QACR,QAAA,EAAU,EAAA;AAAA,QACV,WAAA,EAAa,CAAA;AAAA,QACb,YAAA,EAAc,CAAA;AAAA,QACd,IAAA,EAAM,CAAA;AAAA,QACN,QAAA,EAAU;AAAA,UACR,aAAa,OAAA,CAAQ,WAAA;AAAA,UACrB,WAAW,OAAA,CAAQ,SAAA;AAAA,UACnB,UAAA;AAAA,UACA,MAAA,EAAQ,OAAA;AAAA,UACR,KAAA,EAAO,YAAA;AAAA,UACP,GAAI,MAAM,MAAA,GAAS,EAAE,QAAQ,KAAA,CAAM,MAAA,KA
AW;AAAC;AACjD,OACD,CAAA;AAAA,IACH,CAAA;AAGA,IAAA,MAAA,CAAO,MAAM,CAAA,kBAAA,CAAA,EAAsB;AAAA,MACjC,KAAA,EAAO,QAAA;AAAA,MACP,SAAS,WAAA,CAAY,EAAA;AAAA,MACrB,MAAA,EACE,aAAa,SAAA,CAAU,CAAA,EAAG,GAAG,CAAA,IAC5B,YAAA,CAAa,MAAA,GAAS,GAAA,GAAM,KAAA,GAAQ,EAAA,CAAA;AAAA,MACvC,WAAA,EAAa,QAAQ,WAAA,IAAe,GAAA;AAAA,MACpC,WAAW,OAAA,CAAQ,SAAA;AAAA,MACnB,QAAA;AAAA,MACA,SAAA,EAAW,CAAC,CAAC,KAAA,CAAM;AAAA,KACpB,CAAA;AAGD,IAAA,MAAM,UAAA,GAAa;AAAA,MACjB,KAAA;AAAA,MACA,WAAA,EAAa,QAAQ,WAAA,IAAe,GAAA;AAAA,MACpC,iBAAiB,OAAA,CAAQ,SAAA;AAAA,MACzB,GAAI,MAAM,MAAA,GAAS,EAAE,QAAQ,KAAA,CAAM,MAAA,KAAW,EAAC;AAAA;AAAA,MAE/C,GAAI,QAAA,IAAY;AAAA,QACd,OAAO,OAAA,CAAQ,KAAA;AAAA,QACf,UAAU,OAAA,CAAQ,QAAA;AAAA,QAClB,cAAc,OAAA,CAAQ;AAAA,OACxB;AAAA;AAAA,MAEA,OAAA,EAAS,CAAC,EAAE,KAAA,EAAM,KAA0B;AAC1C,QAAA,QAAA,CAAS,KAAK,CAAA;AAAA,MAChB;AAAA,KACF;AAEA,IAAA,MAAM,MAAA,GACJ,UAAA,IAAc,KAAA,IAAS,KAAA,CAAM,WACzB,UAAA,CAAW,EAAE,GAAG,UAAA,EAAY,QAAA,EAAU,KAAA,CAAM,QAAA,EAAU,IACtD,UAAA,CAAW;AAAA,MACT,GAAG,UAAA;AAAA,MACH,QAAS,KAAA,CAA6B;AAAA,KACvC,CAAA;AAEP,IAAA,IAAI,QAAA,GAAW,EAAA;AACf,IAAA,IAAI,UAAA,GAAa,CAAA;AACjB,IAAA,IAAI,cAAA,GAAiB,KAAA;AACrB,IAAA,IAAI,aAAA,GAAgB,KAAA;AACpB,IAAA,IAAI,WAAA,GAIO,IAAA;AAGX,IAAA,MAAM,cAAA,GAAwC;AAAA,MAC5C,CAAC,MAAA,CAAO,aAAa,GAAG,MAAM;AAC5B,QAAA,MAAM,MAAA,GAAS,MAAA,CAAO,UAAA,CAAW,MAAA,CAAO,aAAa,CAAA,EAAE;AACvD,QAAA,OAAO;AAAA,UACL,MAAM,IAAA,GAAO;AACX,YAAA,IAAI;AACF,cAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAC1C,cAAA,IAAI,IAAA,EAAM;AACR,gBAAA,cAAA,GAAiB,IAAA;AACjB,gBAAA,OAAO,EAAE,IAAA,EAAM,IAAA,EAAM,KAAA,EAAO,KAAA,CAAA,EAAU;AAAA,cACxC;AACA,cAAA,QAAA,IAAY,KAAA;AACZ,cAAA,UAAA,EAAA;AACA,cAAA,OAAA,CAAQ,UAAU,KAAK,CAAA;AACvB,cAAA,OAAO,EAAE,IAAA,EAAM,KAAA,EAAO,KAAA,EAAM;AAAA,YAC9B,SAAS,KAAA,EAAO;AAEd,cAAA,QAAA,CAAS,KAAK,CAAA;AACd,cAAA,MAAM,KAAA;AAAA,YACR;AAAA,UACF;AAAA,SACF;AAAA,MACF;AAAA,KACF;AAGA,IAAA,MAAM,WAAW,YAAY;AAE3B,MAAA,IAAI,iBAAiB,WAAA,EAAa;AAChC,QAAA,OAAO,WAAA;AAAA,MACT;AAGA,MAAA,IAAI,CAAC,cAAA,EAAgB;AACnB,QAAA,WAAA,MAAiB,KAAK,cAAA,EAAgB;AAAA,QAEtC;AAAA,MACF;AAEA,MAAA,M
AAM,KAAA,GAAQ,MAAM,MAAA,CAAO,KAAA;AAC3B,MAAA,MAAM,WAAA,GAAc,OAAO,WAAA,IAAe,CAAA;AAC1C,MAAA,MAAM,YAAA,GAAe,OAAO,YAAA,IAAgB,CAAA;AAC5C,MAAA,MAAM,IAAA,GAAO,yBAAA;AAAA,QACX,QAAA;AAAA,QACA,WAAA;AAAA,QACA;AAAA,OACF;AACA,MAAA,MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,SAAA;AAGhC,MAAA,IAAI,CAAC,aAAA,EAAe;AAClB,QAAA,aAAA,GAAgB,IAAA;AAChB,QAAA,WAAA,GAAc,EAAE,WAAA,EAAa,YAAA,EAAc,IAAA,EAAK;AAGhD,QAAA,MAAA,CAAO,MAAM,CAAA,mBAAA,CAAA,EAAuB;AAAA,UAClC,KAAA,EAAO,QAAA;AAAA,UACP,QAAA,EACE,SAAS,SAAA,CAAU,CAAA,EAAG,GAAG,CAAA,IAAK,QAAA,CAAS,MAAA,GAAS,GAAA,GAAM,KAAA,GAAQ,EAAA,CAAA;AAAA,UAChE,WAAA;AAAA,UACA,YAAA;AAAA,UACA,IAAA,EAAM,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAAA,UACpB,UAAA;AAAA,UACA;AAAA,SACD,CAAA;AAGD,QAAA,IAAA,CAAK,aAAa,OAAA,CAAQ;AAAA,UACxB,OAAO,IAAA,CAAK,KAAA;AAAA,UACZ,QAAA,EAAU,QAAA;AAAA,UACV,QAAA;AAAA,UACA,SAAS,WAAA,CAAY,EAAA;AAAA,UACrB,MAAA,EAAQ,YAAA;AAAA,UACR,QAAA,EAAU,QAAA;AAAA,UACV,WAAA;AAAA,UACA,YAAA;AAAA,UACA,IAAA;AAAA,UACA,QAAA,EAAU;AAAA,YACR,aAAa,OAAA,CAAQ,WAAA;AAAA,YACrB,WAAW,OAAA,CAAQ,SAAA;AAAA,YACnB,YAAA,EAAc,UAAA;AAAA,YACd,UAAA;AAAA,YACA,GAAI,MAAM,MAAA,GAAS,EAAE,QAAQ,KAAA,CAAM,MAAA,KAAW;AAAC;AACjD,SACD,CAAA;AAAA,MACH;AAEA,MAAA,OAAO,WAAA,IAAe,EAAE,WAAA,EAAa,YAAA,EAAc,IAAA,EAAK;AAAA,IAC1D,CAAA;AAEA,IAAA,OAAO;AAAA,MACL,MAAA,EAAQ,cAAA;AAAA,MACR,QAAA;AAAA,MACA,SAAA,EAAW;AAAA,KACb;AAAA,EACF;AAAA,EAEA,KAAA,CACE,UACA,QAAA,EACY;AACZ,IAAA,MAAM,gBAAA,GACJ,QAAA,IAAY,uBAAA,CAAwB,QAAQ,CAAA,IAAK,QAAA;AACnD,IAAA,OAAO,IAAI,WAAA;AAAA,MACT,IAAA;AAAA,MACA,QAAA;AAAA,MACA,gBAAA;AAAA,MACA,IAAA,CAAK;AAAA,KACP;AAAA,EACF;AAAA,EAEA,WAAA,CAAY,SAAiB,EAAA,EAAuB;AAClD,IAAA,MAAM,QAAA,GAAW,EAAA,GACb,CAAA,EAAG,IAAA,CAAK,KAAK,CAAA,CAAA,EAAI,OAAO,CAAA,CAAA,EAAI,EAAE,CAAA,CAAA,GAC9B,CAAA,EAAG,IAAA,CAAK,KAAK,IAAI,OAAO,CAAA,CAAA;AAE5B,IAAA,OAAO,IAAI,aAAA,CAAa,QAAA,EAAU,IAAA,CAAK,YAAA,EAAc,KAAK,UAAU,CAAA;AAAA,EACtE;AAAA;AAAA,EAGA,SAAA,GAA0B;AACxB,IAAA,OAAO,IAAA,CAAK,YAAA;AAAA,EACd;AAAA;AAAA,EAGA,UAAA,CACE,gBAAA,EACA,MAAA,EACA,QAAA,EACA,QACA,OAAA,EAKM;AACN,IAAA,IAAI,QAAA;AACJ,IAAA,IAAI,YAAA;AACJ,IAAA,IAAI,cAAA;A
ACJ,IAAA,IAAI,WAAA;AACJ,IAAA,IAAI,YAAA;AACJ,IAAA,IAAI,QAAA;AACJ,IAAA,IAAI,OAAA;AACJ,IAAA,IAAI,QAAA;AAGJ,IAAA,IACE,OAAO,gBAAA,KAAqB,QAAA,IAC5B,UAAA,IAAc,gBAAA,EACd;AAEA,MAAA,MAAM,MAAA,GAAS,gBAAA;AACf,MAAA,QAAA,GAAW,MAAA,CAAO,QAAA;AAClB,MAAA,YAAA,GAAe,MAAA,CAAO,MAAA;AACtB,MAAA,cAAA,GAAiB,MAAA,CAAO,QAAA;AACxB,MAAA,WAAA,GAAc,MAAA,CAAO,WAAA;AACrB,MAAA,YAAA,GAAe,MAAA,CAAO,YAAA;AACtB,MAAA,QAAA,GAAW,MAAA,CAAO,QAAA;AAClB,MAAA,OAAA,GAAU,QAAA,KAAa,OAAA;AACvB,MAAA,QAAA,GAAW,MAAA,CAAO,QAAA;AAAA,IACpB,CAAA,MAAO;AAEL,MAAA,IAAI,CAAC,MAAA,IAAU,CAAC,QAAA,IAAY,CAAC,MAAA,EAAQ;AACnC,QAAA,MAAM,IAAI,KAAA;AAAA,UACR;AAAA,SACF;AAAA,MACF;AACA,MAAA,QAAA,GAAW,gBAAA;AACX,MAAA,YAAA,GAAe,MAAA;AACf,MAAA,cAAA,GAAiB,QAAA;AACjB,MAAA,WAAA,GAAc,MAAA,CAAO,KAAA;AACrB,MAAA,YAAA,GAAe,MAAA,CAAO,MAAA;AACtB,MAAA,QAAA,GAAW,SAAS,QAAA,IAAY,MAAA;AAChC,MAAA,OAAA,GAAU,SAAS,OAAA,IAAW,KAAA;AAC9B,MAAA,QAAA,GAAW,OAAA,EAAS,QAAA;AAAA,IACtB;AAEA,IAAA,MAAM,WAAA,GAAc,SAAS,QAAQ,CAAA;AACrC,IAAA,MAAM,IAAA,GAAO,yBAAA;AAAA,MACX,QAAA;AAAA,MACA,WAAA;AAAA,MACA,YAAA;AAAA,MACA;AAAA,KACF;AAGA,IAAA,IAAA,CAAK,aAAa,OAAA,CAAQ;AAAA,MACxB,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,QAAA;AAAA,MACA,QAAA;AAAA,MACA,SAAS,WAAA,CAAY,EAAA;AAAA,MACrB,MAAA,EAAQ,YAAA;AAAA,MACR,QAAA,EAAU,cAAA;AAAA,MACV,WAAA;AAAA,MACA,YAAA;AAAA,MACA,IAAA;AAAA,MACA,UAAU,OAAA,GAAU,EAAE,GAAG,QAAA,EAAU,OAAA,EAAS,MAAK,GAAI;AAAA,KACtD,CAAA;AAAA,EACH;AAAA,EAEA,MAAM,QAAA,GAAmC;AACvC,IAAA,OAAO,IAAA,CAAK,YAAA,CAAa,QAAA,CAAS,IAAA,CAAK,KAAK,CAAA;AAAA,EAC9C;AACF,CAAA;AAMA,IAAM,cAAN,MAAoD;AAAA,EAKlD,WAAA,CACU,MAAA,EACA,QAAA,EACA,QAAA,EACA,UAAA,EACR;AAJQ,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,QAAA,GAAA,QAAA;AACA,IAAA,IAAA,CAAA,QAAA,GAAA,QAAA;AACA,IAAA,IAAA,CAAA,UAAA,GAAA,UAAA;AAIR,IAAA,MAAM,WAAA,GAAc;AAAA,MAClB,GAAA,EAAK,CACH,KAAA,EACA,OAAA,EACA,IAAA,KACG;AACH,QAAA,IAAI,KAAK,UAAA,EAAY;AACnB,UAAA,IAAA,CAAK,WAAW,KAAA,EAAO,CAAA,OAAA,EAAU,QAAQ,CAAA,EAAA,EAAK,OAAO,IAAI,IAAI,CAAA;AAAA,QAC/D,CAAA,MAAO;AACL,UAAA,MAAA,CAAO,KAAA;AAAA,YACL,CAAA,OAAA,EAAU,QAAQ,CAAA,GAAA,EAAM,KAAK,KAAK,OAAO,CAAA,CAAA;
AAAA,YACzC,IAAA,GAAO,IAAA,CAAK,SAAA,CAAU,IAAI,CAAA,GAAI;AAAA,WAChC;AAAA,QACF;AAAA,MACF;AAAA,KACF;AAGA,IAAA,IAAI,aAAa,QAAA,EAAU;AACzB,MAAA,IAAA,CAAK,cAAA,GAAiB,IAAI,mBAAA,CAAoB,IAAI,WAAW,CAAA;AAAA,IAC/D,CAAA,MAAA,IAAW,aAAa,WAAA,EAAa;AACnC,MAAA,IAAA,CAAK,iBAAA,GAAoB,IAAI,sBAAA,CAAuB,IAAI,WAAW,CAAA;AAAA,IACrE,CAAA,MAAA,IAAW,aAAa,QAAA,EAAU;AAChC,MAAA,IAAA,CAAK,cAAA,GAAiB,IAAI,mBAAA,CAAoB,IAAI,WAAW,CAAA;AAAA,IAC/D;AAAA,EACF;AAAA,EArCQ,cAAA,GAA6C,IAAA;AAAA,EAC7C,iBAAA,GAAmD,IAAA;AAAA,EACnD,cAAA,GAA6C,IAAA;AAAA,EAqCrD,MAAM,OAAO,QAAA,EAAoD;AAE/D,IAAA,MAAA,CAAO,MAAM,CAAA,oBAAA,CAAA,EAAwB;AAAA,MACnC,UAAU,IAAA,CAAK,QAAA;AAAA,MACf,OAAO,IAAA,CAAK,QAAA;AAAA,MACZ,cAAc,QAAA,CAAS,MAAA;AAAA,MACvB,UAAA,EAAY,QAAA,CAAS,KAAA,CAAM,CAAA,EAAG,EAAE,EAAE,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,CAAE,EAAE,CAAA;AAAA,MACjD,eAAA,EAAiB,SAAS,MAAA,GAAS;AAAA,KACpC,CAAA;AAED,IAAA,MAAM,gBAAA,GACJ,2FAAA;AAEF,IAAA,IAAI,IAAA,CAAK,QAAA,KAAa,QAAA,IAAY,IAAA,CAAK,cAAA,EAAgB;AACrD,MAAA,MAAM,cAAA,GAAiB,QAAA,CAAS,GAAA,CAAI,CAAC,GAAA,MAAS;AAAA,QAC5C,IAAI,GAAA,CAAI,EAAA;AAAA,QACR,QAAQ,GAAA,CAAI,MAAA;AAAA,QACZ,OAAO,IAAA,CAAK,QAAA;AAAA,QACZ,GAAI,IAAI,MAAA,IAAU,EAAE,QAAQ,gBAAA,EAAkB,MAAA,EAAQ,IAAI,MAAA;AAAO,OACnE,CAAE,CAAA;AACF,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,cAAA,CAAe,OAAO,cAAc,CAAA;AAC9D,MAAA,MAAA,CAAO,MAAM,CAAA,eAAA,CAAA,EAAmB;AAAA,QAC9B,QAAA,EAAU,QAAA;AAAA,QACV,SAAS,MAAA,CAAO,EAAA;AAAA,QAChB,cAAc,QAAA,CAAS;AAAA,OACxB,CAAA;AACD,MAAA,OAAO,EAAE,IAAI,MAAA,CAAO,EAAA,EAAI,QAAQ,SAAA,EAAW,QAAA,EAAU,KAAK,QAAA,EAAS;AAAA,IACrE;AAEA,IAAA,IAAI,IAAA,CAAK,QAAA,KAAa,WAAA,IAAe,IAAA,CAAK,iBAAA,EAAmB;AAC3D,MAAA,MAAM,iBAAA,GAAoB,QAAA,CAAS,GAAA,CAAI,CAAC,GAAA,MAAS;AAAA,QAC/C,UAAU,GAAA,CAAI,EAAA;AAAA,QACd,QAAQ,GAAA,CAAI,MAAA;AAAA,QACZ,OAAO,IAAA,CAAK,QAAA;AAAA,QACZ,GAAI,GAAA,CAAI,MAAA,IAAU,EAAE,QAAQ,gBAAA;AAAiB,OAC/C,CAAE,CAAA;AACF,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,iBAAA,CAAkB,OAAO,iBAAiB,CAAA;AACpE,MAAA,MAAA,CAAO,MAAM,CAAA,eAAA,CAAA,EAAmB;AAAA,QAC9B,QAAA,EAAU,WAAA;AAAA,QACV,SAAS,MAAA,CAAO,EAAA;AAAA,QAChB,cAAc,QAAA,CAAS;AAAA,OACxB,CAAA;AACD,M
AAA,OAAO,EAAE,IAAI,MAAA,CAAO,EAAA,EAAI,QAAQ,SAAA,EAAW,QAAA,EAAU,KAAK,QAAA,EAAS;AAAA,IACrE;AAEA,IAAA,IAAI,IAAA,CAAK,QAAA,KAAa,QAAA,IAAY,IAAA,CAAK,cAAA,EAAgB;AACrD,MAAA,MAAM,cAAA,GAAiB,QAAA,CAAS,GAAA,CAAI,CAAC,GAAA,MAAS;AAAA,QAC5C,UAAU,GAAA,CAAI,EAAA;AAAA,QACd,QAAQ,GAAA,CAAI,MAAA;AAAA,QACZ,OAAO,IAAA,CAAK,QAAA;AAAA,QACZ,GAAI,GAAA,CAAI,MAAA,IAAU,EAAE,QAAQ,gBAAA;AAAiB,OAC/C,CAAE,CAAA;AACF,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,cAAA,CAAe,OAAO,cAAc,CAAA;AAC9D,MAAA,MAAA,CAAO,MAAM,CAAA,eAAA,CAAA,EAAmB;AAAA,QAC9B,QAAA,EAAU,QAAA;AAAA,QACV,SAAS,MAAA,CAAO,EAAA;AAAA,QAChB,cAAc,QAAA,CAAS;AAAA,OACxB,CAAA;AACD,MAAA,OAAO,EAAE,IAAI,MAAA,CAAO,EAAA,EAAI,QAAQ,SAAA,EAAW,QAAA,EAAU,KAAK,QAAA,EAAS;AAAA,IACrE;AAEA,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,+BAAA,EAAkC,KAAK,QAAQ,CAAA,yEAAA;AAAA,KAEjD;AAAA,EACF;AAAA,EAEA,MAAM,UAAU,OAAA,EAAyC;AACvD,IAAA,MAAM,MAAA,GAAS;AAAA,MACb,EAAA,EAAI,OAAA;AAAA,MACJ,UAAU,IAAA,CAAK,QAAA;AAAA,MACf,YAAA,EAAc,CAAA;AAAA,MACd,SAAA,sBAAe,IAAA;AAAK,KACtB;AACA,IAAA,IAAI,MAAA;AAEJ,IAAA,IAAI,IAAA,CAAK,QAAA,KAAa,QAAA,IAAY,IAAA,CAAK,cAAA,EAAgB;AACrD,MAAA,MAAA,GAAS,MAAM,IAAA,CAAK,cAAA,CAAe,WAAA,CAAY,MAAM,CAAA;AAAA,IACvD,CAAA,MAAA,IAAW,IAAA,CAAK,QAAA,KAAa,WAAA,IAAe,KAAK,iBAAA,EAAmB;AAClE,MAAA,MAAA,GAAS,MAAM,IAAA,CAAK,iBAAA,CAAkB,WAAA,CAAY,MAAM,CAAA;AAAA,IAC1D,CAAA,MAAA,IAAW,IAAA,CAAK,QAAA,KAAa,QAAA,IAAY,KAAK,cAAA,EAAgB;AAC5D,MAAA,MAAA,GAAS,MAAM,IAAA,CAAK,cAAA,CAAe,WAAA,CAAY,MAAM,CAAA;AAAA,IACvD,CAAA,MAAO;AACL,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,CAAA,iCAAA,EAAoC,KAAK,QAAQ,CAAA,sBAAA;AAAA,OACnD;AAAA,IACF;AAEA,IAAA,IAAI,WAAA;AACJ,IAAA,QAAQ,OAAO,KAAA;AAAO,MACpB,KAAK,WAAA;AACH,QAAA,WAAA,GAAc,WAAA;AACd,QAAA;AAAA,MACF,KAAK,QAAA;AACH,QAAA,WAAA,GAAc,QAAA;AACd,QAAA;AAAA,MACF,KAAK,YAAA;AACH,QAAA,WAAA,GAAc,YAAA;AACd,QAAA;AAAA,MACF;AACE,QAAA,WAAA,GAAc,SAAA;AAAA;AAGlB,IAAA,OAAO,EAAE,EAAA,EAAI,OAAA,EAAS,QAAQ,WAAA,EAAa,QAAA,EAAU,KAAK,QAAA,EAAS;AAAA,EACrE;AAAA,EAEA,MAAM,UAAA,CACJ,OAAA,EACA,QAAA,EAC6B;AAE7B,IAAA,IAAI,KAAK,UAAA,EAAY;AACnB,MAAA,IAAA,CAAK,UAAA,CAAW,SAAS,CAAA,sCAAA,CAAA,EAA0C;AAAA,QACjE,WAAA,EAAa
,CAAC,CAAC,QAAA;AAAA,QACf,cAAc,QAAA,GAAW,MAAA,CAAO,IAAA,CAAK,QAAQ,IAAI,EAAC;AAAA,QAClD,YAAA,EAAc,CAAC,CAAC,QAAA,EAAU,SAAA;AAAA,QAC1B,cAAA,EAAgB,MAAM,OAAA,CAAQ,QAAA,EAAU,SAAS,CAAA,GAC7C,QAAA,CAAS,UAAU,MAAA,GACnB;AAAA,OACL,CAAA;AAAA,IACH;AAEA,IAAA,MAAM,MAAA,GAAS;AAAA,MACb,EAAA,EAAI,OAAA;AAAA,MACJ,UAAU,IAAA,CAAK,QAAA;AAAA,MACf,YAAA,EAAc,CAAA;AAAA,MACd,SAAA,sBAAe,IAAA,EAAK;AAAA,MACpB;AAAA,KACF;AACA,IAAA,IAAI,UAAA;AAQJ,IAAA,IAAI,IAAA,CAAK,QAAA,KAAa,QAAA,IAAY,IAAA,CAAK,cAAA,EAAgB;AAErD,MAAA,UAAA,GAAa,MAAM,IAAA,CAAK,cAAA,CAAe,UAAA,CAAW,MAAM,CAAA;AAAA,IAC1D,CAAA,MAAA,IAAW,IAAA,CAAK,QAAA,KAAa,WAAA,IAAe,KAAK,iBAAA,EAAmB;AAClE,MAAA,UAAA,GAAa,MAAM,IAAA,CAAK,iBAAA,CAAkB,UAAA,CAAW,MAAM,CAAA;AAAA,IAC7D,CAAA,MAAA,IAAW,IAAA,CAAK,QAAA,KAAa,QAAA,IAAY,KAAK,cAAA,EAAgB;AAC5D,MAAA,UAAA,GAAa,MAAM,IAAA,CAAK,cAAA,CAAe,UAAA,CAAW,MAAM,CAAA;AAAA,IAC1D,CAAA,MAAO;AACL,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,CAAA,4BAAA,EAA+B,KAAK,QAAQ,CAAA,yEAAA;AAAA,OAE9C;AAAA,IACF;AAGA,IAAA,MAAM,mBAAmB,UAAA,CAAW,MAAA;AAAA,MAClC,CAAC,GAAA,EAAK,CAAA,KAAM,GAAA,IAAO,EAAE,WAAA,IAAe,CAAA,CAAA;AAAA,MACpC;AAAA,KACF;AACA,IAAA,MAAM,oBAAoB,UAAA,CAAW,MAAA;AAAA,MACnC,CAAC,GAAA,EAAK,CAAA,KAAM,GAAA,IAAO,EAAE,YAAA,IAAgB,CAAA,CAAA;AAAA,MACrC;AAAA,KACF;AACA,IAAA,MAAM,cAAc,UAAA,CAAW,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,KAAK,CAAA,CAAE,MAAA;AACtD,IAAA,MAAA,CAAO,MAAM,CAAA,yBAAA,CAAA,EAA6B;AAAA,MACxC,OAAA;AAAA,MACA,UAAU,IAAA,CAAK,QAAA;AAAA,MACf,aAAa,UAAA,CAAW,MAAA;AAAA,MACxB,WAAA;AAAA,MACA,gBAAA;AAAA,MACA;AAAA,KACD,CAAA;AAGD,IAAA,MAAM,OAAA,GAA8B,UAAA,CAAW,GAAA,CAAI,CAAC,KAAK,KAAA,KAAU;AAEjE,MAAA,IAAI,IAAI,KAAA,EAAO;AACb,QAAA,OAAO;AAAA,UACL,EAAA,EAAI,GAAA,CAAI,QAAA,IAAY,CAAA,OAAA,EAAU,KAAK,CAAA,CAAA;AAAA,UACnC,MAAA,EAAQ,EAAA;AAAA;AAAA,UACR,QAAQ,EAAC;AAAA;AAAA,UACT,WAAA,EAAa,IAAI,WAAA,IAAe,CAAA;AAAA,UAChC,YAAA,EAAc,IAAI,YAAA,IAAgB,CAAA;AAAA,UAClC,MAAA,EAAQ,QAAA;AAAA,UACR,OAAO,GAAA,CAAI;AAAA,SACb;AAAA,MACF;AAGA,MAAA,IAAI,YAAA;AACJ,MAAA,IAAI;AAEF,QAAA,IAAI,OAAA,GAAU,GAAA,CAAI,IAAA,CAAK,IAAA,EAAK;AAC5B,QAAA,IAAI,OAAA,CAAQ,UAAA,CAAW,SAAS,CAAA,EAAG;AACj
C,UAAA,OAAA,GAAU,OAAA,CAAQ,MAAM,CAAC,CAAA;AAAA,QAC3B,CAAA,MAAA,IAAW,OAAA,CAAQ,UAAA,CAAW,KAAK,CAAA,EAAG;AACpC,UAAA,OAAA,GAAU,OAAA,CAAQ,MAAM,CAAC,CAAA;AAAA,QAC3B;AACA,QAAA,IAAI,OAAA,CAAQ,QAAA,CAAS,KAAK,CAAA,EAAG;AAC3B,UAAA,OAAA,GAAU,OAAA,CAAQ,KAAA,CAAM,CAAA,EAAG,CAAA,CAAE,CAAA;AAAA,QAC/B;AACA,QAAA,OAAA,GAAU,QAAQ,IAAA,EAAK;AACvB,QAAA,YAAA,GAAe,IAAA,CAAK,MAAM,OAAO,CAAA;AAAA,MACnC,CAAA,CAAA,MAAQ;AACN,QAAA,YAAA,GAAe,GAAA,CAAI,IAAA;AAAA,MACrB;AAEA,MAAA,OAAO;AAAA,QACL,EAAA,EAAI,GAAA,CAAI,QAAA,IAAY,CAAA,OAAA,EAAU,KAAK,CAAA,CAAA;AAAA,QACnC,MAAA,EAAQ,EAAA;AAAA;AAAA,QACR,MAAA,EAAQ,YAAA;AAAA,QACR,WAAA,EAAa,IAAI,WAAA,IAAe,CAAA;AAAA,QAChC,YAAA,EAAc,IAAI,YAAA,IAAgB,CAAA;AAAA,QAClC,MAAA,EAAQ;AAAA,OACV;AAAA,IACF,CAAC,CAAA;AAGD,IAAA,MAAM,IAAA,CAAK,aAAA,CAAc,OAAA,EAAS,OAAO,CAAA;AAEzC,IAAA,OAAO,OAAA;AAAA,EACT;AAAA,EAEA,MAAM,WAAW,OAAA,EAAmC;AAClD,IAAA,OAAO,IAAA,CAAK,MAAA,CAAO,SAAA,EAAU,CAAE,WAAW,OAAO,CAAA;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,aAAA,CACJ,OAAA,EACA,OAAA,EACe;AAEf,IAAA,IAAI,MAAM,IAAA,CAAK,UAAA,CAAW,OAAO,CAAA,EAAG;AAClC,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,MAAA,EAAS,OAAO,CAAA,4BAAA,CAA8B,CAAA;AAC3D,MAAA;AAAA,IACF;AAEA,IAAA,MAAM,WAAA,GAAc,QAAA,CAAS,IAAA,CAAK,QAAQ,CAAA;AAC1C,IAAA,MAAM,eAAA,GAAkB,YAAY,oBAAA,IAAwB,CAAA;AAG5D,IAAA,MAAM,IAAA,CAAK,MAAA,CAAO,SAAA,EAAU,CAAE,eAAA;AAAA,MAC5B,OAAA;AAAA,MACA,OAAA,CAAQ,GAAA,CAAI,CAAC,CAAA,KAAM;AACjB,QAAA,MAAM,QAAA,GAAW,aAAA;AAAA,UACf,IAAA,CAAK,QAAA;AAAA,UACL,CAAA,CAAE,WAAA;AAAA,UACF,CAAA,CAAE;AAAA,SACJ;AACA,QAAA,MAAM,IAAA,GAAO,QAAA,CAAS,SAAA,IAAa,CAAA,GAAI,eAAA,GAAkB,GAAA,CAAA;AAEzD,QAAA,OAAO;AAAA,UACL,KAAA,EAAO,KAAK,MAAA,CAAO,KAAA;AAAA,UACnB,QAAA,EAAU,OAAA;AAAA,UACV,UAAU,IAAA,CAAK,QAAA;AAAA,UACf,SAAS,WAAA,CAAY,EAAA;AAAA,UACrB,QAAQ,CAAA,CAAE,MAAA;AAAA,UACV,QAAA,EACE,OAAO,CAAA,CAAE,MAAA,KAAW,QAAA,GAAW,EAAE,MAAA,GAAS,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,MAAM,CAAA;AAAA,UACnE,aAAa,CAAA,CAAE,WAAA;AAAA,UACf,cAAc,CAAA,CAAE,YAAA;AAAA,UAChB,IAAA;AAAA,UACA,QAAA,EAAU,EAAE,OAAA,EAAS,SAAA,EAAW,EAAE,EAAA;AAAG,SACvC;AAAA,MACF,CAAC;AAAA,KACH;AAAA,EACF;AACF,CAAA
;AA0BO,SAAS,cAAA,CACd,KAAA,EACAD,OAAAA,EACA,UAAA,EACU;AACV,EAAA,OAAO,IAAI,YAAA,CAAa,KAAA,EAAOA,OAAAA,EAAQ,UAAU,CAAA;AACnD","file":"chunk-P4KMGCT3.js","sourcesContent":["/**\n * Schema Helpers and Utilities\n *\n * Provides common schemas and utilities for building type-safe workflows.\n * Reduces boilerplate and enforces best practices.\n */\n\nimport { z } from \"zod\";\n\n/**\n * Constant for stages that don't need sequential input\n * Use when a stage receives data from workflowContext instead of the input parameter\n *\n * @example\n * export const myStage: Stage<\n * typeof NoInputSchema, // Explicit: this stage uses workflowContext\n * typeof OutputSchema,\n * typeof ConfigSchema\n * > = {\n * inputSchema: NoInputSchema,\n * // ...\n * };\n */\nexport const NoInputSchema = z.object({});\nexport type NoInput = z.infer<typeof NoInputSchema>;\n\n/**\n * Access previous stage output with guaranteed type safety\n *\n * Requires that the stage output exists, throws clear error if missing.\n * Use this for required dependencies on previous stages.\n *\n * @param workflowContext - The workflow context containing all previous stage outputs\n * @param stageId - ID of the stage to access\n * @param field - Optional: specific field to extract from stage output\n * @returns The stage output (or field within it)\n * @throws Error if stage or field is missing\n *\n * @example\n * // Get entire stage output\n * const extractedData = requireStageOutput<ExtractedData>(\n * context.workflowContext,\n * \"data-extraction\"\n * );\n *\n * // Get specific field\n * const guidelines = requireStageOutput<Guideline[]>(\n * context.workflowContext,\n * \"guidelines\",\n * \"guidelines\"\n * );\n */\nexport function requireStageOutput<T>(\n workflowContext: Record<string, unknown>,\n stageId: string,\n field?: string,\n): T {\n const stageOutput = workflowContext[stageId];\n\n if (!stageOutput) {\n throw new Error(\n `Missing output from required stage: ${stageId}. 
` +\n `Check that this stage has been executed before the current stage. ` +\n `Available stages: ${Object.keys(workflowContext).join(\", \")}`,\n );\n }\n\n if (field) {\n if (typeof stageOutput !== \"object\" || stageOutput === null) {\n throw new Error(\n `Stage ${stageId} output is not an object, cannot access field '${field}'. ` +\n `Received: ${typeof stageOutput}`,\n );\n }\n if (!(field in stageOutput)) {\n const availableFields = Object.keys(stageOutput);\n throw new Error(\n `Missing required field '${field}' in ${stageId} output. ` +\n `Available fields: ${availableFields.join(\", \")}`,\n );\n }\n return (stageOutput as any)[field];\n }\n\n return stageOutput as T;\n}\n\n/**\n * Access previous stage output optionally\n *\n * Returns undefined if the stage output doesn't exist.\n * Use this for optional dependencies on previous stages.\n *\n * @param workflowContext - The workflow context containing all previous stage outputs\n * @param stageId - ID of the stage to access\n * @param field - Optional: specific field to extract from stage output\n * @returns The stage output (or field within it), or undefined if not present\n *\n * @example\n * const optionalData = getStageOutput<OptionalData>(\n * context.workflowContext,\n * \"optional-stage\"\n * );\n *\n * if (optionalData) {\n * // Use the data\n * }\n */\nexport function getStageOutput<T>(\n workflowContext: Record<string, unknown>,\n stageId: string,\n field?: string,\n): T | undefined {\n const stageOutput = workflowContext[stageId];\n\n if (!stageOutput) {\n return undefined;\n }\n\n if (field) {\n if (typeof stageOutput !== \"object\" || stageOutput === null) {\n return undefined;\n }\n return (stageOutput as any)[field];\n }\n\n return stageOutput as T;\n}\n","/**\n * Stage Factory - Simplified stage definition with auto-metrics\n *\n * Provides a `defineStage()` function that reduces boilerplate by:\n * - Inferring types from schemas\n * - Auto-calculating metrics (timing handled by 
executor)\n * - Adding fluent context helpers (require/optional)\n * - Supporting both sync and async-batch modes\n *\n * @example\n * ```typescript\n * export const myStage = defineStage({\n * id: \"my-stage\",\n * name: \"My Stage\",\n * description: \"Does something useful\",\n * dependencies: [\"previous-stage\"],\n *\n * schemas: {\n * input: InputSchema, // or \"none\" for NoInputSchema\n * output: OutputSchema,\n * config: ConfigSchema,\n * },\n *\n * async execute(ctx) {\n * const prevData = ctx.require(\"previous-stage\");\n * // ... stage logic\n * return { output: { ... } };\n * },\n * });\n * ```\n */\n\nimport type { z } from \"zod\";\nimport { NoInputSchema } from \"./schema-helpers\";\nimport type { CheckCompletionContext, Stage, StageContext } from \"./stage\";\nimport type {\n CompletionCheckResult,\n StageResult,\n SuspendedResult,\n SuspendedStateSchema,\n} from \"./types\";\n\n// ============================================================================\n// Type Helper for TInput\n// ============================================================================\n\n/**\n * Helper type to safely infer input type, handling the \"none\" special case\n */\nexport type InferInput<TInput extends z.ZodTypeAny | \"none\"> =\n TInput extends \"none\"\n ? z.infer<typeof NoInputSchema>\n : TInput extends z.ZodTypeAny\n ? 
z.infer<TInput>\n : never;\n\n// ============================================================================\n// Enhanced Context Type\n// ============================================================================\n\n/**\n * Enhanced stage context with fluent helpers\n */\nexport interface EnhancedStageContext<\n TInput,\n TConfig,\n TContext extends Record<string, unknown>,\n> extends StageContext<TInput, TConfig, TContext> {\n /**\n * Require output from a previous stage (throws if not found)\n *\n * @example\n * const { extractedData } = ctx.require(\"data-extraction\");\n */\n require: <K extends keyof TContext>(stageId: K) => TContext[K];\n\n /**\n * Optionally get output from a previous stage (returns undefined if not found)\n *\n * @example\n * const optionalData = ctx.optional(\"optional-stage\");\n * if (optionalData) { ... }\n */\n optional: <K extends keyof TContext>(stageId: K) => TContext[K] | undefined;\n}\n\n// ============================================================================\n// Stage Definition Types\n// ============================================================================\n\n/**\n * Simplified execute result - just output and optional custom metrics\n */\nexport interface SimpleStageResult<TOutput> {\n output: TOutput;\n /**\n * Custom metrics specific to this stage (e.g., itemsProcessed, sectionsFound)\n * Timing metrics (startTime, endTime, duration) are auto-calculated by executor\n * AI metrics should be added here by stages that create their own AIHelper\n */\n customMetrics?: Record<string, number>;\n /**\n * Optional artifacts to store\n */\n artifacts?: Record<string, unknown>;\n}\n\n/**\n * Simplified suspended result - metrics are auto-filled by the factory\n */\nexport interface SimpleSuspendedResult {\n suspended: true;\n state: {\n batchId: string;\n submittedAt: string;\n pollInterval: number;\n maxWaitTime: number;\n metadata?: Record<string, unknown>;\n apiKey?: string;\n };\n pollConfig: {\n pollInterval: 
number;\n maxWaitTime: number;\n nextPollAt: Date;\n };\n /**\n * Optional custom metrics (timing & AI metrics are auto-filled)\n */\n customMetrics?: Record<string, number>;\n}\n\n/**\n * Sync stage definition\n */\nexport interface SyncStageDefinition<\n TInput extends z.ZodTypeAny | \"none\",\n TOutput extends z.ZodTypeAny,\n TConfig extends z.ZodTypeAny,\n TContext extends Record<string, unknown> = Record<string, unknown>,\n> {\n /** Unique stage identifier */\n id: string;\n /** Human-readable name */\n name: string;\n /** Optional description */\n description?: string;\n /** Stage IDs this stage depends on (validated at workflow build time) */\n dependencies?: string[];\n\n /** Zod schemas for validation */\n schemas: {\n /** Input schema, or \"none\" for stages that use workflowContext */\n input: TInput;\n /** Output schema */\n output: TOutput;\n /** Configuration schema */\n config: TConfig;\n };\n\n /**\n * Execute the stage logic\n * Return just { output } - metrics are auto-calculated\n */\n execute: (\n ctx: EnhancedStageContext<InferInput<TInput>, z.infer<TConfig>, TContext>,\n ) => Promise<SimpleStageResult<z.infer<TOutput>>>;\n\n /**\n * Optional: Estimate cost before execution\n */\n estimateCost?: (\n input: InferInput<TInput>,\n config: z.infer<TConfig>,\n ) => number;\n}\n\n/**\n * Async-batch stage definition (for long-running batch jobs)\n */\nexport interface AsyncBatchStageDefinition<\n TInput extends z.ZodTypeAny | \"none\",\n TOutput extends z.ZodTypeAny,\n TConfig extends z.ZodTypeAny,\n TContext extends Record<string, unknown> = Record<string, unknown>,\n> extends Omit<\n SyncStageDefinition<TInput, TOutput, TConfig, TContext>,\n \"execute\"\n > {\n /** Mark as async-batch mode */\n mode: \"async-batch\";\n\n /**\n * Execute the stage - either return result or suspend for batch processing\n *\n * When resuming from suspension, ctx.resumeState contains the suspended state.\n * Check this to determine whether to submit a new batch or 
fetch results.\n *\n * Return SimpleSuspendedResult when suspending - metrics will be auto-filled.\n */\n execute: (\n ctx: EnhancedStageContext<InferInput<TInput>, z.infer<TConfig>, TContext>,\n ) => Promise<SimpleStageResult<z.infer<TOutput>> | SimpleSuspendedResult>;\n\n /**\n * Check if the batch job is complete\n * Called by the orchestrator when polling suspended stages\n *\n * Context includes workflowRunId, stageId, config, log, and storage\n * so you don't need to store these in metadata.\n */\n checkCompletion: (\n suspendedState: z.infer<typeof SuspendedStateSchema>,\n context: CheckCompletionContext<z.infer<TConfig>>,\n ) => Promise<CompletionCheckResult<z.infer<TOutput>>>;\n}\n\n// ============================================================================\n// Factory Function\n// ============================================================================\n\n/**\n * Define a sync stage with simplified API\n */\nexport function defineStage<\n TInput extends z.ZodTypeAny | \"none\",\n TOutput extends z.ZodTypeAny,\n TConfig extends z.ZodTypeAny,\n TContext extends Record<string, unknown> = Record<string, unknown>,\n>(\n definition: SyncStageDefinition<TInput, TOutput, TConfig, TContext>,\n): Stage<\n TInput extends \"none\" ? typeof NoInputSchema : TInput,\n TOutput,\n TConfig,\n TContext\n>;\n\n/**\n * Define an async-batch stage with simplified API\n */\nexport function defineStage<\n TInput extends z.ZodTypeAny | \"none\",\n TOutput extends z.ZodTypeAny,\n TConfig extends z.ZodTypeAny,\n TContext extends Record<string, unknown> = Record<string, unknown>,\n>(\n definition: AsyncBatchStageDefinition<TInput, TOutput, TConfig, TContext>,\n): Stage<\n TInput extends \"none\" ? 
typeof NoInputSchema : TInput,\n TOutput,\n TConfig,\n TContext\n>;\n\n/**\n * Implementation\n */\nexport function defineStage<\n TInput extends z.ZodTypeAny | \"none\",\n TOutput extends z.ZodTypeAny,\n TConfig extends z.ZodTypeAny,\n TContext extends Record<string, unknown> = Record<string, unknown>,\n>(\n definition:\n | SyncStageDefinition<TInput, TOutput, TConfig, TContext>\n | AsyncBatchStageDefinition<TInput, TOutput, TConfig, TContext>,\n): Stage<\n TInput extends \"none\" ? typeof NoInputSchema : TInput,\n TOutput,\n TConfig,\n TContext\n> {\n // Resolve input schema\n const inputSchema =\n definition.schemas.input === \"none\"\n ? NoInputSchema\n : (definition.schemas.input as z.ZodTypeAny);\n\n const isAsyncBatch =\n \"mode\" in definition && definition.mode === \"async-batch\";\n\n // Build the Stage object\n const stage: Stage<\n TInput extends \"none\" ? typeof NoInputSchema : TInput,\n TOutput,\n TConfig,\n TContext\n > = {\n id: definition.id,\n name: definition.name,\n description: definition.description,\n mode: isAsyncBatch ? 
\"async-batch\" : \"sync\",\n dependencies: definition.dependencies,\n\n inputSchema: inputSchema as any,\n outputSchema: definition.schemas.output,\n configSchema: definition.schemas.config,\n\n async execute(context) {\n // Create enhanced context with fluent helpers\n const enhancedContext = createEnhancedContext(\n context as any,\n ) as EnhancedStageContext<InferInput<TInput>, z.infer<TConfig>, TContext>;\n\n // Call the user's execute function\n const result = await definition.execute(enhancedContext);\n\n // If suspended, pass through with auto-filled metrics\n if (\"suspended\" in result && result.suspended === true) {\n const suspendedResult = result as SimpleSuspendedResult;\n return {\n suspended: true as const,\n state: suspendedResult.state,\n pollConfig: suspendedResult.pollConfig,\n metrics: {\n startTime: 0,\n endTime: 0,\n duration: 0,\n ...suspendedResult.customMetrics,\n },\n } satisfies SuspendedResult;\n }\n\n // Narrow type to SimpleStageResult\n const simpleResult = result as SimpleStageResult<z.infer<TOutput>>;\n\n // Build full StageResult - stages add their own AI metrics via customMetrics\n const stageResult: StageResult<z.infer<TOutput>> = {\n output: simpleResult.output,\n metrics: {\n // Timing is set by executor - we provide placeholders\n startTime: 0,\n endTime: 0,\n duration: 0,\n // Custom metrics from stage (including AI metrics if stage tracked them)\n ...simpleResult.customMetrics,\n },\n artifacts: simpleResult.artifacts,\n };\n\n return stageResult;\n },\n\n estimateCost: definition.estimateCost as any,\n };\n\n // Add checkCompletion for async-batch stages\n if (isAsyncBatch) {\n const asyncDef = definition as AsyncBatchStageDefinition<\n TInput,\n TOutput,\n TConfig,\n TContext\n >;\n stage.checkCompletion = asyncDef.checkCompletion;\n }\n\n return stage;\n}\n\n// ============================================================================\n// Helper Functions\n// 
============================================================================\n\n/**\n * Create enhanced context with require/optional helpers\n */\nfunction createEnhancedContext<\n TInput,\n TConfig,\n TContext extends Record<string, unknown>,\n>(\n context: StageContext<TInput, TConfig, TContext>,\n): EnhancedStageContext<TInput, TConfig, TContext> {\n return {\n ...context,\n\n require<K extends keyof TContext>(stageId: K): TContext[K] {\n const output = context.workflowContext[stageId as string];\n if (output === undefined) {\n const availableStages = Object.keys(context.workflowContext);\n throw new Error(\n `Missing required stage output: \"${String(stageId)}\". ` +\n `Available stages: ${availableStages.length > 0 ? availableStages.join(\", \") : \"(none)\"}`,\n );\n }\n return output as TContext[K];\n },\n\n optional<K extends keyof TContext>(stageId: K): TContext[K] | undefined {\n return context.workflowContext[stageId as string] as\n | TContext[K]\n | undefined;\n },\n };\n}\n\n// ============================================================================\n// Type Utilities\n// ============================================================================\n\n/**\n * Extract the output type from a stage created with defineStage\n */\nexport type InferStageOutput<T> = T extends Stage<\n infer _I,\n infer O,\n infer _C,\n infer _Ctx\n>\n ? z.infer<O>\n : never;\n\n/**\n * Extract the input type from a stage created with defineStage\n */\nexport type InferStageInput<T> = T extends Stage<\n infer I,\n infer _O,\n infer _C,\n infer _Ctx\n>\n ? z.infer<I>\n : never;\n\n/**\n * Extract the config type from a stage created with defineStage\n */\nexport type InferStageConfig<T> = T extends Stage<\n infer _I,\n infer _O,\n infer C,\n infer _Ctx\n>\n ? 
z.infer<C>\n : never;\n\n// ============================================================================\n// Async Batch Stage Factory\n// ============================================================================\n\n/**\n * Define an async-batch stage with proper type inference for checkCompletion\n *\n * This is a dedicated function (not an alias) to ensure TypeScript properly\n * infers callback parameter types without overload resolution ambiguity.\n */\nexport function defineAsyncBatchStage<\n TInput extends z.ZodTypeAny | \"none\",\n TOutput extends z.ZodTypeAny,\n TConfig extends z.ZodTypeAny,\n TContext extends Record<string, unknown> = Record<string, unknown>,\n>(\n definition: AsyncBatchStageDefinition<TInput, TOutput, TConfig, TContext>,\n): Stage<\n TInput extends \"none\" ? typeof NoInputSchema : TInput,\n TOutput,\n TConfig,\n TContext\n> {\n return defineStage(definition);\n}\n","/**\n * Model Helper - Centralized model selection and cost tracking for AI scripts\n */\n\nimport z from \"zod\";\n\n// Lazy-loaded tiktoken to avoid bundling 2MB of tokenizer data for browser clients\nlet tiktokenPromise: Promise<{\n Tiktoken: typeof import(\"js-tiktoken/lite\").Tiktoken;\n cl100k_base: typeof import(\"js-tiktoken/ranks/cl100k_base\").default;\n}> | null = null;\n\nasync function getTiktoken() {\n if (!tiktokenPromise) {\n tiktokenPromise = Promise.all([\n import(\"js-tiktoken/lite\"),\n import(\"js-tiktoken/ranks/cl100k_base\"),\n ]).then(([{ Tiktoken }, cl100kModule]) => ({\n Tiktoken,\n cl100k_base: cl100kModule.default,\n }));\n }\n return tiktokenPromise;\n}\n\nexport interface ModelConfig {\n id: string;\n name: string;\n inputCostPerMillion: number; // Cost in USD per 1M input tokens\n outputCostPerMillion: number; // Cost in USD per 1M output tokens\n provider: \"openrouter\" | \"google\" | \"other\";\n description?: string;\n supportsAsyncBatch?: boolean;\n batchDiscountPercent?: number; // e.g., 50 for Google Batch (50% off)\n 
isEmbeddingModel?: boolean; // true for embedding models\n supportsTools?: boolean; // true if model supports function calling\n supportsStructuredOutputs?: boolean; // true if model supports JSON schema outputs\n contextLength?: number; // Max context window from OpenRouter\n maxCompletionTokens?: number | null; // Max output tokens from OpenRouter\n}\n\n/**\n * Filter options for listModels()\n */\nexport interface ModelFilter {\n /** Only include embedding models */\n isEmbeddingModel?: boolean;\n /** Only include models that support function calling */\n supportsTools?: boolean;\n /** Only include models that support structured outputs */\n supportsStructuredOutputs?: boolean;\n /** Only include models that support async batch */\n supportsAsyncBatch?: boolean;\n}\n\n/**\n * Configuration for workflow-engine.models.ts sync config\n */\nexport interface ModelSyncConfig {\n /** Only include models matching these patterns (applied before exclude) */\n include?: (string | RegExp)[];\n /** Output path relative to consumer's project root (default: src/generated/models.ts) */\n outputPath?: string;\n /** Patterns to exclude models (string for exact match, RegExp for pattern) */\n exclude?: (string | RegExp)[];\n /** Custom models to add (embeddings, rerankers, etc.) 
*/\n customModels?: Record<string, ModelConfig>;\n}\n\n/**\n * Model Registry - augmented by consumer's generated file for autocomplete\n * Import the generated file to populate this interface\n */\nexport interface ModelRegistry {}\n\n/**\n * Runtime model registry populated by registerModels()\n */\nconst MODEL_REGISTRY: Record<string, ModelConfig> = {};\n\n/**\n * Register models at runtime (called by generated file)\n */\nexport function registerModels(models: Record<string, ModelConfig>): void {\n Object.assign(MODEL_REGISTRY, models);\n}\n\n/**\n * Get a model from the runtime registry\n */\nexport function getRegisteredModel(key: string): ModelConfig | undefined {\n return MODEL_REGISTRY[key];\n}\n\n/**\n * List all registered models\n */\nexport function listRegisteredModels(): Array<{\n key: string;\n config: ModelConfig;\n}> {\n return Object.entries(MODEL_REGISTRY).map(([key, config]) => ({\n key,\n config,\n }));\n}\n\nexport interface ModelStats {\n modelId: string;\n modelName: string;\n apiCalls: number;\n inputTokens: number;\n outputTokens: number;\n totalTokens: number;\n inputCost: number;\n outputCost: number;\n totalCost: number;\n}\n\n/**\n * Static enum for built-in models - provides .enum accessor for AVAILABLE_MODELS keys\n */\nexport const ModelKeyEnum = z.enum([\"gemini-2.5-flash\"]);\n\n/**\n * Type representing all available model keys\n * Supports both built-in enum keys AND dynamically registered keys via ModelRegistry\n */\nexport type ModelKey = z.infer<typeof ModelKeyEnum> | keyof ModelRegistry;\n\n/**\n * Zod schema that validates model keys against both the static enum AND the runtime registry\n * Use ModelKey.parse() to validate and type model key strings\n */\nexport const ModelKey = z\n .string()\n .refine(\n (key) => {\n // Check built-in enum first\n if (ModelKeyEnum.safeParse(key).success) {\n return true;\n }\n // Then check runtime registry\n return MODEL_REGISTRY[key] !== undefined;\n },\n {\n message:\n \"Model not 
found. Make sure to import the generated models file or register the model.\",\n },\n )\n .transform((key) => key as ModelKey);\n\n/**\n * Available AI models with their configurations\n * Prices should be updated regularly from provider pricing pages\n */\nexport const AVAILABLE_MODELS: Record<string, ModelConfig> = {\n [ModelKeyEnum.enum[\"gemini-2.5-flash\"]]: {\n id: \"google/gemini-2.5-flash-preview-09-2025\",\n name: \"Gemini 2.5 Flash Preview\",\n inputCostPerMillion: 0.3,\n outputCostPerMillion: 2.5,\n provider: \"openrouter\",\n description: \"Fast, efficient model for general tasks\",\n supportsAsyncBatch: true,\n batchDiscountPercent: 50,\n },\n};\n\n/**\n * Default model selection\n * Change this to switch the default model across all scripts\n */\nexport const DEFAULT_MODEL_KEY: ModelKey = \"gemini-2.5-flash\";\n\n/**\n * Get a model configuration by key\n * Checks both built-in AVAILABLE_MODELS and runtime MODEL_REGISTRY\n */\nexport function getModel(key: ModelKey): ModelConfig {\n // First check built-in models (for backward compatibility)\n const builtInModel = AVAILABLE_MODELS[key as keyof typeof AVAILABLE_MODELS];\n if (builtInModel) {\n return builtInModel;\n }\n\n // Then check runtime registry (for dynamically registered models)\n const registeredModel = MODEL_REGISTRY[key as string];\n if (registeredModel) {\n return registeredModel;\n }\n\n const allKeys = [\n ...Object.keys(AVAILABLE_MODELS),\n ...Object.keys(MODEL_REGISTRY),\n ];\n throw new Error(\n `Model \"${key}\" not found. 
Available models: ${allKeys.join(\", \")}`,\n );\n}\n\n/**\n * Get the default model configuration\n */\nexport function getDefaultModel(): ModelConfig {\n return getModel(DEFAULT_MODEL_KEY);\n}\n\n/**\n * List all available models (built-in + registered)\n * @param filter Optional filter to narrow down models by capability\n */\nexport function listModels(\n filter?: ModelFilter,\n): Array<{ key: string; config: ModelConfig }> {\n // Combine built-in models and registered models\n const builtIn = Object.entries(AVAILABLE_MODELS).map(([key, config]) => ({\n key,\n config,\n }));\n\n const registered = Object.entries(MODEL_REGISTRY).map(([key, config]) => ({\n key,\n config,\n }));\n\n // Merge, with registered models taking precedence if there's a duplicate\n const merged = new Map<string, { key: string; config: ModelConfig }>();\n for (const item of builtIn) {\n merged.set(item.key, item);\n }\n for (const item of registered) {\n merged.set(item.key, item);\n }\n\n let models = Array.from(merged.values());\n\n // Apply filters if provided\n if (filter) {\n models = models.filter((item) => {\n const { config } = item;\n\n // Filter by embedding model\n if (filter.isEmbeddingModel !== undefined) {\n if (filter.isEmbeddingModel && !config.isEmbeddingModel) return false;\n if (!filter.isEmbeddingModel && config.isEmbeddingModel) return false;\n }\n\n // Filter by tool support\n if (filter.supportsTools !== undefined) {\n if (filter.supportsTools && !config.supportsTools) return false;\n if (!filter.supportsTools && config.supportsTools) return false;\n }\n\n // Filter by structured outputs support\n if (filter.supportsStructuredOutputs !== undefined) {\n if (\n filter.supportsStructuredOutputs &&\n !config.supportsStructuredOutputs\n )\n return false;\n if (\n !filter.supportsStructuredOutputs &&\n config.supportsStructuredOutputs\n )\n return false;\n }\n\n // Filter by batch support\n if (filter.supportsAsyncBatch !== undefined) {\n if (filter.supportsAsyncBatch && 
!config.supportsAsyncBatch)\n return false;\n if (!filter.supportsAsyncBatch && config.supportsAsyncBatch)\n return false;\n }\n\n return true;\n });\n }\n\n return models.sort((a, b) => a.key.localeCompare(b.key));\n}\n\n/**\n * Check if a model supports async batch processing\n */\nexport function modelSupportsBatch(modelKey: ModelKey): boolean {\n const model = getModel(modelKey);\n return model.supportsAsyncBatch === true;\n}\n\n/**\n * Interface for model with bound recording function\n * Useful for parallel execution where you want to pass model + recordCall together\n */\nexport interface ModelWithRecorder {\n id: string;\n name: string;\n recordCall: (inputTokens: number, outputTokens: number) => void;\n}\n\n/**\n * Get model by key with bound recordCall function\n * Perfect for parallel execution - no need to write model name twice\n *\n * Usage:\n * const model = getModelById(\"gemini-2.5-flash\", modelTracker);\n * const result = await generateText({\n * model: openRouter(model.id),\n * prompt: \"...\",\n * });\n * model.recordCall(result.usage.inputTokens, result.usage.outputTokens);\n */\nexport function getModelById(\n modelKey: ModelKey,\n tracker?: ModelStatsTracker,\n): ModelWithRecorder {\n const modelConfig = getModel(modelKey);\n return {\n id: modelConfig.id,\n name: modelConfig.name,\n recordCall: (inputTokens: number, outputTokens: number) => {\n if (!tracker) {\n throw new Error(\"ModelStatsTracker required to use recordCall()\");\n }\n tracker.recordCall(inputTokens, outputTokens, modelKey);\n },\n };\n}\n\n/**\n * Calculate costs based on token usage\n */\nexport function calculateCost(\n modelKey: ModelKey,\n inputTokens: number,\n outputTokens: number,\n): {\n inputCost: number;\n outputCost: number;\n totalCost: number;\n} {\n const model = getModel(modelKey);\n\n const inputCost = (inputTokens / 1_000_000) * model.inputCostPerMillion;\n const outputCost = (outputTokens / 1_000_000) * model.outputCostPerMillion;\n const totalCost = 
inputCost + outputCost;\n\n return {\n inputCost,\n outputCost,\n totalCost,\n };\n}\n\n/**\n * Model stats tracker class - tracks single model OR aggregates multiple models\n */\nexport class ModelStatsTracker {\n private modelKey?: ModelKey;\n private modelConfig?: ModelConfig;\n private stats: {\n apiCalls: number;\n inputTokens: number;\n outputTokens: number;\n };\n private perModelStats: Map<string, ModelStats> = new Map();\n private isAggregating: boolean = false;\n\n constructor(modelKey: ModelKey = DEFAULT_MODEL_KEY) {\n this.modelKey = modelKey;\n this.modelConfig = getModel(modelKey);\n this.stats = {\n apiCalls: 0,\n inputTokens: 0,\n outputTokens: 0,\n };\n }\n\n /**\n * Create an aggregating tracker that combines stats from multiple models\n * Perfect for parallel execution where different calls use different models\n */\n static createAggregating(): ModelStatsTracker {\n const tracker = new ModelStatsTracker();\n tracker.isAggregating = true;\n tracker.modelKey = undefined;\n tracker.modelConfig = undefined;\n return tracker;\n }\n\n /**\n * Get the model ID for use with AI SDK\n * @deprecated Use getModelById(modelKey).id instead for parallel execution\n */\n getModelId(): string {\n if (!this.modelConfig) {\n throw new Error(\"Model not set for this tracker\");\n }\n return this.modelConfig.id;\n }\n\n /**\n * Get the model configuration\n * @deprecated Use getModelById(modelKey) instead for parallel execution\n */\n getModelConfig(): ModelConfig {\n if (!this.modelConfig) {\n throw new Error(\"Model not set for this tracker\");\n }\n return this.modelConfig;\n }\n\n /**\n * Switch model (useful for sequential model switching)\n * @deprecated For parallel execution, pass model key to recordCall() instead\n */\n switchModel(modelKey: ModelKey): void {\n this.modelKey = modelKey;\n this.modelConfig = getModel(modelKey);\n }\n\n /**\n * Get a model helper with bound recordCall for parallel execution\n * Perfect for running multiple AI calls in 
parallel with different models\n *\n * Usage:\n * const flashModel = tracker.getModelById(\"gemini-2.5-flash\");\n * const liteModel = tracker.getModelById(\"gemini-2.5-flash-lite\");\n *\n * const [result1, result2] = await Promise.all([\n * generateText({\n * model: openRouter(flashModel.id),\n * prompt: prompt1,\n * }).then(r => { flashModel.recordCall(r.usage.inputTokens, r.usage.outputTokens); return r; }),\n * generateText({\n * model: openRouter(liteModel.id),\n * prompt: prompt2,\n * }).then(r => { liteModel.recordCall(r.usage.inputTokens, r.usage.outputTokens); return r; }),\n * ]);\n */\n getModelById(modelKey: ModelKey): {\n id: string;\n name: string;\n recordCall: (inputTokens: number, outputTokens: number) => void;\n } {\n const modelConfig = getModel(modelKey);\n return {\n id: modelConfig.id,\n name: modelConfig.name,\n recordCall: (inputTokens: number, outputTokens: number) => {\n this.recordCall(inputTokens, outputTokens, modelKey);\n },\n };\n }\n\n /**\n * Record an API call with token usage\n *\n * For sequential execution:\n * tracker.switchModel(\"gemini-2.5-flash\")\n * tracker.recordCall(inputTokens, outputTokens)\n *\n * For parallel execution:\n * tracker.recordCall(inputTokens, outputTokens, \"gemini-2.5-flash\")\n * tracker.recordCall(inputTokens, outputTokens, \"gemini-2.5-pro\")\n */\n recordCall(\n inputTokens: number = 0,\n outputTokens: number = 0,\n modelKeyOverride?: ModelKey,\n ): void {\n // Determine which model to use\n const modelKeyToUse = modelKeyOverride || this.modelKey;\n if (!modelKeyToUse) {\n throw new Error(\n \"Model not set and no modelKeyOverride provided to recordCall()\",\n );\n }\n\n const modelConfig = getModel(modelKeyToUse);\n\n this.stats.apiCalls += 1;\n this.stats.inputTokens += inputTokens;\n this.stats.outputTokens += outputTokens;\n\n // Always track per-model stats for aggregating trackers\n if (this.isAggregating) {\n const modelId = modelConfig.id;\n const existing = this.perModelStats.get(modelId) 
|| {\n modelId,\n modelName: modelConfig.name,\n apiCalls: 0,\n inputTokens: 0,\n outputTokens: 0,\n totalTokens: 0,\n inputCost: 0,\n outputCost: 0,\n totalCost: 0,\n };\n\n const costs = calculateCost(modelKeyToUse, inputTokens, outputTokens);\n\n this.perModelStats.set(modelId, {\n modelId,\n modelName: modelConfig.name,\n apiCalls: existing.apiCalls + 1,\n inputTokens: existing.inputTokens + inputTokens,\n outputTokens: existing.outputTokens + outputTokens,\n totalTokens: existing.totalTokens + inputTokens + outputTokens,\n inputCost: existing.inputCost + costs.inputCost,\n outputCost: existing.outputCost + costs.outputCost,\n totalCost: existing.totalCost + costs.totalCost,\n });\n }\n }\n\n /**\n * Estimate cost for a prompt without making an API call\n * Useful for dry-run mode to preview costs\n *\n * Note: This method is async because it lazy-loads the tiktoken library\n * to avoid bundling 2MB of tokenizer data for browser clients.\n *\n * @param prompt - The prompt text to estimate\n * @param estimatedOutputTokens - Estimated number of output tokens (default: 500)\n * @returns Object with token counts and cost estimates\n */\n async estimateCost(\n prompt: string,\n estimatedOutputTokens: number = 500,\n ): Promise<{\n inputTokens: number;\n outputTokens: number;\n totalTokens: number;\n inputCost: number;\n outputCost: number;\n totalCost: number;\n }> {\n if (!this.modelKey) {\n throw new Error(\"Model not set for estimation\");\n }\n\n // Lazy-load tiktoken to avoid bundling for browser clients\n const { Tiktoken, cl100k_base } = await getTiktoken();\n\n // Count tokens in the prompt using tiktoken\n const encoding = new Tiktoken(cl100k_base);\n const inputTokens = encoding.encode(prompt).length;\n\n const costs = calculateCost(\n this.modelKey,\n inputTokens,\n estimatedOutputTokens,\n );\n\n return {\n inputTokens,\n outputTokens: estimatedOutputTokens,\n totalTokens: inputTokens + estimatedOutputTokens,\n inputCost: costs.inputCost,\n outputCost: 
costs.outputCost,\n totalCost: costs.totalCost,\n };\n }\n\n /**\n * Get current statistics (single model or aggregated)\n * Returns null only if tracker is in aggregating mode - use getAggregatedStats() instead\n */\n getStats(): ModelStats | null {\n if (this.isAggregating) {\n return null; // Use getAggregatedStats() instead\n }\n\n if (!this.modelKey || !this.modelConfig) {\n throw new Error(\"Model not set for this tracker\");\n }\n\n const totalTokens = this.stats.inputTokens + this.stats.outputTokens;\n const costs = calculateCost(\n this.modelKey,\n this.stats.inputTokens,\n this.stats.outputTokens,\n );\n\n return {\n modelId: this.modelConfig.id,\n modelName: this.modelConfig.name,\n apiCalls: this.stats.apiCalls,\n inputTokens: this.stats.inputTokens,\n outputTokens: this.stats.outputTokens,\n totalTokens,\n inputCost: costs.inputCost,\n outputCost: costs.outputCost,\n totalCost: costs.totalCost,\n };\n }\n\n /**\n * Get aggregated statistics from all models\n */\n getAggregatedStats(): {\n perModel: ModelStats[];\n totals: {\n totalApiCalls: number;\n totalInputTokens: number;\n totalOutputTokens: number;\n totalTokens: number;\n totalInputCost: number;\n totalOutputCost: number;\n totalCost: number;\n };\n } {\n const perModel = Array.from(this.perModelStats.values());\n\n const totals = {\n totalApiCalls: perModel.reduce((sum, m) => sum + m.apiCalls, 0),\n totalInputTokens: perModel.reduce((sum, m) => sum + m.inputTokens, 0),\n totalOutputTokens: perModel.reduce((sum, m) => sum + m.outputTokens, 0),\n totalTokens: perModel.reduce((sum, m) => sum + m.totalTokens, 0),\n totalInputCost: perModel.reduce((sum, m) => sum + m.inputCost, 0),\n totalOutputCost: perModel.reduce((sum, m) => sum + m.outputCost, 0),\n totalCost: perModel.reduce((sum, m) => sum + m.totalCost, 0),\n };\n\n return { perModel, totals };\n }\n\n /**\n * Print statistics to console\n */\n printStats(): void {\n if (this.isAggregating) {\n this.printAggregatedStats();\n } else {\n const 
stats = this.getStats();\n if (!stats) {\n console.log(\"No statistics available\");\n return;\n }\n\n console.log(\"\\n📊 API Usage Statistics:\");\n console.log(` Model: ${stats.modelName} (${stats.modelId})`);\n console.log(` API Calls: ${stats.apiCalls}`);\n console.log(` Input Tokens: ${stats.inputTokens.toLocaleString()}`);\n console.log(` Output Tokens: ${stats.outputTokens.toLocaleString()}`);\n console.log(` Total Tokens: ${stats.totalTokens.toLocaleString()}`);\n\n if (stats.totalCost > 0) {\n console.log(` Input Cost: $${stats.inputCost.toFixed(4)}`);\n console.log(` Output Cost: $${stats.outputCost.toFixed(4)}`);\n console.log(` Total Cost: $${stats.totalCost.toFixed(4)}`);\n } else {\n console.log(\" Cost: Free / Not calculated\");\n }\n }\n }\n\n /**\n * Print aggregated statistics from all models\n */\n printAggregatedStats(): void {\n const { perModel, totals } = this.getAggregatedStats();\n\n console.log(\"\\n📊 Aggregated API Usage Statistics:\");\n console.log(\"═════════════════════════════════════════\\n\");\n\n console.log(\"Per-Model Breakdown:\");\n for (const stats of perModel) {\n console.log(`\\n ${stats.modelName}`);\n console.log(` API Calls: ${stats.apiCalls}`);\n console.log(` Input Tokens: ${stats.inputTokens.toLocaleString()}`);\n console.log(` Output Tokens: ${stats.outputTokens.toLocaleString()}`);\n console.log(` Total Tokens: ${stats.totalTokens.toLocaleString()}`);\n if (stats.totalCost > 0) {\n console.log(` Input Cost: $${stats.inputCost.toFixed(4)}`);\n console.log(` Output Cost: $${stats.outputCost.toFixed(4)}`);\n console.log(` Total Cost: $${stats.totalCost.toFixed(4)}`);\n } else {\n console.log(` Cost: Free`);\n }\n }\n\n console.log(\"\\n─────────────────────────────────────────\");\n console.log(\"Totals Across All Models:\");\n console.log(` Total API Calls: ${totals.totalApiCalls}`);\n console.log(\n ` Total Input Tokens: ${totals.totalInputTokens.toLocaleString()}`,\n );\n console.log(\n ` Total Output Tokens: 
${totals.totalOutputTokens.toLocaleString()}`,\n );\n console.log(` Total Tokens: ${totals.totalTokens.toLocaleString()}`);\n console.log(` Total Input Cost: $${totals.totalInputCost.toFixed(4)}`);\n console.log(` Total Output Cost: $${totals.totalOutputCost.toFixed(4)}`);\n console.log(` TOTAL COST: $${totals.totalCost.toFixed(4)}`);\n console.log(\"═════════════════════════════════════════\\n\");\n }\n\n /**\n * Reset statistics\n */\n reset(): void {\n this.stats = {\n apiCalls: 0,\n inputTokens: 0,\n outputTokens: 0,\n };\n this.perModelStats.clear();\n }\n}\n\n/**\n * Print available models to console\n */\nexport function printAvailableModels(): void {\n console.log(\"\\n📋 Available Models:\");\n const models = listModels();\n\n for (const { key, config } of models) {\n const isDefault = key === DEFAULT_MODEL_KEY;\n const defaultMarker = isDefault ? \" (DEFAULT)\" : \"\";\n const costInfo =\n config.inputCostPerMillion === 0 && config.outputCostPerMillion === 0\n ? \"Free\"\n : `$${config.inputCostPerMillion}/1M in, $${config.outputCostPerMillion}/1M out`;\n\n console.log(` ${key}${defaultMarker}`);\n console.log(` Name: ${config.name}`);\n console.log(` ID: ${config.id}`);\n console.log(` Cost: ${costInfo}`);\n if (config.description) {\n console.log(` Description: ${config.description}`);\n }\n console.log(\"\");\n }\n}\n","/**\n * Model Mapping for Batch Providers\n *\n * Dynamically maps models from the registry to provider-specific batch API identifiers.\n * Uses the `supportsAsyncBatch` flag and OpenRouter ID prefix to determine compatibility.\n */\n\nimport z from \"zod\";\nimport {\n getModel,\n listModels,\n ModelConfig,\n ModelKey,\n} from \"../../ai/model-helper\";\n\n// =============================================================================\n// Provider Types\n// =============================================================================\n\nexport const BatchProviderName = z.enum([\"google\", \"anthropic\", \"openai\"]);\nexport type 
BatchProviderName = z.infer<typeof BatchProviderName>;\n\n// =============================================================================\n// Dynamic Model Resolution\n// =============================================================================\n\n/**\n * Extract the native batch API model ID from an OpenRouter model ID\n *\n * OpenRouter IDs are typically: \"provider/model-name\"\n * Native batch APIs just need: \"model-name\"\n */\nfunction extractNativeModelId(\n openRouterId: string,\n provider: BatchProviderName,\n): string {\n const prefix = `${provider}/`;\n if (openRouterId.startsWith(prefix)) {\n return openRouterId.slice(prefix.length);\n }\n // Handle Google's special case (google/ prefix but also gemini in ID)\n if (provider === \"google\" && openRouterId.startsWith(\"google/\")) {\n return openRouterId.slice(\"google/\".length);\n }\n return openRouterId;\n}\n\n/**\n * Get the provider from an OpenRouter model ID\n */\nfunction getProviderFromOpenRouterId(\n openRouterId: string,\n): BatchProviderName | undefined {\n if (openRouterId.startsWith(\"google/\") || openRouterId.includes(\"gemini\")) {\n return \"google\";\n }\n if (\n openRouterId.startsWith(\"anthropic/\") ||\n openRouterId.includes(\"claude\")\n ) {\n return \"anthropic\";\n }\n if (openRouterId.startsWith(\"openai/\") || openRouterId.includes(\"gpt\")) {\n return \"openai\";\n }\n return undefined;\n}\n\n// =============================================================================\n// Mapping Functions\n// =============================================================================\n\n/**\n * Get the provider-specific model ID for a given ModelKey\n * Dynamically checks if the model supports async batch and extracts the native ID\n *\n * @param modelKey - The ModelKey from model-helper.ts\n * @param provider - The batch provider to get the model ID for\n * @returns Provider-specific model ID or undefined if not supported\n */\nexport function getProviderModelId(\n modelKey: 
ModelKey,\n provider: BatchProviderName,\n): string | undefined {\n const modelConfig = getModel(modelKey);\n\n // Check if model supports batching\n if (!modelConfig.supportsAsyncBatch) {\n return undefined;\n }\n\n // Check if model belongs to this provider\n const modelProvider = getProviderFromOpenRouterId(modelConfig.id);\n if (modelProvider !== provider) {\n return undefined;\n }\n\n // Extract native model ID\n return extractNativeModelId(modelConfig.id, provider);\n}\n\n/**\n * Get default model for a provider by finding the first batch-compatible model\n */\nfunction getDefaultModelForProvider(provider: BatchProviderName): string {\n const models = listModels({\n supportsAsyncBatch: true,\n isEmbeddingModel: false,\n });\n\n for (const { config } of models) {\n const modelProvider = getProviderFromOpenRouterId(config.id);\n if (modelProvider === provider) {\n return extractNativeModelId(config.id, provider);\n }\n }\n\n // Fallbacks if no models found in registry\n throw new Error(\n `No batch-compatible models found for ${provider}. 
` +\n `Ensure you have models with supportsAsyncBatch: true in your generated models file.`,\n );\n}\n\n/**\n * Get the provider-specific model ID, with fallback to default\n *\n * @param modelKey - The ModelKey from model-helper.ts (optional)\n * @param provider - The batch provider\n * @returns Provider-specific model ID\n * @throws Error if model is not supported by the provider\n */\nexport function resolveModelForProvider(\n modelKey: ModelKey | undefined,\n provider: BatchProviderName,\n): string {\n // No model specified - use default\n if (!modelKey) {\n return getDefaultModelForProvider(provider);\n }\n\n // Get model config and check batch support\n const modelConfig = getModel(modelKey);\n\n // Check if model supports batching\n if (!modelConfig.supportsAsyncBatch) {\n throw new Error(\n `Model \"${modelKey}\" does not support async batch processing.`,\n );\n }\n\n // Check if model is compatible with requested provider\n const modelProvider = getProviderFromOpenRouterId(modelConfig.id);\n if (modelProvider !== provider) {\n throw new Error(\n `Model \"${modelKey}\" belongs to ${modelProvider || \"unknown\"} provider, ` +\n `not ${provider}. 
Use a ${provider} model or change the batch provider.`,\n );\n }\n\n return extractNativeModelId(modelConfig.id, provider);\n}\n\n/**\n * Get list of OpenRouter model IDs supported by a provider for batching\n * Dynamically reads from the registry\n */\nexport function getSupportedModels(provider: BatchProviderName): string[] {\n const models = listModels({\n supportsAsyncBatch: true,\n isEmbeddingModel: false,\n });\n\n return models\n .filter(({ config }) => getProviderFromOpenRouterId(config.id) === provider)\n .map(({ config }) => config.id);\n}\n\n/**\n * Check if a ModelKey is supported by a provider for batching\n */\nexport function isModelSupported(\n modelKey: ModelKey,\n provider: BatchProviderName,\n): boolean {\n return getProviderModelId(modelKey, provider) !== undefined;\n}\n\n/**\n * Get the best provider for a given ModelKey\n * Returns the provider that natively supports the model\n */\nexport function getBestProviderForModel(\n modelKey: ModelKey,\n): BatchProviderName | undefined {\n const modelConfig = getModel(modelKey);\n\n if (!modelConfig.supportsAsyncBatch) {\n return undefined;\n }\n\n return getProviderFromOpenRouterId(modelConfig.id);\n}\n\n/**\n * Get all models that support async batching from the registry\n */\nexport function getBatchCompatibleModels(): Array<{\n key: string;\n config: ModelConfig;\n}> {\n return listModels({ supportsAsyncBatch: true, isEmbeddingModel: false });\n}\n","/**\n * Anthropic Batch Provider\n *\n * Implements batch processing using Anthropic's Message Batches API.\n * Supports up to 10,000 requests per batch with 24h processing window.\n */\n\n// Optional peer dependency\nimport { Anthropic } from \"@anthropic-ai/sdk\";\nimport { jsonSchema } from \"ai\";\nimport { resolveModelForProvider } from \"../model-mapping\";\nimport type {\n AnthropicBatchRequest,\n BatchHandle,\n BatchLogger,\n BatchProvider,\n BatchState,\n BatchStatus,\n BatchSubmitOptions,\n RawBatchResult,\n} from \"../types\";\n\nexport 
interface AnthropicBatchProviderConfig {\n apiKey?: string;\n}\n\nexport class AnthropicBatchProvider\n implements BatchProvider<AnthropicBatchRequest, RawBatchResult>\n{\n readonly name = \"anthropic\";\n readonly supportsBatching = true;\n\n private client: Anthropic;\n private logger?: BatchLogger;\n\n constructor(config: AnthropicBatchProviderConfig = {}, logger?: BatchLogger) {\n const apiKey = config.apiKey || process.env.ANTHROPIC_API_KEY;\n if (!apiKey) {\n throw new Error(\n \"Anthropic API key is required. Set ANTHROPIC_API_KEY or pass apiKey in config.\",\n );\n }\n this.client = new Anthropic({ apiKey });\n this.logger = logger;\n }\n\n async submit(\n requests: AnthropicBatchRequest[],\n options?: BatchSubmitOptions,\n ): Promise<BatchHandle> {\n if (requests.length === 0) {\n throw new Error(\"Cannot submit empty batch\");\n }\n\n // Convert ModelKey to Anthropic-specific model ID\n const modelKey = requests[0]?.model;\n const model = resolveModelForProvider(modelKey, \"anthropic\");\n\n this.logger?.log(\"INFO\", \"Submitting Anthropic batch\", {\n requestCount: requests.length,\n modelKey: modelKey || \"default\",\n model,\n });\n\n // Transform requests into Anthropic's batch format\n const batchRequests = requests.map((req, i) => {\n // Each request can optionally override the model\n const reqModel = req.model\n ? resolveModelForProvider(req.model, \"anthropic\")\n : model;\n\n // Convert tools to Anthropic format if provided\n const anthropicTools =\n req.tools && Object.keys(req.tools).length > 0\n ? Object.entries(req.tools).map(([name, tool]) => ({\n name,\n description: tool.description || \"\",\n input_schema: (tool.inputSchema\n ? 
jsonSchema(tool.inputSchema)\n : {\n type: \"object\",\n properties: {},\n }) as Anthropic.Tool[\"input_schema\"],\n }))\n : undefined;\n\n // Map toolChoice to Anthropic's tool_choice format\n let toolChoice: Anthropic.MessageCreateParams[\"tool_choice\"] | undefined;\n if (req.toolChoice) {\n if (req.toolChoice === \"auto\") {\n toolChoice = { type: \"auto\" };\n } else if (req.toolChoice === \"required\") {\n toolChoice = { type: \"any\" };\n } else if (\n typeof req.toolChoice === \"object\" &&\n req.toolChoice.type === \"tool\"\n ) {\n toolChoice = { type: \"tool\", name: req.toolChoice.toolName };\n }\n // 'none' removes tools entirely, so we skip adding them\n }\n\n return {\n custom_id: req.customId || `request-${i}`,\n params: {\n model: reqModel,\n max_tokens: req.maxTokens || 1024,\n messages: [{ role: \"user\" as const, content: req.prompt }],\n ...(req.system && { system: req.system }),\n ...(req.temperature !== undefined && {\n temperature: req.temperature,\n }),\n ...(anthropicTools &&\n req.toolChoice !== \"none\" && { tools: anthropicTools }),\n ...(toolChoice && { tool_choice: toolChoice }),\n },\n };\n });\n\n const response = await this.client.messages.batches.create({\n requests: batchRequests,\n });\n\n this.logger?.log(\"INFO\", \"Anthropic batch submitted\", {\n batchId: response.id,\n requestCount: requests.length,\n processingStatus: response.processing_status,\n });\n\n return {\n id: response.id,\n provider: this.name,\n requestCount: requests.length,\n createdAt: new Date(response.created_at),\n metadata: {\n model,\n processingStatus: response.processing_status,\n ...options?.metadata,\n },\n };\n }\n\n async checkStatus(handle: BatchHandle): Promise<BatchStatus> {\n const batch = await this.client.messages.batches.retrieve(handle.id);\n\n const succeededCount = batch.request_counts?.succeeded || 0;\n const erroredCount = batch.request_counts?.errored || 0;\n const canceledCount = batch.request_counts?.canceled || 0;\n const 
expiredCount = batch.request_counts?.expired || 0;\n const processingCount = batch.request_counts?.processing || 0;\n\n const processedCount =\n succeededCount + erroredCount + canceledCount + expiredCount;\n const totalCount = processedCount + processingCount;\n\n const status: BatchStatus = {\n state: this.mapStatus(batch.processing_status),\n processedCount,\n totalCount: totalCount || handle.requestCount,\n succeededCount,\n failedCount: erroredCount + canceledCount + expiredCount,\n };\n\n this.logger?.log(\"DEBUG\", \"Anthropic batch status\", {\n batchId: handle.id,\n state: status.state,\n processed: status.processedCount,\n total: status.totalCount,\n });\n\n return status;\n }\n\n async getResults(handle: BatchHandle): Promise<RawBatchResult[]> {\n // First check status\n const status = await this.checkStatus(handle);\n if (status.state !== \"completed\" && status.state !== \"failed\") {\n throw new Error(`Batch not complete: state=${status.state}`);\n }\n\n this.logger?.log(\"INFO\", \"Retrieving Anthropic batch results\", {\n batchId: handle.id,\n });\n\n const results: RawBatchResult[] = [];\n let index = 0;\n\n // Anthropic uses async iteration for results\n const resultsIterator = await this.client.messages.batches.results(\n handle.id,\n );\n for await (const entry of resultsIterator) {\n if (entry.result.type === \"succeeded\") {\n const message = entry.result.message;\n const textContent = message.content.find(\n (c: Anthropic.ContentBlock): c is Anthropic.TextBlock =>\n c.type === \"text\",\n );\n\n results.push({\n index,\n customId: entry.custom_id,\n text: textContent?.text || \"\",\n inputTokens: message.usage?.input_tokens || 0,\n outputTokens: message.usage?.output_tokens || 0,\n });\n } else {\n // Handle errored, canceled, or expired results\n let errorMsg: string;\n switch (entry.result.type) {\n case \"errored\":\n // ErrorResponse has error.type and error.message at different levels\n errorMsg =\n (entry.result.error as { message?: 
string })?.message ||\n `Error type: ${entry.result.error?.type}` ||\n \"Request errored\";\n break;\n case \"canceled\":\n errorMsg = \"Request was canceled\";\n break;\n case \"expired\":\n errorMsg = \"Request expired\";\n break;\n default:\n errorMsg = `Unknown result type: ${(entry.result as { type: string }).type}`;\n }\n\n results.push({\n index,\n customId: entry.custom_id,\n text: \"\",\n inputTokens: 0,\n outputTokens: 0,\n error: errorMsg,\n });\n }\n index++;\n }\n\n this.logger?.log(\"INFO\", \"Anthropic batch results retrieved\", {\n batchId: handle.id,\n resultCount: results.length,\n successCount: results.filter((r) => !r.error).length,\n errorCount: results.filter((r) => r.error).length,\n });\n\n return results;\n }\n\n async cancel(handle: BatchHandle): Promise<void> {\n await this.client.messages.batches.cancel(handle.id);\n this.logger?.log(\"INFO\", \"Anthropic batch cancelled\", {\n batchId: handle.id,\n });\n }\n\n private mapStatus(status: string): BatchState {\n switch (status) {\n case \"ended\":\n return \"completed\";\n case \"canceling\":\n case \"canceled\":\n return \"cancelled\";\n case \"in_progress\":\n return \"processing\";\n default:\n return \"pending\";\n }\n }\n}\n","/**\n * Google Batch Provider\n *\n * Implements batch processing using Google's GenAI Batch API.\n * Uses inline requests for simpler API.\n */\n\n/* eslint-disable @typescript-eslint/ban-ts-comment */\nimport {\n FunctionCallingConfigMode,\n GoogleGenAI,\n type InlinedRequest,\n JobState,\n} from \"@google/genai\";\nimport { jsonSchema } from \"ai\";\nimport { resolveModelForProvider } from \"../model-mapping\";\nimport type {\n BatchHandle,\n BatchLogger,\n BatchProvider,\n BatchState,\n BatchStatus,\n BatchSubmitOptions,\n GoogleBatchRequest,\n RawBatchResult,\n} from \"../types\";\n\nexport interface GoogleBatchProviderConfig {\n apiKey?: string;\n}\n\nexport class GoogleBatchProvider\n implements BatchProvider<GoogleBatchRequest, RawBatchResult>\n{\n 
readonly name = \"google\";\n readonly supportsBatching = true;\n\n private ai: GoogleGenAI;\n private logger?: BatchLogger;\n\n constructor(config: GoogleBatchProviderConfig = {}, logger?: BatchLogger) {\n const apiKey = config.apiKey || process.env.GOOGLE_GENERATIVE_AI_API_KEY;\n if (!apiKey) {\n throw new Error(\n \"Google API key is required. Set GOOGLE_GENERATIVE_AI_API_KEY or pass apiKey in config.\",\n );\n }\n this.ai = new GoogleGenAI({ apiKey });\n this.logger = logger;\n }\n\n async submit(\n requests: GoogleBatchRequest[],\n options?: BatchSubmitOptions,\n ): Promise<BatchHandle> {\n if (requests.length === 0) {\n throw new Error(\"Cannot submit empty batch\");\n }\n\n // Convert ModelKey to Google-specific model ID\n const modelKey = requests[0]?.model;\n const model = resolveModelForProvider(modelKey, \"google\");\n\n // Extract customIds from requests to store in handle metadata\n const customIds = requests.map(\n (req, idx) => (req as { id?: string }).id || `request-${idx}`,\n );\n\n this.logger?.log(\"INFO\", \"Submitting Google batch\", {\n requestCount: requests.length,\n modelKey: modelKey || \"default\",\n model,\n });\n\n // Transform requests into Google's inline format\n const inlinedRequests: InlinedRequest[] = requests.map((req) => {\n const parts: Array<{ text: string }> = [{ text: req.prompt }];\n\n if (req.schema) {\n // Use schema description since toJSONSchema may not be available in all Zod versions\n parts.push({\n text: `Please respond with a JSON object matching the expected schema structure.`,\n });\n }\n\n if (req.maxTokens) {\n parts.push({\n text: `Limit your response to a maximum of ${req.maxTokens} tokens.`,\n });\n }\n\n if (req.temperature !== undefined) {\n parts.push({\n text: `Use a temperature setting of ${req.temperature} for this response.`,\n });\n }\n\n // Get customId from request (via id field passed from ai-helper)\n const customId =\n (req as { id?: string }).id || `request-${requests.indexOf(req)}`;\n\n // 
Build the response object with metadata for ID tracking\n const response: Record<string, unknown> = {\n contents: [\n {\n role: \"user\",\n parts,\n },\n ],\n // Store customId in metadata - Google SDK supports this\n metadata: { customId },\n };\n\n // Add tools configuration if provided\n if (req.tools && Object.keys(req.tools).length > 0) {\n const config: Record<string, unknown> = {\n tools: [\n {\n functionDeclarations: Object.entries(req.tools).map(\n ([name, tool]) => ({\n name,\n description: tool.description,\n // AI SDK uses inputSchema, convert to JSON Schema for Google\n parameters: tool.inputSchema\n ? jsonSchema(tool.inputSchema)\n : undefined,\n }),\n ),\n },\n ],\n };\n\n // Map toolChoice if provided\n if (req.toolChoice) {\n if (req.toolChoice === \"required\") {\n config.toolConfig = {\n functionCallingConfig: { mode: FunctionCallingConfigMode.ANY },\n };\n } else if (req.toolChoice === \"none\") {\n config.toolConfig = {\n functionCallingConfig: { mode: FunctionCallingConfigMode.NONE },\n };\n } else if (\n typeof req.toolChoice === \"object\" &&\n req.toolChoice.type === \"tool\"\n ) {\n config.toolConfig = {\n functionCallingConfig: {\n mode: FunctionCallingConfigMode.ANY,\n allowedFunctionNames: [req.toolChoice.toolName],\n },\n };\n }\n // 'auto' is the default, no config needed\n }\n\n response.config = config;\n }\n\n return response as InlinedRequest;\n });\n\n const batchJob = await this.ai.batches.create({\n model,\n src: { inlinedRequests },\n config: {\n displayName:\n options?.displayName || `batch-${Date.now()}-${requests.length}`,\n },\n });\n\n if (!batchJob.name) {\n throw new Error(\"Batch job created but no name returned\");\n }\n\n this.logger?.log(\"INFO\", \"Google batch submitted\", {\n batchName: batchJob.name,\n requestCount: requests.length,\n });\n\n return {\n id: batchJob.name,\n provider: this.name,\n requestCount: requests.length,\n createdAt: new Date(),\n metadata: {\n model,\n displayName: options?.displayName,\n 
customIds, // Store for getResults to use\n },\n };\n }\n\n async checkStatus(handle: BatchHandle): Promise<BatchStatus> {\n const batch = await this.ai.batches.get({ name: handle.id });\n\n const status: BatchStatus = {\n state: this.mapState(batch.state),\n processedCount: Number(batch.completionStats?.successfulCount) || 0,\n totalCount: handle.requestCount,\n succeededCount: batch.completionStats?.successfulCount\n ? Number(batch.completionStats.successfulCount)\n : undefined,\n failedCount: batch.completionStats?.failedCount\n ? Number(batch.completionStats.failedCount)\n : undefined,\n error: (batch as Record<string, unknown>).error\n ? String((batch as Record<string, unknown>).error)\n : undefined,\n };\n\n // When batch is complete or failed, aggregate token counts from all responses\n // This ensures we capture usage even if getResults fails later\n if (\n batch.state === JobState.JOB_STATE_SUCCEEDED ||\n batch.state === JobState.JOB_STATE_FAILED\n ) {\n const inlinedResponses = batch.dest?.inlinedResponses;\n if (inlinedResponses && Array.isArray(inlinedResponses)) {\n let totalInputTokens = 0;\n let totalOutputTokens = 0;\n\n for (const inlinedResponse of inlinedResponses) {\n const usageMetadata = inlinedResponse.response?.usageMetadata;\n if (usageMetadata) {\n totalInputTokens += usageMetadata.promptTokenCount ?? 0;\n totalOutputTokens += usageMetadata.candidatesTokenCount ?? 
0;\n }\n }\n\n status.totalInputTokens = totalInputTokens;\n status.totalOutputTokens = totalOutputTokens;\n\n this.logger?.log(\"INFO\", \"Google batch token usage\", {\n batchId: handle.id,\n totalInputTokens,\n totalOutputTokens,\n responseCount: inlinedResponses.length,\n });\n }\n }\n\n this.logger?.log(\"DEBUG\", \"sdk response\", { batch });\n\n this.logger?.log(\"DEBUG\", \"Google batch status\", {\n batchId: handle.id,\n state: status.state,\n processed: status.processedCount,\n total: status.totalCount,\n startTime: batch.startTime,\n endTime: batch.endTime,\n totalInputTokens: status.totalInputTokens,\n totalOutputTokens: status.totalOutputTokens,\n });\n\n return status;\n }\n\n async getResults(\n handle: BatchHandle,\n customIds?: string[],\n ): Promise<RawBatchResult[]> {\n // Get customIds from: 1) parameter, 2) handle.metadata.customIds, 3) fallback to index-based\n const requestIds =\n customIds || (handle.metadata?.customIds as string[] | undefined);\n\n this.logger?.log(\"DEBUG\", \"Google batch getResults - customIds lookup\", {\n batchId: handle.id,\n hasCustomIdsParam: !!customIds,\n hasHandleMetadata: !!handle.metadata,\n hasMetadataCustomIds: !!handle.metadata?.customIds,\n requestIdsFound: requestIds?.length ?? 
0,\n });\n\n const batch = await this.ai.batches.get({ name: handle.id });\n\n if (\n batch.state !== JobState.JOB_STATE_SUCCEEDED &&\n batch.state !== JobState.JOB_STATE_FAILED\n ) {\n throw new Error(`Batch not complete: state=${batch.state}`);\n }\n\n if (batch.state === JobState.JOB_STATE_FAILED) {\n const errorMsg =\n (batch as Record<string, unknown>).error || \"Unknown error\";\n throw new Error(`Batch failed: ${errorMsg}`);\n }\n\n const maybeInlinedResponses = batch.dest?.inlinedResponses;\n if (!maybeInlinedResponses) {\n throw new Error(\n \"Batch response format unexpected - could not find inlinedResponses array\",\n );\n }\n\n if (!maybeInlinedResponses || !Array.isArray(maybeInlinedResponses)) {\n this.logger?.log(\"ERROR\", \"Unexpected batch response format\", {\n batchId: handle.id,\n hasResponse: !!maybeInlinedResponses,\n destKeys: batch.dest ? Object.keys(batch.dest) : [],\n });\n throw new Error(\n \"Batch response format unexpected - could not find inlinedResponses array\",\n );\n }\n\n this.logger?.log(\"INFO\", \"Processing Google batch results\", {\n batchId: handle.id,\n responseCount: maybeInlinedResponses.length,\n firstItem: JSON.stringify(maybeInlinedResponses[0]), // Log entire first item structure to be sure\n });\n\n return maybeInlinedResponses.map((inlinedResponse, index) => {\n try {\n // Try to get customId from response metadata first, fall back to stored requestIds, then index-based\n const responseMetadata = (\n inlinedResponse as { metadata?: Record<string, string> }\n ).metadata;\n const customId =\n responseMetadata?.customId ||\n requestIds?.[index] ||\n `request-${index}`;\n\n if (!requestIds?.[index] && !responseMetadata?.customId) {\n this.logger?.log(\n \"WARN\",\n `No customId found for index ${index}, using fallback`,\n { index },\n );\n }\n\n if (inlinedResponse.error) {\n const result = {\n index,\n customId,\n text: \"\",\n inputTokens: 0,\n outputTokens: 0,\n error: inlinedResponse.error.message || \"Unknown 
error\",\n };\n this.logger?.log(\"DEBUG\", `Response ${index} has error`, {\n customId,\n error: result.error,\n });\n return result;\n }\n\n const response = inlinedResponse.response;\n if (!response) {\n const result = {\n index,\n text: \"\",\n inputTokens: 0,\n outputTokens: 0,\n error: \"InlinedResponse missing response field\",\n };\n this.logger?.log(\n \"DEBUG\",\n `Response ${index} missing response field`,\n { customId },\n );\n return result;\n }\n\n const text =\n response.text ||\n response.candidates?.[0]?.content?.parts?.[0]?.text ||\n \"\";\n const usageMetadata = response.usageMetadata;\n\n const result = {\n index,\n customId, // Use the already-resolved customId from above\n text,\n inputTokens: usageMetadata?.promptTokenCount ?? 0,\n outputTokens: usageMetadata?.candidatesTokenCount ?? 0,\n };\n this.logger?.log(\"DEBUG\", `Response ${index} parsed successfully`, {\n customId,\n textLength: text.length,\n inputTokens: result.inputTokens,\n outputTokens: result.outputTokens,\n });\n return result;\n } catch (error) {\n const result = {\n index,\n text: \"\",\n inputTokens: 0,\n outputTokens: 0,\n error: error instanceof Error ? error.message : String(error),\n };\n this.logger?.log(\"ERROR\", `Response ${index} threw exception`, {\n error: result.error,\n });\n return result;\n }\n });\n }\n\n async cancel(handle: BatchHandle): Promise<void> {\n // Google's batch API supports cancellation via delete\n try {\n await this.ai.batches.delete({ name: handle.id });\n this.logger?.log(\"INFO\", \"Google batch cancelled\", {\n batchId: handle.id,\n });\n } catch (error) {\n this.logger?.log(\"WARN\", \"Failed to cancel Google batch\", {\n batchId: handle.id,\n error: error instanceof Error ? 
error.message : String(error),\n });\n throw error;\n }\n }\n\n private mapState(state?: JobState): BatchState {\n switch (state) {\n case JobState.JOB_STATE_SUCCEEDED:\n return \"completed\";\n case JobState.JOB_STATE_FAILED:\n return \"failed\";\n case JobState.JOB_STATE_CANCELLED:\n return \"cancelled\";\n case JobState.JOB_STATE_PENDING:\n return \"pending\";\n case JobState.JOB_STATE_RUNNING:\n return \"processing\";\n default:\n return \"processing\";\n }\n }\n}\n","/**\n * OpenAI Batch Provider\n *\n * Implements batch processing using OpenAI's Batch API.\n * Note: OpenAI requires JSONL file uploads for batches.\n * Supports up to 50,000 requests per batch with 24h processing window.\n */\n\nimport { jsonSchema } from \"ai\";\n// Optional peer dependency\nimport OpenAI from \"openai\";\nimport { resolveModelForProvider } from \"../model-mapping\";\nimport type {\n BatchHandle,\n BatchLogger,\n BatchProvider,\n BatchState,\n BatchStatus,\n BatchSubmitOptions,\n OpenAIBatchRequest,\n RawBatchResult,\n} from \"../types\";\n\nexport interface OpenAIBatchProviderConfig {\n apiKey?: string;\n}\n\nexport class OpenAIBatchProvider\n implements BatchProvider<OpenAIBatchRequest, RawBatchResult>\n{\n readonly name = \"openai\";\n readonly supportsBatching = true;\n\n private client: OpenAI;\n private logger?: BatchLogger;\n\n constructor(config: OpenAIBatchProviderConfig = {}, logger?: BatchLogger) {\n const apiKey = config.apiKey || process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new Error(\n \"OpenAI API key is required. 
Set OPENAI_API_KEY or pass apiKey in config.\",\n );\n }\n this.client = new OpenAI({ apiKey });\n this.logger = logger;\n }\n\n async submit(\n requests: OpenAIBatchRequest[],\n options?: BatchSubmitOptions,\n ): Promise<BatchHandle> {\n if (requests.length === 0) {\n throw new Error(\"Cannot submit empty batch\");\n }\n\n // Convert ModelKey to OpenAI-specific model ID\n const modelKey = requests[0]?.model;\n const model = resolveModelForProvider(modelKey, \"openai\");\n\n this.logger?.log(\"INFO\", \"Submitting OpenAI batch\", {\n requestCount: requests.length,\n modelKey: modelKey || \"default\",\n model,\n });\n\n // OpenAI requires JSONL file upload for batches\n const jsonlContent = requests\n .map((req, i) => {\n // Each request can optionally override the model\n const reqModel = req.model\n ? resolveModelForProvider(req.model, \"openai\")\n : model;\n\n // Convert tools to OpenAI format if provided\n const openaiTools =\n req.tools && Object.keys(req.tools).length > 0\n ? Object.entries(req.tools).map(([name, tool]) => ({\n type: \"function\" as const,\n function: {\n name,\n description: tool.description,\n parameters: tool.inputSchema\n ? 
jsonSchema(tool.inputSchema)\n : undefined,\n },\n }))\n : undefined;\n\n // Map toolChoice to OpenAI format\n let toolChoice:\n | \"auto\"\n | \"required\"\n | \"none\"\n | { type: \"function\"; function: { name: string } }\n | undefined;\n if (req.toolChoice) {\n if (req.toolChoice === \"auto\") {\n toolChoice = \"auto\";\n } else if (req.toolChoice === \"required\") {\n toolChoice = \"required\";\n } else if (req.toolChoice === \"none\") {\n toolChoice = \"none\";\n } else if (\n typeof req.toolChoice === \"object\" &&\n req.toolChoice.type === \"tool\"\n ) {\n toolChoice = {\n type: \"function\",\n function: { name: req.toolChoice.toolName },\n };\n }\n }\n\n return JSON.stringify({\n custom_id: req.customId || `request-${i}`,\n method: \"POST\",\n url: \"/v1/chat/completions\",\n body: {\n model: reqModel,\n messages: [\n ...(req.system\n ? [{ role: \"system\" as const, content: req.system }]\n : []),\n { role: \"user\" as const, content: req.prompt },\n ],\n max_tokens: req.maxTokens || 1024,\n ...(req.temperature !== undefined && {\n temperature: req.temperature,\n }),\n ...(openaiTools && { tools: openaiTools }),\n ...(toolChoice && { tool_choice: toolChoice }),\n },\n });\n })\n .join(\"\\n\");\n\n // Upload the JSONL file\n // OpenAI SDK expects a File object or similar\n const file = await this.client.files.create({\n file: new File([jsonlContent], \"batch-requests.jsonl\", {\n type: \"application/jsonl\",\n }),\n purpose: \"batch\",\n });\n\n this.logger?.log(\"DEBUG\", \"OpenAI batch file uploaded\", {\n fileId: file.id,\n filename: file.filename,\n });\n\n // Create the batch\n const batch = await this.client.batches.create({\n input_file_id: file.id,\n endpoint: \"/v1/chat/completions\",\n completion_window: \"24h\", // OpenAI only supports 24h\n metadata: options?.metadata,\n });\n\n this.logger?.log(\"INFO\", \"OpenAI batch submitted\", {\n batchId: batch.id,\n requestCount: requests.length,\n status: batch.status,\n });\n\n return {\n id: 
batch.id,\n provider: this.name,\n requestCount: requests.length,\n createdAt: new Date(batch.created_at * 1000),\n metadata: {\n model,\n inputFileId: file.id,\n outputFileId: batch.output_file_id,\n errorFileId: batch.error_file_id,\n ...options?.metadata,\n },\n };\n }\n\n async checkStatus(handle: BatchHandle): Promise<BatchStatus> {\n const batch = await this.client.batches.retrieve(handle.id);\n\n const requestCounts = batch.request_counts;\n const completedCount = requestCounts?.completed || 0;\n const failedCount = requestCounts?.failed || 0;\n const totalCount = requestCounts?.total || handle.requestCount;\n\n // Don't report as completed until output file is actually available\n // This prevents race conditions where status is \"completed\" but file isn't ready\n let state = this.mapStatus(batch.status);\n if (state === \"completed\" && !batch.output_file_id) {\n this.logger?.log(\n \"WARN\",\n \"Batch shows completed but output file not ready yet\",\n {\n batchId: handle.id,\n status: batch.status,\n },\n );\n state = \"processing\"; // Keep polling until file is ready\n }\n\n const status: BatchStatus = {\n state,\n processedCount: completedCount + failedCount,\n totalCount,\n succeededCount: completedCount,\n failedCount,\n error: batch.errors?.data?.[0]?.message,\n };\n\n this.logger?.log(\"INFO\", \"OpenAI batch status check\", {\n batchId: handle.id,\n rawStatus: batch.status,\n mappedState: status.state,\n processed: status.processedCount,\n total: status.totalCount,\n outputFileId: batch.output_file_id,\n });\n\n return status;\n }\n\n async getResults(handle: BatchHandle): Promise<RawBatchResult[]> {\n const batch = await this.client.batches.retrieve(handle.id);\n\n this.logger?.log(\"INFO\", \"OpenAI batch retrieve for results\", {\n batchId: handle.id,\n status: batch.status,\n outputFileId: batch.output_file_id,\n errorFileId: batch.error_file_id,\n });\n\n if (batch.status !== \"completed\" && batch.status !== \"failed\") {\n throw new 
Error(`Batch not complete: status=${batch.status}`);\n }\n\n // Handle failed batches\n if (batch.status === \"failed\") {\n const errorMessage =\n batch.errors?.data?.[0]?.message || \"Unknown batch error\";\n throw new Error(`Batch failed: ${errorMessage}`);\n }\n\n if (!batch.output_file_id) {\n // This can happen in rare cases - log more details\n this.logger?.log(\"ERROR\", \"Batch completed but no output file\", {\n batchId: handle.id,\n status: batch.status,\n requestCounts: batch.request_counts,\n errors: batch.errors,\n });\n throw new Error(\n `Batch output file not available. Status: ${batch.status}, Request counts: ${JSON.stringify(batch.request_counts)}`,\n );\n }\n\n this.logger?.log(\"INFO\", \"Retrieving OpenAI batch results\", {\n batchId: handle.id,\n outputFileId: batch.output_file_id,\n });\n\n // Download and parse results file\n const fileContent = await this.client.files.content(batch.output_file_id);\n const text = await fileContent.text();\n const lines = text.trim().split(\"\\n\").filter(Boolean);\n\n const results: RawBatchResult[] = lines.map(\n (line: string, index: number) => {\n try {\n const result = JSON.parse(line);\n const response = result.response?.body;\n const choice = response?.choices?.[0];\n\n if (result.error) {\n return {\n index,\n customId: result.custom_id,\n text: \"\",\n inputTokens: 0,\n outputTokens: 0,\n error: result.error.message || \"Unknown error\",\n };\n }\n\n return {\n index,\n customId: result.custom_id,\n text: choice?.message?.content || \"\",\n inputTokens: response?.usage?.prompt_tokens || 0,\n outputTokens: response?.usage?.completion_tokens || 0,\n };\n } catch (error) {\n return {\n index,\n customId: undefined,\n text: \"\",\n inputTokens: 0,\n outputTokens: 0,\n error: error instanceof Error ? 
error.message : String(error),\n };\n }\n },\n );\n\n this.logger?.log(\"INFO\", \"OpenAI batch results retrieved\", {\n batchId: handle.id,\n resultCount: results.length,\n successCount: results.filter((r) => !r.error).length,\n errorCount: results.filter((r) => r.error).length,\n });\n\n return results;\n }\n\n async cancel(handle: BatchHandle): Promise<void> {\n await this.client.batches.cancel(handle.id);\n this.logger?.log(\"INFO\", \"OpenAI batch cancelled\", { batchId: handle.id });\n }\n\n private mapStatus(status: string): BatchState {\n switch (status) {\n case \"completed\":\n return \"completed\";\n case \"failed\":\n case \"expired\":\n return \"failed\";\n case \"cancelling\":\n case \"cancelled\":\n return \"cancelled\";\n case \"validating\":\n case \"in_progress\":\n case \"finalizing\":\n return \"processing\";\n default:\n return \"pending\";\n }\n }\n}\n","/**\n * AI Helper - Unified AI interaction tracking with hierarchical topics\n *\n * This is the new unified AI tracking system that replaces workflow-specific tracking.\n * It supports:\n * - Hierarchical topics for flexible categorization (e.g., \"workflow.abc.stage.extraction\")\n * - All AI call types: generateText, generateObject, embed, streamText, batch\n * - Automatic cost calculation with batch discounts\n * - Persistent DB logging to AICall table\n *\n * @example\n * ```typescript\n * const ai = createAIHelper(\"workflow.abc123\").createChild(\"stage\", \"extraction\");\n * const result = await ai.generateText(\"gemini-2.5-flash\", prompt);\n * ```\n */\n\nimport { google } from \"@ai-sdk/google\";\nimport { openrouter } from \"@openrouter/ai-sdk-provider\";\nimport type { StepResult, ToolSet } from \"ai\";\nimport { embed, generateText, Output, streamText } from \"ai\";\nimport type { z } from \"zod\";\nimport type { AICallLogger } from \"../persistence\";\nimport { getBestProviderForModel } from \"../utils/batch/model-mapping\";\nimport { AnthropicBatchProvider } from 
"../utils/batch/providers/anthropic-batch";
import { GoogleBatchProvider } from "../utils/batch/providers/google-batch";
import { OpenAIBatchProvider } from "../utils/batch/providers/openai-batch";
import { createLogger } from "../utils/logger";
import {
  calculateCost,
  getModel,
  type ModelConfig,
  type ModelKey,
} from "./model-helper";

const logger = createLogger("AIHelper");

// Default embedding dimensions (can be overridden via options)
const DEFAULT_EMBEDDING_DIMENSIONS = 768;

// ============================================================================
// Types
// ============================================================================

/** Discriminator persisted with every logged AI call. */
export type AICallType = "text" | "object" | "embed" | "stream" | "batch";

/** Result of a single non-streaming text generation, including cost accounting. */
export interface AITextResult {
  text: string;
  inputTokens: number;
  outputTokens: number;
  cost: number;
  /** Structured output when experimental_output is used */
  // NOTE(review): typed `any` because the schema is only known at the call
  // site via experimental_output; callers must narrow it themselves.
  output?: any;
}

/** Result of a structured (schema-validated) generation. */
export interface AIObjectResult<T> {
  object: T;
  inputTokens: number;
  outputTokens: number;
  cost: number;
}

/** Result of an embedding call; embeddings have no output tokens. */
export interface AIEmbedResult {
  embedding: number[]; // Single embedding (first one)
  embeddings: number[][]; // All embeddings (for batch)
  dimensions: number; // Dimensionality of embeddings
  inputTokens: number;
  cost: number;
}

// Type for the raw AI SDK streamText result
export type AISDKStreamResult = ReturnType<typeof streamText>;

/** Wrapper around a streaming generation: text chunks plus deferred usage/cost. */
export interface AIStreamResult {
  stream: AsyncIterable<string>;
  // Resolves only after the stream has been fully consumed (the helper will
  // drain the stream itself if the caller has not).
  getUsage(): Promise<{
    inputTokens: number;
    outputTokens: number;
    cost: number;
  }>;
  /** The raw AI SDK result - use this for methods like toUIMessageStreamResponse */
  rawResult: AISDKStreamResult;
}

/**
 * Context for logging to workflow persistence (optional).
 * When provided, batch operations can log to the database.
 */
export interface LogContext {
  workflowRunId: string;
  // Persisted as `workflowStageId` when logs are written (see the createLog
  // call in AIHelperImpl's constructor).
  stageRecordId: string;
  /** Function to create a log entry in persistence */
  createLog: (data: {
    workflowRunId: string;
    workflowStageId: string;
    level: "DEBUG" | "INFO" | "WARN" | "ERROR";
    message: string;
    metadata?: Record<string, unknown>;
  }) => Promise<void>;
}

/** Log function type for batch operations */
export type BatchLogFn = (
  level: "DEBUG" | "INFO" | "WARN" | "ERROR",
  message: string,
  meta?: Record<string, unknown>,
) => void;

/** Options for AIHelper.generateText. */
export interface TextOptions<TTools extends ToolSet = ToolSet> {
  temperature?: number;
  maxTokens?: number;
  /** Tool definitions for the model to use */
  tools?: TTools;
  /** Tool choice: 'auto' (default), 'required' (force tool use), 'none', or specific tool name */
  toolChoice?: Parameters<typeof generateText>[0]["toolChoice"];
  /** Condition to stop tool execution (e.g., stepCountIs(3)) */
  stopWhen?: Parameters<typeof generateText>[0]["stopWhen"];
  /** Callback fired when each step completes (for collecting tool results) */
  onStepFinish?: (stepResult: StepResult<TTools>) => Promise<void> | void;
  /** Experimental structured output - use with tools for combined tool calling + structured output */
  experimental_output?: Parameters<
    typeof generateText
  >[0]["experimental_output"];
}

/** Options for AIHelper.generateObject. */
export interface ObjectOptions<TTools extends ToolSet = ToolSet> {
  temperature?: number;
  maxTokens?: number;
  /** Tool definitions for the model to use */
  tools?: TTools;
  /** Condition to stop tool execution (e.g., stepCountIs(3)) */
  stopWhen?: Parameters<typeof generateText>[0]["stopWhen"];
  /** Callback fired when each step completes (for collecting tool results) */
  onStepFinish?: (stepResult: StepResult<TTools>) => Promise<void> | void;
}

/** Options for AIHelper.embed. */
export interface EmbedOptions {
  taskType?: "RETRIEVAL_QUERY" | "RETRIEVAL_DOCUMENT" | "SEMANTIC_SIMILARITY";
  /** Override the default embedding dimensions (DEFAULT_EMBEDDING_DIMENSIONS above) */
  dimensions?: number;
}

/** Options for AIHelper.streamText. */
export interface StreamOptions {
  temperature?: number;
  maxTokens?: number;
  onChunk?: (chunk: string) => void;
  /** Tool definitions for the model to use */
  tools?: Parameters<typeof streamText>[0]["tools"];
  /** Condition to stop tool execution (e.g., stepCountIs(3)) */
  stopWhen?: Parameters<typeof streamText>[0]["stopWhen"];
  /** Callback fired when each step completes (for collecting tool results) */
  onStepFinish?: Parameters<typeof streamText>[0]["onStepFinish"];
}

// Multimodal content types for generateText/generateObject
export interface MediaPart {
  type: "file";
  data: Buffer | Uint8Array | string; // Base64 or binary data
  mediaType: string; // IANA media type (e.g., "image/png", "application/pdf")
  filename?: string;
}

export interface TextPart {
  type: "text";
  text: string;
}

export type ContentPart = TextPart | MediaPart;

// Input types - can be string (simple) or array of parts (multimodal)
export type TextInput = string | ContentPart[];

// Input types for streamText - mirrors AI SDK's flexible input
// (`never` markers make prompt/messages mutually exclusive at compile time)
export type StreamTextInput =
  | { prompt: string; messages?: never; system?: string }
  | {
      messages: Parameters<typeof streamText>[0]["messages"];
      prompt?: never;
      system?: string;
    };

// =============================================================================
// High-Level Batch Types (User-Facing API)
// =============================================================================
// These types are for the AIHelper.batch() API. 
They are distinct from the
// low-level provider types in utils/batch/types.ts which have more fields
// for internal provider communication.

/** Provider identifier for batch operations */
export type AIBatchProvider = "google" | "anthropic" | "openai";

/** @deprecated Use AIBatchProvider instead */
export type BatchProvider = AIBatchProvider;

/** A request to be processed in a batch */
export interface AIBatchRequest {
  /** Unique identifier for this request (used to match results) */
  id: string;
  /** The prompt to send to the model */
  prompt: string;
  /** Optional Zod schema for structured JSON output */
  schema?: z.ZodTypeAny;
}

/** @deprecated Use AIBatchRequest instead */
export type BatchRequest = AIBatchRequest;

/** Result of a single request in a batch */
export interface AIBatchResult<T = string> {
  /** The request ID (matches the id from AIBatchRequest) */
  id: string;
  /** Original prompt (may be empty if not available from provider) */
  prompt: string;
  /** The parsed result (JSON object if schema was provided, otherwise string) */
  result: T;
  /** Input tokens used */
  inputTokens: number;
  /** Output tokens used */
  outputTokens: number;
  /** Status of this individual result */
  status: "succeeded" | "failed";
  /** Error message if status is "failed" */
  error?: string;
}

/** @deprecated Use AIBatchResult instead */
export type BatchResult<T = string> = AIBatchResult<T>;

/** Handle for tracking a submitted batch */
export interface AIBatchHandle {
  /** Batch identifier from the provider */
  id: string;
  /** Current status of the batch */
  status: "pending" | "processing" | "completed" | "failed";
  /** The provider used for this batch (for resume support) */
  provider?: AIBatchProvider;
}

/** @deprecated Use AIBatchHandle instead */
export type BatchHandle = AIBatchHandle;

/** Interface for batch operations on an AI model */
export interface AIBatch<T = string> {
  /** Submit requests for batch processing */
  submit(requests: AIBatchRequest[]): Promise<AIBatchHandle>;
  /** Check the status of a batch */
  getStatus(batchId: string): Promise<AIBatchHandle>;
  /** Retrieve results from a completed batch */
  getResults(
    batchId: string,
    metadata?: Record<string, unknown>,
  ): Promise<AIBatchResult<T>[]>;
  /** Check if results have been recorded for this batch */
  isRecorded(batchId: string): Promise<boolean>;
  /** Record batch results manually when batch provider integration is not implemented */
  recordResults(batchId: string, results: AIBatchResult<T>[]): Promise<void>;
}

/** Parameters for the object-based recordCall API. */
export interface RecordCallParams {
  modelKey: ModelKey;
  callType: AICallType;
  prompt: string;
  response: string;
  inputTokens: number;
  outputTokens: number;
  metadata?: Record<string, unknown>;
}

/** Aggregated usage/cost stats for a topic, keyed per model. */
export interface AIHelperStats {
  totalCalls: number;
  totalInputTokens: number;
  totalOutputTokens: number;
  totalCost: number;
  perModel: Record<
    string,
    { calls: number; inputTokens: number; outputTokens: number; cost: number }
  >;
}

// ============================================================================
// AIHelper Interface
// ============================================================================

export interface AIHelper {
  /** Current topic path */
  readonly topic: string;

  // Core AI Methods
  generateText<TTools extends ToolSet = ToolSet>(
    modelKey: ModelKey,
    prompt: TextInput,
    options?: TextOptions<TTools>,
  ): Promise<AITextResult>;

  generateObject<TSchema extends z.ZodTypeAny>(
    modelKey: ModelKey,
    prompt: TextInput,
    schema: TSchema,
    options?: ObjectOptions,
  ): Promise<AIObjectResult<z.infer<TSchema>>>;

  embed(
    modelKey: ModelKey,
    text: string | string[],
    options?: EmbedOptions,
  ): Promise<AIEmbedResult>;

  streamText(
    modelKey: ModelKey,
    input: StreamTextInput,
    options?: StreamOptions,
  ): AIStreamResult;

  // Batch Methods - provider is optional, will auto-detect based on model
  batch<T = string>(modelKey: ModelKey, provider?: AIBatchProvider): AIBatch<T>;

  // Hierarchy Methods
  createChild(segment: string, id?: string): AIHelper;

  // Manual Recording (new object-based API)
  recordCall(params: RecordCallParams): void;

  // Manual Recording (legacy positional API for workflow compatibility)
  recordCall(
    modelKey: ModelKey,
    prompt: string,
    response: string,
    tokens: { input: number; output: number },
    options?: {
      callType?: AICallType;
      isBatch?: boolean;
      metadata?: Record<string, unknown>;
    },
  ): void;
  // Stats (queries DB)
  getStats(): Promise<AIHelperStats>;
}

// ============================================================================
// Helper Functions
// ============================================================================

/**
 * Resolve the AI SDK model instance for a model config.
 * OpenRouter models get price-capped routing; everything else falls back to
 * the Google provider.
 */
function getModelProvider(modelConfig: ModelConfig) {
  if (modelConfig.provider === "openrouter") {
    // strict pricing: don't pay more than the model's defined cost
    // this effectively prevents routing to more expensive providers
    // require_parameters ensures routing only to providers that support
    // the requested features (e.g., json_schema for structured output)
    return openrouter(modelConfig.id, {
      extraBody: {
        provider: {
          sort: "throughput",
          require_parameters: true,
          max_price: {
            prompt: modelConfig.inputCostPerMillion,
            completion: modelConfig.outputCostPerMillion,
          },
        },
      },
    });
  }
  return google(modelConfig.id);
}

/**
 * Compute total cost for a call, applying the model's batch discount
 * (percentage) when isBatch is set and the model defines one.
 */
function calculateCostWithDiscount(
  modelKey: ModelKey,
  inputTokens: number,
  outputTokens: number,
  isBatch: boolean = false,
): number {
  const model = getModel(modelKey);
  const baseCost = calculateCost(modelKey, inputTokens, outputTokens);

  if (isBatch && model.batchDiscountPercent) {
    return baseCost.totalCost * (1 - model.batchDiscountPercent / 100);
  }

  return baseCost.totalCost;
}

// 
============================================================================
// AIHelper Implementation
// ============================================================================

/**
 * Default AIHelper implementation. Every call is priced via
 * calculateCostWithDiscount and persisted through the injected AICallLogger
 * under this helper's topic path.
 */
class AIHelperImpl implements AIHelper {
  readonly topic: string;
  private readonly aiCallLogger: AICallLogger;
  private readonly logContext?: LogContext;
  private readonly batchLogFn?: BatchLogFn;

  /**
   * @param topic        Dot-separated topic path used to attribute logged calls.
   * @param aiCallLogger Required persistence sink; throws if missing.
   * @param logContext   Optional workflow context; when present, batch logs are
   *                     also written to persistence (fire-and-forget).
   */
  constructor(
    topic: string,
    aiCallLogger: AICallLogger,
    logContext?: LogContext,
  ) {
    if (!aiCallLogger) {
      throw new Error(
        "AIHelperImpl requires a logger. Create one using createPrismaAICallLogger(prisma).",
      );
    }
    this.topic = topic;
    this.aiCallLogger = aiCallLogger;
    this.logContext = logContext;

    // Create batch log function if logContext is provided
    if (logContext) {
      this.batchLogFn = (level, message, meta) => {
        // Fire and forget - don't block on persistence logging
        logContext
          .createLog({
            workflowRunId: logContext.workflowRunId,
            workflowStageId: logContext.stageRecordId,
            level,
            message,
            metadata: meta,
          })
          .catch((err) => logger.error("Failed to persist log:", err));
        // Also log to console for immediate visibility
        logger.debug(`[${level}] ${message}`, meta ? JSON.stringify(meta) : "");
      };
    }
  }

  /**
   * Generate text (optionally multimodal, with tools and/or experimental
   * structured output). Both success and failure are logged with token usage
   * and cost before returning/re-throwing.
   */
  async generateText<TTools extends ToolSet = ToolSet>(
    modelKey: ModelKey,
    prompt: TextInput,
    options: TextOptions<TTools> = {} as TextOptions<TTools>,
  ): Promise<AITextResult> {
    const modelConfig = getModel(modelKey);
    const model = getModelProvider(modelConfig);
    const startTime = Date.now();

    // Determine if we have multimodal content
    const isMultimodal = Array.isArray(prompt);
    const hasTools = options.tools !== undefined;
    const hasOutputSchema = options.experimental_output !== undefined;

    // Extract text prompt for logging (for multimodal, join text parts)
    const promptForLog = isMultimodal
      ? (prompt as ContentPart[])
          .filter((p): p is TextPart => p.type === "text")
          .map((p) => p.text)
          .join("\n") || "[multimodal content]"
      : (prompt as string);

    // Debug logging
    if (hasTools || hasOutputSchema) {
      logger.debug(
        `generateText config: hasTools=${hasTools}, hasOutputSchema=${hasOutputSchema}, toolNames=${hasTools ? Object.keys(options.tools || {}).join(", ") : "none"}`,
      );
    }

    // Create internal wrapper that logs tool usage and then calls user's callback
    // NOTE(review): the wrapper only exists when the caller supplied
    // onStepFinish, so tool usage is NOT logged for tool calls made without a
    // user callback — confirm whether that is intentional.
    const wrappedOnStepFinish = options.onStepFinish
      ? async (stepResult: StepResult<TTools>) => {
          // Log each tool result to a child topic
          if (stepResult.toolResults && Array.isArray(stepResult.toolResults)) {
            for (const toolResult of stepResult.toolResults) {
              const result = toolResult as {
                toolName?: string;
                toolCallId?: string;
                input?: unknown;
                output?: unknown;
              };
              if (result.toolName) {
                const childTopic = `${this.topic}.tool.${result.toolName}`;
                // Each tool result is attributed the whole step's usage, so
                // per-tool token counts overlap when a step runs several tools.
                this.aiCallLogger.logCall({
                  topic: childTopic,
                  callType: "text",
                  modelKey: modelKey,
                  modelId: modelConfig.id,
                  prompt: JSON.stringify(result.input ?? {}, null, 2),
                  response: JSON.stringify(result.output ?? {}, null, 2),
                  inputTokens: stepResult.usage.inputTokens || 0,
                  outputTokens: stepResult.usage.outputTokens || 0,
                  cost: calculateCostWithDiscount(
                    modelKey,
                    stepResult.usage.inputTokens || 0,
                    stepResult.usage.outputTokens || 0,
                  ),
                  metadata: {
                    toolName: result.toolName,
                    toolCallId: result.toolCallId,
                    finishReason: stepResult.finishReason,
                  },
                });
              }
            }
          }
          // Call user's callback
          await options.onStepFinish?.(stepResult);
        }
      : undefined;

    // Build request based on input type
    const baseOptions = {
      model,
      temperature: options.temperature ?? 0.7,
      maxOutputTokens: options.maxTokens,
      // Tool-related options (only included if tools are provided)
      ...(hasTools && {
        tools: options.tools,
        // Cast to any because TTools generic doesn't match NoInfer<ToolSet> at compile time
        toolChoice: options.toolChoice as Parameters<
          typeof generateText
        >[0]["toolChoice"],
        stopWhen: options.stopWhen,
        onStepFinish: wrappedOnStepFinish as Parameters<
          typeof generateText
        >[0]["onStepFinish"],
      }),
      // Experimental structured output (for tools + schema)
      ...(hasOutputSchema && {
        experimental_output: options.experimental_output,
      }),
    };

    // Trace log before AI call
    logger.debug(`generateText request`, {
      model: modelKey,
      modelId: modelConfig.id,
      prompt:
        promptForLog.substring(0, 500) +
        (promptForLog.length > 500 ? "..." : ""),
      temperature: options.temperature ?? 0.7,
      maxTokens: options.maxTokens,
      hasTools,
      hasOutputSchema,
      isMultimodal,
    });

    try {
      // Cast to 'as any' to bypass AI SDK's strict NoInfer<TTools> constraints
      // Our TextOptions<TTools> provides proper typing at the interface level
      const result = isMultimodal
        ? await generateText({
            ...baseOptions,
            messages: [
              {
                role: "user" as const,
                content: (prompt as ContentPart[]).map((part) =>
                  part.type === "text"
                    ? { type: "text" as const, text: part.text }
                    : {
                        type: "file" as const,
                        data: part.data,
                        mediaType: part.mediaType,
                        ...(part.filename && { filename: part.filename }),
                      },
                ),
              },
            ],
          } as any)
        : await generateText({
            ...baseOptions,
            prompt,
          } as any);

      // Debug logging for result
      if (hasTools || hasOutputSchema) {
        const resultAny = result as { steps?: unknown[]; output?: unknown };
        logger.debug(
          `generateText result: stepsCount=${resultAny.steps?.length ?? 0}, hasOutput=${resultAny.output !== undefined}, finishReason=${result.finishReason}`,
        );
      }

      const inputTokens = result.usage?.inputTokens ?? 0;
      const outputTokens = result.usage?.outputTokens ?? 0;
      const cost = calculateCostWithDiscount(
        modelKey,
        inputTokens,
        outputTokens,
      );
      const durationMs = Date.now() - startTime;

      // Log the call (including error cases where finishReason is "error")
      this.aiCallLogger.logCall({
        topic: this.topic,
        callType: "text",
        modelKey,
        modelId: modelConfig.id,
        prompt: promptForLog,
        response: result.text,
        inputTokens,
        outputTokens,
        cost,
        metadata: {
          temperature: options.temperature,
          maxTokens: options.maxTokens,
          finishReason: result.finishReason,
          durationMs,
          isMultimodal,
          ...(result.finishReason === "error" && { status: "error" }),
          ...(isMultimodal && {
            mediaTypes: (prompt as ContentPart[])
              .filter((p): p is MediaPart => p.type === "file")
              .map((p) => p.mediaType),
          }),
        },
      });

      // Trace log after successful AI call
      logger.debug(`generateText response`, {
        model: modelKey,
        response:
          result.text.substring(0, 500) +
          (result.text.length > 500 ? "..." : ""),
        inputTokens,
        outputTokens,
        cost: cost.toFixed(6),
        durationMs,
        finishReason: result.finishReason,
      });

      return {
        text: result.text,
        inputTokens,
        outputTokens,
        cost,
        // Include structured output if experimental_output was used
        ...(hasOutputSchema && {
          output: (result as { output?: unknown }).output,
        }),
      };
    } catch (error) {
      // Log the failed call before re-throwing
      const durationMs = Date.now() - startTime;
      const errorMessage =
        error instanceof Error ? error.message : String(error);

      // Trace log for error
      logger.error(`generateText error`, {
        model: modelKey,
        error: errorMessage,
        durationMs,
      });

      this.aiCallLogger.logCall({
        topic: this.topic,
        callType: "text",
        modelKey,
        modelId: modelConfig.id,
        prompt: promptForLog,
        response: "",
        inputTokens: 0,
        outputTokens: 0,
        cost: 0,
        metadata: {
          temperature: options.temperature,
          maxTokens: options.maxTokens,
          finishReason: "error",
          durationMs,
          isMultimodal,
          status: "error",
          error: errorMessage,
        },
      });

      throw error;
    }
  }

  /**
   * Generate a schema-validated object. Implemented with the AI SDK v6
   * generateText + Output.object() pattern (see comment below); both success
   * and failure are logged with usage/cost.
   */
  async generateObject<TSchema extends z.ZodTypeAny>(
    modelKey: ModelKey,
    prompt: TextInput,
    schema: TSchema,
    options: ObjectOptions = {},
  ): Promise<AIObjectResult<z.infer<TSchema>>> {
    const modelConfig = getModel(modelKey);
    const model = getModelProvider(modelConfig);
    const startTime = Date.now();

    // Determine if we have multimodal content
    const isMultimodal = Array.isArray(prompt);
    const hasTools = options.tools !== undefined;

    // Extract text prompt for logging (for multimodal, join text parts)
    const promptForLog = isMultimodal
      ? (prompt as ContentPart[])
          .filter((p): p is TextPart => p.type === "text")
          .map((p) => p.text)
          .join("\n") || "[multimodal content]"
      : (prompt as string);

    // Build request using AI SDK v6 pattern: generateText with Output.object()
    // This replaces the deprecated generateObject() and has better provider compatibility
    const baseOptions = {
      model,
      output: Output.object({ schema }),
      temperature: options.temperature ?? 
0,\n maxOutputTokens: options.maxTokens,\n // Tool-related options (only included if tools are provided)\n ...(hasTools && {\n tools: options.tools,\n stopWhen: options.stopWhen,\n onStepFinish: options.onStepFinish,\n }),\n };\n\n // Trace log before AI call\n logger.debug(`generateObject request`, {\n model: modelKey,\n modelId: modelConfig.id,\n prompt:\n promptForLog.substring(0, 500) +\n (promptForLog.length > 500 ? \"...\" : \"\"),\n temperature: options.temperature ?? 0,\n maxTokens: options.maxTokens,\n hasTools,\n isMultimodal,\n });\n\n try {\n const result = isMultimodal\n ? await generateText({\n ...baseOptions,\n messages: [\n {\n role: \"user\" as const,\n content: (prompt as ContentPart[]).map((part) =>\n part.type === \"text\"\n ? { type: \"text\" as const, text: part.text }\n : {\n type: \"file\" as const,\n data: part.data,\n mediaType: part.mediaType,\n ...(part.filename && { filename: part.filename }),\n },\n ),\n },\n ],\n })\n : await generateText({\n ...baseOptions,\n prompt,\n });\n\n const inputTokens = result.usage?.inputTokens ?? 0;\n const outputTokens = result.usage?.outputTokens ?? 
0;\n const cost = calculateCostWithDiscount(\n modelKey,\n inputTokens,\n outputTokens,\n );\n const durationMs = Date.now() - startTime;\n\n // Log the call (including error cases where finishReason is \"error\")\n this.aiCallLogger.logCall({\n topic: this.topic,\n callType: \"object\",\n modelKey,\n modelId: modelConfig.id,\n prompt: promptForLog,\n response: JSON.stringify(result.output, null, 2),\n inputTokens,\n outputTokens,\n cost,\n metadata: {\n temperature: options.temperature,\n maxTokens: options.maxTokens,\n finishReason: result.finishReason,\n durationMs,\n isMultimodal,\n ...(result.finishReason === \"error\" && { status: \"error\" }),\n ...(isMultimodal && {\n mediaTypes: (prompt as ContentPart[])\n .filter((p): p is MediaPart => p.type === \"file\")\n .map((p) => p.mediaType),\n }),\n },\n });\n\n // Trace log after successful AI call\n const responseStr = JSON.stringify(result.output);\n logger.debug(`generateObject response`, {\n model: modelKey,\n response:\n responseStr.substring(0, 500) +\n (responseStr.length > 500 ? \"...\" : \"\"),\n inputTokens,\n outputTokens,\n cost: cost.toFixed(6),\n durationMs,\n finishReason: result.finishReason,\n });\n\n return {\n object: result.output as z.infer<TSchema>,\n inputTokens,\n outputTokens,\n cost,\n };\n } catch (error) {\n // Log the failed call before re-throwing\n const durationMs = Date.now() - startTime;\n const errorMessage =\n error instanceof Error ? 
error.message : String(error);\n\n // Trace log for error\n logger.error(`generateObject error`, {\n model: modelKey,\n error: errorMessage,\n durationMs,\n });\n\n this.aiCallLogger.logCall({\n topic: this.topic,\n callType: \"object\",\n modelKey,\n modelId: modelConfig.id,\n prompt: promptForLog,\n response: \"\",\n inputTokens: 0,\n outputTokens: 0,\n cost: 0,\n metadata: {\n temperature: options.temperature,\n maxTokens: options.maxTokens,\n finishReason: \"error\",\n durationMs,\n isMultimodal,\n status: \"error\",\n error: errorMessage,\n },\n });\n\n throw error;\n }\n }\n\n async embed(\n modelKey: ModelKey,\n text: string | string[],\n options: EmbedOptions = {},\n ): Promise<AIEmbedResult> {\n const modelConfig = getModel(modelKey);\n const texts = Array.isArray(text) ? text : [text];\n const startTime = Date.now();\n\n // Use dimensions from options, or fall back to active config\n const dimensions = options.dimensions ?? DEFAULT_EMBEDDING_DIMENSIONS;\n\n // Trace log before embed call\n const textPreview =\n texts.length === 1\n ? texts[0].substring(0, 200) + (texts[0].length > 200 ? \"...\" : \"\")\n : `[${texts.length} texts]`;\n logger.debug(`embed request`, {\n model: modelKey,\n modelId: modelConfig.id,\n textCount: texts.length,\n textPreview,\n dimensions,\n taskType: options.taskType ?? \"RETRIEVAL_DOCUMENT\",\n });\n\n try {\n // For single text, use embed directly\n // For multiple texts, we need to call embed for each (AI SDK doesn't have batch embed)\n const embeddings: number[][] = [];\n let totalInputTokens = 0;\n\n for (const t of texts) {\n // Strip google/ prefix if present - Google SDK expects just the model name\n const embeddingModelId = modelConfig.id.replace(/^google\\//, \"\");\n\n const result = await embed({\n model: google.embeddingModel(embeddingModelId),\n value: t,\n providerOptions: {\n google: {\n outputDimensionality: dimensions,\n taskType: options.taskType ?? 
\"RETRIEVAL_DOCUMENT\",\n },\n },\n });\n\n embeddings.push(result.embedding);\n totalInputTokens += result.usage?.tokens || 0;\n }\n\n const outputTokens = 0; // Embeddings have no output tokens\n const cost = calculateCostWithDiscount(\n modelKey,\n totalInputTokens,\n outputTokens,\n );\n const durationMs = Date.now() - startTime;\n\n this.aiCallLogger.logCall({\n topic: this.topic,\n callType: \"embed\",\n modelKey,\n modelId: modelConfig.id,\n prompt: texts.length === 1 ? texts[0] : `[${texts.length} texts]`,\n response: `[${embeddings.length} embeddings, ${dimensions} dims]`,\n inputTokens: totalInputTokens,\n outputTokens,\n cost,\n metadata: {\n taskType: options.taskType,\n textCount: texts.length,\n dimensions,\n durationMs,\n },\n });\n\n // Trace log after successful embed call\n logger.debug(`embed response`, {\n model: modelKey,\n embeddingsCount: embeddings.length,\n dimensions,\n inputTokens: totalInputTokens,\n cost: cost.toFixed(6),\n durationMs,\n });\n\n return {\n embedding: embeddings[0], // Convenience: first embedding\n embeddings, // All embeddings\n dimensions, // Dimensionality used\n inputTokens: totalInputTokens,\n cost,\n };\n } catch (error) {\n // Log the failed call before re-throwing\n const durationMs = Date.now() - startTime;\n const errorMessage =\n error instanceof Error ? error.message : String(error);\n\n // Trace log for error\n logger.error(`embed error`, {\n model: modelKey,\n error: errorMessage,\n durationMs,\n });\n\n this.aiCallLogger.logCall({\n topic: this.topic,\n callType: \"embed\",\n modelKey,\n modelId: modelConfig.id,\n prompt: texts.length === 1 ? 
texts[0] : `[${texts.length} texts]`,
        response: "",
        inputTokens: 0,
        outputTokens: 0,
        cost: 0,
        metadata: {
          taskType: options.taskType,
          textCount: texts.length,
          dimensions,
          durationMs,
          status: "error",
          error: errorMessage,
        },
      });

      throw error;
    }
  }

  /**
   * Stream text generation. Returns immediately with an async iterable of
   * chunks plus a getUsage() that resolves after the stream completes (and
   * will drain the stream itself if the caller has not). Usage/cost is
   * persisted exactly once; stream errors are logged once via logError.
   */
  streamText(
    modelKey: ModelKey,
    input: StreamTextInput,
    options: StreamOptions = {},
  ): AIStreamResult {
    const modelConfig = getModel(modelKey);
    const model = getModelProvider(modelConfig);
    const startTime = Date.now();
    const hasTools = options.tools !== undefined;

    // For logging, extract prompt string
    const promptForLog =
      "prompt" in input && input.prompt
        ? input.prompt
        : JSON.stringify(input.messages);

    // Track whether we've logged an error (to avoid duplicate logs)
    let errorLogged = false;

    // Error handler that logs the error to DB
    const logError = (error: unknown) => {
      if (errorLogged) return;
      errorLogged = true;

      const durationMs = Date.now() - startTime;
      const errorMessage =
        error instanceof Error ? error.message : String(error);

      this.aiCallLogger.logCall({
        topic: this.topic,
        callType: "stream",
        modelKey,
        modelId: modelConfig.id,
        prompt: promptForLog,
        response: "",
        inputTokens: 0,
        outputTokens: 0,
        cost: 0,
        metadata: {
          temperature: options.temperature,
          maxTokens: options.maxTokens,
          durationMs,
          status: "error",
          error: errorMessage,
          ...(input.system ? { system: input.system } : {}),
        },
      });
    };

    // Trace log before stream starts
    logger.debug(`streamText request`, {
      model: modelKey,
      modelId: modelConfig.id,
      prompt:
        promptForLog.substring(0, 500) +
        (promptForLog.length > 500 ? "..." : ""),
      temperature: options.temperature ?? 0.7,
      maxTokens: options.maxTokens,
      hasTools,
      hasSystem: !!input.system,
    });

    // Build the streamText params based on input type
    const baseParams = {
      model,
      temperature: options.temperature ?? 0.7,
      maxOutputTokens: options.maxTokens,
      ...(input.system ? { system: input.system } : {}),
      // Tool-related options (only included if tools are provided)
      ...(hasTools && {
        tools: options.tools,
        stopWhen: options.stopWhen,
        onStepFinish: options.onStepFinish,
      }),
      // Error callback to log streaming errors
      onError: ({ error }: { error: unknown }) => {
        logError(error);
      },
    };

    const result =
      "messages" in input && input.messages
        ? streamText({ ...baseParams, messages: input.messages })
        : streamText({
            ...baseParams,
            prompt: (input as { prompt: string }).prompt,
          });

    // Mutable closure state shared between the iterable and getUsage; the
    // iterable accumulates text, getUsage persists once and caches the result.
    let fullText = "";
    let chunkCount = 0;
    let streamConsumed = false;
    let usageResolved = false;
    let cachedUsage: {
      inputTokens: number;
      outputTokens: number;
      cost: number;
    } | null = null;

    // Create async iterable that collects text and calls onChunk
    const streamIterable: AsyncIterable<string> = {
      [Symbol.asyncIterator]: () => {
        const reader = result.textStream[Symbol.asyncIterator]();
        return {
          async next() {
            try {
              const { done, value } = await reader.next();
              if (done) {
                streamConsumed = true;
                return { done: true, value: undefined };
              }
              fullText += value;
              chunkCount++;
              options.onChunk?.(value);
              return { done: false, value };
            } catch (error) {
              // Log streaming error before re-throwing
              logError(error);
              throw error;
            }
          },
        };
      },
    };

    // Create usage getter that waits for stream completion and persists
    const getUsage = async () => {
      // Return cached usage if already resolved
      if (usageResolved && cachedUsage) {
        return cachedUsage;
      }

      // If stream not yet consumed, consume it first
      if (!streamConsumed) {
        for await (const _ of streamIterable) {
          // Consume the stream
        }
      }

      const usage = await result.usage;
      const inputTokens = usage?.inputTokens ?? 0;
      const outputTokens = usage?.outputTokens ?? 0;
      const cost = calculateCostWithDiscount(
        modelKey,
        inputTokens,
        outputTokens,
      );
      const durationMs = Date.now() - startTime;

      // Only persist once
      // NOTE(review): concurrent first calls to getUsage could both pass this
      // check before usageResolved flips — confirm single-caller usage.
      if (!usageResolved) {
        usageResolved = true;
        cachedUsage = { inputTokens, outputTokens, cost };

        // Trace log after stream completion
        logger.debug(`streamText response`, {
          model: modelKey,
          response:
            fullText.substring(0, 500) + (fullText.length > 500 ? "..." : ""),
          inputTokens,
          outputTokens,
          cost: cost.toFixed(6),
          durationMs,
          chunkCount,
        });

        // Persist to DB
        this.aiCallLogger.logCall({
          topic: this.topic,
          callType: "stream",
          modelKey,
          modelId: modelConfig.id,
          prompt: promptForLog,
          response: fullText,
          inputTokens,
          outputTokens,
          cost,
          metadata: {
            temperature: options.temperature,
            maxTokens: options.maxTokens,
            streamChunks: chunkCount,
            durationMs,
            ...(input.system ? { system: input.system } : {}),
          },
        });
      }

      return cachedUsage ?? { inputTokens, outputTokens, cost };
    };

    return {
      stream: streamIterable,
      getUsage,
      rawResult: result,
    };
  }

  /**
   * Create a batch handle for this model. Provider is auto-detected from the
   * model when not given, defaulting to "google".
   */
  batch<T = string>(
    modelKey: ModelKey,
    provider?: AIBatchProvider,
  ): AIBatch<T> {
    const resolvedProvider =
      provider ?? getBestProviderForModel(modelKey) ?? "google";
    return new AIBatchImpl<T>(
      this,
      modelKey,
      resolvedProvider,
      this.batchLogFn,
    );
  }

  /** Create a child helper whose topic extends this one by segment (and id). */
  createChild(segment: string, id?: string): AIHelper {
    const newTopic = id
      ? 
`${this.topic}.${segment}.${id}`\n : `${this.topic}.${segment}`;\n // Preserve logContext for child helpers (same workflow context)\n return new AIHelperImpl(newTopic, this.aiCallLogger, this.logContext);\n }\n\n /** @internal Get the logger for batch operations */\n getLogger(): AICallLogger {\n return this.aiCallLogger;\n }\n\n // Overloaded recordCall to support both new object-based API and legacy positional API\n recordCall(\n paramsOrModelKey: RecordCallParams | ModelKey,\n prompt?: string,\n response?: string,\n tokens?: { input: number; output: number },\n options?: {\n callType?: AICallType;\n isBatch?: boolean;\n metadata?: Record<string, unknown>;\n },\n ): void {\n let modelKey: ModelKey;\n let actualPrompt: string;\n let actualResponse: string;\n let inputTokens: number;\n let outputTokens: number;\n let callType: AICallType;\n let isBatch: boolean;\n let metadata: Record<string, unknown> | undefined;\n\n // Check if first argument is the new object-based params or legacy modelKey string\n if (\n typeof paramsOrModelKey === \"object\" &&\n \"modelKey\" in paramsOrModelKey\n ) {\n // New object-based API\n const params = paramsOrModelKey as RecordCallParams;\n modelKey = params.modelKey;\n actualPrompt = params.prompt;\n actualResponse = params.response;\n inputTokens = params.inputTokens;\n outputTokens = params.outputTokens;\n callType = params.callType;\n isBatch = callType === \"batch\";\n metadata = params.metadata;\n } else {\n // Legacy positional API\n if (!prompt || !response || !tokens) {\n throw new Error(\n \"recordCall: legacy API requires prompt, response, and tokens\",\n );\n }\n modelKey = paramsOrModelKey as ModelKey;\n actualPrompt = prompt;\n actualResponse = response;\n inputTokens = tokens.input;\n outputTokens = tokens.output;\n callType = options?.callType ?? \"text\";\n isBatch = options?.isBatch ?? 
false;\n metadata = options?.metadata;\n }\n\n const modelConfig = getModel(modelKey);\n const cost = calculateCostWithDiscount(\n modelKey,\n inputTokens,\n outputTokens,\n isBatch,\n );\n\n // Persist to DB (fire and forget)\n this.aiCallLogger.logCall({\n topic: this.topic,\n callType,\n modelKey,\n modelId: modelConfig.id,\n prompt: actualPrompt,\n response: actualResponse,\n inputTokens,\n outputTokens,\n cost,\n metadata: isBatch ? { ...metadata, isBatch: true } : metadata,\n });\n }\n\n async getStats(): Promise<AIHelperStats> {\n return this.aiCallLogger.getStats(this.topic);\n }\n}\n\n// ============================================================================\n// Batch Implementation\n// ============================================================================\n\nclass AIBatchImpl<T = string> implements AIBatch<T> {\n private googleProvider: GoogleBatchProvider | null = null;\n private anthropicProvider: AnthropicBatchProvider | null = null;\n private openaiProvider: OpenAIBatchProvider | null = null;\n\n constructor(\n private helper: AIHelperImpl,\n private modelKey: ModelKey,\n private provider: AIBatchProvider,\n private batchLogFn?: BatchLogFn,\n ) {\n // Create a logger adapter for batch providers\n // Uses provided log function (for persistence) or falls back to console\n const batchLogger = {\n log: (\n level: \"DEBUG\" | \"INFO\" | \"WARN\" | \"ERROR\",\n message: string,\n meta?: Record<string, unknown>,\n ) => {\n if (this.batchLogFn) {\n this.batchLogFn(level, `[Batch:${provider}] ${message}`, meta);\n } else {\n logger.debug(\n `[Batch:${provider}] [${level}] ${message}`,\n meta ? 
JSON.stringify(meta) : "",
          );
        }
      },
    };

    // Initialize the actual provider with logger
    if (provider === "google") {
      this.googleProvider = new GoogleBatchProvider({}, batchLogger);
    } else if (provider === "anthropic") {
      this.anthropicProvider = new AnthropicBatchProvider({}, batchLogger);
    } else if (provider === "openai") {
      this.openaiProvider = new OpenAIBatchProvider({}, batchLogger);
    }
  }

  /**
   * Submit requests to the configured batch provider.
   * When a request carries a schema, a JSON-only system prompt is attached
   * (Google also receives the schema itself; the other providers do not).
   * @throws Error when no provider instance matches this.provider.
   */
  async submit(requests: AIBatchRequest[]): Promise<AIBatchHandle> {
    // Trace log before batch submission
    logger.debug(`batch submit request`, {
      provider: this.provider,
      model: this.modelKey,
      requestCount: requests.length,
      requestIds: requests.slice(0, 10).map((r) => r.id),
      hasMoreRequests: requests.length > 10,
    });

    const jsonSystemPrompt =
      "You must respond with valid JSON only. No markdown, no explanation, just the JSON object.";

    if (this.provider === "google" && this.googleProvider) {
      // Google uses `id` for request identity (others use `customId`).
      const googleRequests = requests.map((req) => ({
        id: req.id,
        prompt: req.prompt,
        model: this.modelKey,
        ...(req.schema && { system: jsonSystemPrompt, schema: req.schema }),
      }));
      const handle = await this.googleProvider.submit(googleRequests);
      logger.debug(`batch submitted`, {
        provider: "google",
        batchId: handle.id,
        requestCount: requests.length,
      });
      return { id: handle.id, status: "pending", provider: this.provider };
    }

    if (this.provider === "anthropic" && this.anthropicProvider) {
      const anthropicRequests = requests.map((req) => ({
        customId: req.id,
        prompt: req.prompt,
        model: this.modelKey,
        ...(req.schema && { system: jsonSystemPrompt }),
      }));
      const handle = await this.anthropicProvider.submit(anthropicRequests);
      logger.debug(`batch submitted`, {
        provider: "anthropic",
        batchId: handle.id,
        requestCount: requests.length,
      });
      return { id: handle.id, status: "pending", provider: this.provider };
    }

    if (this.provider === "openai" && this.openaiProvider) {
      const openaiRequests = requests.map((req) => ({
        customId: req.id,
        prompt: req.prompt,
        model: this.modelKey,
        ...(req.schema && { system: jsonSystemPrompt }),
      }));
      const handle = await this.openaiProvider.submit(openaiRequests);
      logger.debug(`batch submitted`, {
        provider: "openai",
        batchId: handle.id,
        requestCount: requests.length,
      });
      return { id: handle.id, status: "pending", provider: this.provider };
    }

    throw new Error(
      `Batch submission for provider "${this.provider}" not yet implemented. ` +
        `Use recordCall() to manually record batch results.`,
    );
  }

  /**
   * Query the provider for batch status and normalize its state string to
   * the AIBatchHandle status union (unknown states map to "pending").
   * @throws Error when no provider instance matches this.provider.
   */
  async getStatus(batchId: string): Promise<AIBatchHandle> {
    // Minimal synthetic handle: providers only need the id here;
    // requestCount/createdAt are placeholders.
    const handle = {
      id: batchId,
      provider: this.provider,
      requestCount: 0,
      createdAt: new Date(),
    };
    let status: { state: string };

    if (this.provider === "google" && this.googleProvider) {
      status = await this.googleProvider.checkStatus(handle);
    } else if (this.provider === "anthropic" && this.anthropicProvider) {
      status = await this.anthropicProvider.checkStatus(handle);
    } else if (this.provider === "openai" && this.openaiProvider) {
      status = await this.openaiProvider.checkStatus(handle);
    } else {
      throw new Error(
        `Batch status check for provider "${this.provider}" not yet implemented.`,
      );
    }

    let batchStatus: "pending" | "processing" | "completed" | "failed";
    switch (status.state) {
      case "completed":
        batchStatus = "completed";
        break;
      case "failed":
        batchStatus = "failed";
        break;
      case "processing":
        batchStatus = "processing";
        break;
      default:
        batchStatus = "pending";
    }

    return { id: batchId, status: batchStatus, provider: this.provider };
  }

  /**
   * Retrieve and normalize results from a completed batch.
   * `metadata` is forwarded to the provider (used for customId mapping);
   * per-request failures become status "failed" entries rather than throwing.
   * @throws Error when no provider instance matches this.provider.
   */
  async getResults(
    batchId: string,
    metadata?: Record<string, unknown>,
  ): Promise<AIBatchResult<T>[]> {
    // Debug: Log received metadata to trace customIds flow
    if (this.batchLogFn) {
      this.batchLogFn("DEBUG", `[AIBatch:getResults] Received metadata`, {
        hasMetadata: !!metadata,
        metadataKeys: metadata ? Object.keys(metadata) : [],
        hasCustomIds: !!metadata?.customIds,
        customIdsCount: Array.isArray(metadata?.customIds)
          ? metadata.customIds.length
          : 0,
      });
    }

    const handle = {
      id: batchId,
      provider: this.provider,
      requestCount: 0,
      createdAt: new Date(),
      metadata,
    };
    let rawResults: Array<{
      customId?: string;
      text: string;
      inputTokens: number;
      outputTokens: number;
      error?: string;
    }>;

    if (this.provider === "google" && this.googleProvider) {
      // Pass metadata for customId mapping
      rawResults = await this.googleProvider.getResults(handle);
    } else if (this.provider === "anthropic" && this.anthropicProvider) {
      rawResults = await this.anthropicProvider.getResults(handle);
    } else if (this.provider === "openai" && this.openaiProvider) {
      rawResults = await this.openaiProvider.getResults(handle);
    } else {
      throw new Error(
        `Batch results for provider "${this.provider}" not yet implemented. ` +
          `Use recordCall() to manually record batch results.`,
      );
    }

    // Trace log after results retrieved
    const totalInputTokens = rawResults.reduce(
      (sum, r) => sum + (r.inputTokens || 0),
      0,
    );
    const totalOutputTokens = rawResults.reduce(
      (sum, r) => sum + (r.outputTokens || 0),
      0,
    );
    const failedCount = rawResults.filter((r) => r.error).length;
    logger.debug(`batch getResults response`, {
      batchId,
      provider: this.provider,
      resultCount: rawResults.length,
      failedCount,
      totalInputTokens,
      totalOutputTokens,
    });

    // Transform RawBatchResult to AIBatchResult<T>
    const results: AIBatchResult<T>[] = rawResults.map((raw, index) => {
      // Check if this request failed
      if (raw.error) {
        return {
          id: raw.customId || `result-${index}`,
          prompt: "", // Not available from raw results
          result: {} as T, // Empty result for failed requests
          inputTokens: raw.inputTokens || 0,
          outputTokens: raw.outputTokens || 0,
          status: "failed" as const,
          error: 
raw.error,\n };\n }\n\n // Try to parse JSON if the result looks like JSON\n let parsedResult: T;\n try {\n // Clean markdown code blocks before parsing\n let cleaned = raw.text.trim();\n if (cleaned.startsWith(\"```json\")) {\n cleaned = cleaned.slice(7);\n } else if (cleaned.startsWith(\"```\")) {\n cleaned = cleaned.slice(3);\n }\n if (cleaned.endsWith(\"```\")) {\n cleaned = cleaned.slice(0, -3);\n }\n cleaned = cleaned.trim();\n parsedResult = JSON.parse(cleaned) as T;\n } catch {\n parsedResult = raw.text as unknown as T;\n }\n\n return {\n id: raw.customId || `result-${index}`,\n prompt: \"\", // Not available from raw results\n result: parsedResult,\n inputTokens: raw.inputTokens || 0,\n outputTokens: raw.outputTokens || 0,\n status: \"succeeded\" as const,\n };\n });\n\n // Auto-record results\n await this.recordResults(batchId, results);\n\n return results;\n }\n\n async isRecorded(batchId: string): Promise<boolean> {\n return this.helper.getLogger().isRecorded(batchId);\n }\n\n /**\n * Record batch results manually.\n * Use this when batch provider integration is not yet implemented.\n */\n async recordResults(\n batchId: string,\n results: AIBatchResult<T>[],\n ): Promise<void> {\n // Check if already recorded\n if (await this.isRecorded(batchId)) {\n logger.debug(`Batch ${batchId} already recorded, skipping.`);\n return;\n }\n\n const modelConfig = getModel(this.modelKey);\n const discountPercent = modelConfig.batchDiscountPercent ?? 0;\n\n // Record all results via logger\n await this.helper.getLogger().logBatchResults(\n batchId,\n results.map((r) => {\n const baseCost = calculateCost(\n this.modelKey,\n r.inputTokens,\n r.outputTokens,\n );\n const cost = baseCost.totalCost * (1 - discountPercent / 100);\n\n return {\n topic: this.helper.topic,\n callType: \"batch\",\n modelKey: this.modelKey,\n modelId: modelConfig.id,\n prompt: r.prompt,\n response:\n typeof r.result === \"string\" ? 
r.result : JSON.stringify(r.result),\n inputTokens: r.inputTokens,\n outputTokens: r.outputTokens,\n cost,\n metadata: { batchId, requestId: r.id },\n };\n }),\n );\n }\n}\n\n// ============================================================================\n// Factory Function\n// ============================================================================\n\n/**\n * Create an AI helper instance with topic-based tracking.\n *\n * @param topic - Initial topic path (e.g., \"workflow.abc123\" or \"reranker\")\n * @returns AIHelper instance\n *\n * @example\n * ```typescript\n * // Simple topic\n * const ai = createAIHelper(\"reranker\");\n *\n * // Hierarchical topic\n * const ai = createAIHelper(\"workflow.abc123\", logger)\n * .createChild(\"stage\", \"extraction\");\n * // topic: \"workflow.abc123.stage.extraction\"\n *\n * // Use AI methods\n * const result = await ai.generateText(\"gemini-2.5-flash\", prompt);\n * ```\n */\nexport function createAIHelper(\n topic: string,\n logger: AICallLogger,\n logContext?: LogContext,\n): AIHelper {\n return new AIHelperImpl(topic, logger, logContext);\n}\n\n// Re-export types from this module\nexport type { ModelKey } from \"./model-helper\";\n"]}
@@ -0,0 +1,17 @@
1
+ // src/persistence/interface.ts
2
+ var StaleVersionError = class extends Error {
3
+ constructor(entity, id, expected, actual) {
4
+ super(
5
+ `Stale version on ${entity} ${id}: expected ${expected}, got ${actual}`
6
+ );
7
+ this.entity = entity;
8
+ this.id = id;
9
+ this.expected = expected;
10
+ this.actual = actual;
11
+ this.name = "StaleVersionError";
12
+ }
13
+ };
14
+
15
+ export { StaleVersionError };
16
+ //# sourceMappingURL=chunk-SPXBCZLB.js.map
17
+ //# sourceMappingURL=chunk-SPXBCZLB.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/persistence/interface.ts"],"names":[],"mappings":";AAkDO,IAAM,iBAAA,GAAN,cAAgC,KAAA,CAAM;AAAA,EAC3C,WAAA,CACkB,MAAA,EACA,EAAA,EACA,QAAA,EACA,MAAA,EAChB;AACA,IAAA,KAAA;AAAA,MACE,oBAAoB,MAAM,CAAA,CAAA,EAAI,EAAE,CAAA,WAAA,EAAc,QAAQ,SAAS,MAAM,CAAA;AAAA,KACvE;AAPgB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,EAAA,GAAA,EAAA;AACA,IAAA,IAAA,CAAA,QAAA,GAAA,QAAA;AACA,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAKhB,IAAA,IAAA,CAAK,IAAA,GAAO,mBAAA;AAAA,EACd;AACF","file":"chunk-SPXBCZLB.js","sourcesContent":["/**\n * Persistence Interfaces for Workflow Engine\n *\n * These interfaces abstract database operations to enable:\n * - Testing with mock implementations\n * - Future extraction into @bratsos/workflow-engine package\n * - Alternative database backends\n *\n * Implementations:\n * - PrismaWorkflowPersistence (default, in ./prisma/)\n * - InMemoryPersistence (for testing)\n */\n\n// ============================================================================\n// Unified Status Type\n// ============================================================================\n\n/**\n * Unified status type for workflows, stages, and jobs.\n *\n * - PENDING: Not started yet\n * - RUNNING: Currently executing\n * - SUSPENDED: Paused, waiting for external event (e.g., batch job completion)\n * - COMPLETED: Finished successfully\n * - FAILED: Finished with error\n * - CANCELLED: Manually stopped by user\n * - SKIPPED: Stage-specific - bypassed due to condition\n */\nexport type Status =\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUSPENDED\"\n | \"COMPLETED\"\n | \"FAILED\"\n | \"CANCELLED\"\n | \"SKIPPED\";\n\n/** @deprecated Use Status instead */\nexport type WorkflowStatus = Status;\n\n/** @deprecated Use Status instead */\nexport type WorkflowStageStatus = Status;\n\n/** @deprecated Use Status instead. Note: PROCESSING is now RUNNING. 
*/\nexport type JobStatus = Status;\n\nexport type LogLevel = \"DEBUG\" | \"INFO\" | \"WARN\" | \"ERROR\";\n\nexport type ArtifactType = \"STAGE_OUTPUT\" | \"ARTIFACT\" | \"METADATA\";\n\nexport class StaleVersionError extends Error {\n constructor(\n public readonly entity: string,\n public readonly id: string,\n public readonly expected: number,\n public readonly actual: number,\n ) {\n super(\n `Stale version on ${entity} ${id}: expected ${expected}, got ${actual}`,\n );\n this.name = \"StaleVersionError\";\n }\n}\n\n// ============================================================================\n// Record Types (minimal fields needed by the workflow engine)\n// ============================================================================\n\nexport interface WorkflowRunRecord {\n id: string;\n createdAt: Date;\n updatedAt: Date;\n version: number;\n workflowId: string;\n workflowName: string;\n workflowType: string;\n status: WorkflowStatus;\n startedAt: Date | null;\n completedAt: Date | null;\n duration: number | null;\n input: unknown;\n output: unknown | null;\n config: unknown;\n totalCost: number;\n totalTokens: number;\n priority: number;\n}\n\nexport interface WorkflowStageRecord {\n id: string;\n createdAt: Date;\n updatedAt: Date;\n version: number;\n workflowRunId: string;\n stageId: string;\n stageName: string;\n stageNumber: number;\n executionGroup: number;\n status: WorkflowStageStatus;\n startedAt: Date | null;\n completedAt: Date | null;\n duration: number | null;\n inputData: unknown | null;\n outputData: unknown | null;\n config: unknown | null;\n suspendedState: unknown | null;\n resumeData: unknown | null;\n nextPollAt: Date | null;\n pollInterval: number | null;\n maxWaitUntil: Date | null;\n metrics: unknown | null;\n embeddingInfo: unknown | null;\n errorMessage: string | null;\n}\n\nexport interface WorkflowLogRecord {\n id: string;\n createdAt: Date;\n workflowStageId: string | null;\n workflowRunId: string | null;\n level: LogLevel;\n 
message: string;\n metadata: unknown | null;\n}\n\nexport interface WorkflowArtifactRecord {\n id: string;\n createdAt: Date;\n updatedAt: Date;\n workflowRunId: string;\n workflowStageId: string | null;\n key: string;\n type: ArtifactType;\n data: unknown;\n size: number;\n metadata: unknown | null;\n}\n\n// ============================================================================\n// Outbox and Idempotency Record Types (for kernel transactional outbox)\n// ============================================================================\n\nexport interface OutboxRecord {\n id: string;\n workflowRunId: string;\n sequence: number;\n eventType: string;\n payload: unknown;\n causationId: string;\n occurredAt: Date;\n publishedAt: Date | null;\n retryCount: number;\n dlqAt: Date | null;\n}\n\nexport interface CreateOutboxEventInput {\n workflowRunId: string;\n eventType: string;\n payload: unknown;\n causationId: string;\n occurredAt: Date;\n}\n\nexport interface IdempotencyRecord {\n key: string;\n commandType: string;\n result: unknown;\n createdAt: Date;\n}\n\n// ============================================================================\n// AI Call Record Types\n// ============================================================================\n\nexport interface AICallRecord {\n id: string;\n createdAt: Date;\n topic: string;\n callType: string;\n modelKey: string;\n modelId: string;\n prompt: string;\n response: string;\n inputTokens: number;\n outputTokens: number;\n cost: number;\n metadata: unknown | null;\n}\n\nexport interface JobRecord {\n id: string;\n createdAt: Date;\n updatedAt: Date;\n workflowRunId: string;\n workflowId: string;\n stageId: string;\n status: JobStatus;\n priority: number;\n workerId: string | null;\n lockedAt: Date | null;\n startedAt: Date | null;\n completedAt: Date | null;\n attempt: number;\n maxAttempts: number;\n lastError: string | null;\n nextPollAt: Date | null;\n payload: Record<string, unknown>;\n}\n\n// 
============================================================================\n// Input Types (for creating/updating records)\n// ============================================================================\n\nexport interface CreateRunInput {\n id?: string;\n workflowId: string;\n workflowName: string;\n workflowType: string;\n input: unknown;\n config?: unknown;\n priority?: number;\n /** Optional metadata for domain-specific fields */\n metadata?: Record<string, unknown>;\n}\n\nexport interface UpdateRunInput {\n status?: WorkflowStatus;\n startedAt?: Date;\n completedAt?: Date | null;\n duration?: number;\n output?: unknown;\n totalCost?: number;\n totalTokens?: number;\n expectedVersion?: number;\n}\n\nexport interface CreateStageInput {\n workflowRunId: string;\n stageId: string;\n stageName: string;\n stageNumber: number;\n executionGroup: number;\n status?: WorkflowStageStatus;\n startedAt?: Date;\n config?: unknown;\n inputData?: unknown;\n}\n\nexport interface UpdateStageInput {\n status?: WorkflowStageStatus;\n startedAt?: Date;\n completedAt?: Date;\n duration?: number;\n outputData?: unknown;\n config?: unknown;\n suspendedState?: unknown;\n resumeData?: unknown;\n nextPollAt?: Date | null;\n pollInterval?: number;\n maxWaitUntil?: Date;\n metrics?: unknown;\n embeddingInfo?: unknown;\n artifacts?: unknown;\n errorMessage?: string;\n expectedVersion?: number;\n}\n\nexport interface UpsertStageInput {\n workflowRunId: string;\n stageId: string;\n create: CreateStageInput;\n update: UpdateStageInput;\n}\n\nexport interface CreateLogInput {\n workflowRunId?: string;\n workflowStageId?: string;\n level: LogLevel;\n message: string;\n metadata?: unknown;\n}\n\nexport interface SaveArtifactInput {\n workflowRunId: string;\n workflowStageId?: string;\n key: string;\n type: ArtifactType;\n data: unknown;\n size: number;\n metadata?: unknown;\n}\n\nexport interface CreateAICallInput {\n topic: string;\n callType: string;\n modelKey: string;\n modelId: string;\n 
prompt: string;\n response: string;\n inputTokens: number;\n outputTokens: number;\n cost: number;\n metadata?: unknown;\n}\n\nexport interface EnqueueJobInput {\n workflowRunId: string;\n workflowId: string;\n stageId: string;\n priority?: number;\n payload?: Record<string, unknown>;\n scheduledFor?: Date;\n}\n\nexport interface DequeueResult {\n jobId: string;\n workflowRunId: string;\n workflowId: string;\n stageId: string;\n priority: number;\n attempt: number;\n maxAttempts: number;\n payload: Record<string, unknown>;\n}\n\n// ============================================================================\n// WorkflowPersistence Interface\n// ============================================================================\n\nexport interface WorkflowPersistence {\n /** Execute operations within a transaction boundary. */\n withTransaction<T>(fn: (tx: WorkflowPersistence) => Promise<T>): Promise<T>;\n\n // WorkflowRun operations\n createRun(data: CreateRunInput): Promise<WorkflowRunRecord>;\n updateRun(id: string, data: UpdateRunInput): Promise<void>;\n getRun(id: string): Promise<WorkflowRunRecord | null>;\n getRunStatus(id: string): Promise<WorkflowStatus | null>;\n getRunsByStatus(status: WorkflowStatus): Promise<WorkflowRunRecord[]>;\n\n /**\n * Atomically claim a pending workflow run for processing.\n * Uses atomic update with WHERE status = 'PENDING' to prevent race conditions.\n *\n * @param id - The workflow run ID to claim\n * @returns true if successfully claimed, false if already claimed by another worker\n */\n claimPendingRun(id: string): Promise<boolean>;\n\n /**\n * Atomically find and claim the next pending workflow run.\n * Uses FOR UPDATE SKIP LOCKED pattern (in Postgres) to prevent race conditions\n * when multiple workers try to claim workflows simultaneously.\n *\n * Priority ordering: higher priority first, then oldest (FIFO within same priority).\n *\n * @returns The claimed workflow run (now with status RUNNING), or null if no pending runs\n 
*/\n claimNextPendingRun(): Promise<WorkflowRunRecord | null>;\n\n // WorkflowStage operations\n createStage(data: CreateStageInput): Promise<WorkflowStageRecord>;\n upsertStage(data: UpsertStageInput): Promise<WorkflowStageRecord>;\n updateStage(id: string, data: UpdateStageInput): Promise<void>;\n updateStageByRunAndStageId(\n workflowRunId: string,\n stageId: string,\n data: UpdateStageInput,\n ): Promise<void>;\n getStage(runId: string, stageId: string): Promise<WorkflowStageRecord | null>;\n getStageById(id: string): Promise<WorkflowStageRecord | null>;\n getStagesByRun(\n runId: string,\n options?: { status?: WorkflowStageStatus; orderBy?: \"asc\" | \"desc\" },\n ): Promise<WorkflowStageRecord[]>;\n getSuspendedStages(beforeDate: Date): Promise<WorkflowStageRecord[]>;\n getFirstSuspendedStageReadyToResume(\n runId: string,\n ): Promise<WorkflowStageRecord | null>;\n getFirstFailedStage(runId: string): Promise<WorkflowStageRecord | null>;\n getLastCompletedStage(runId: string): Promise<WorkflowStageRecord | null>;\n getLastCompletedStageBefore(\n runId: string,\n executionGroup: number,\n ): Promise<WorkflowStageRecord | null>;\n deleteStage(id: string): Promise<void>;\n\n // WorkflowLog operations\n createLog(data: CreateLogInput): Promise<void>;\n\n // WorkflowArtifact operations (for StageStorage)\n saveArtifact(data: SaveArtifactInput): Promise<void>;\n loadArtifact(runId: string, key: string): Promise<unknown>;\n hasArtifact(runId: string, key: string): Promise<boolean>;\n deleteArtifact(runId: string, key: string): Promise<void>;\n listArtifacts(runId: string): Promise<WorkflowArtifactRecord[]>;\n getStageIdForArtifact(runId: string, stageId: string): Promise<string | null>;\n\n // Stage output convenience methods (replaces separate StageStorage)\n saveStageOutput(\n runId: string,\n workflowType: string,\n stageId: string,\n output: unknown,\n ): Promise<string>;\n\n // Outbox DLQ operations\n /** Increment retry count for a failed outbox event. 
Returns new count. */\n incrementOutboxRetryCount(id: string): Promise<number>;\n\n /** Move an outbox event to DLQ (sets dlqAt). */\n moveOutboxEventToDLQ(id: string): Promise<void>;\n\n /** Reset DLQ events so they can be reprocessed by outbox.flush. Returns count reset. */\n replayDLQEvents(maxEvents: number): Promise<number>;\n\n // Outbox operations\n /** Write events to the outbox. Sequences are auto-assigned per workflowRunId. */\n appendOutboxEvents(events: CreateOutboxEventInput[]): Promise<void>;\n\n /** Read unpublished events ordered by (workflowRunId, sequence). */\n getUnpublishedOutboxEvents(limit?: number): Promise<OutboxRecord[]>;\n\n /** Mark events as published. */\n markOutboxEventsPublished(ids: string[]): Promise<void>;\n\n // Idempotency operations\n /** Atomically acquire an idempotency key for command execution. */\n acquireIdempotencyKey(\n key: string,\n commandType: string,\n ): Promise<\n | { status: \"acquired\" }\n | { status: \"replay\"; result: unknown }\n | { status: \"in_progress\" }\n >;\n\n /** Mark an idempotency key as completed and cache the command result. */\n completeIdempotencyKey(\n key: string,\n commandType: string,\n result: unknown,\n ): Promise<void>;\n\n /** Release an in-progress idempotency key after command failure. 
*/\n releaseIdempotencyKey(key: string, commandType: string): Promise<void>;\n}\n\n// ============================================================================\n// AICallLogger Interface\n// ============================================================================\n\nexport interface AIHelperStats {\n totalCalls: number;\n totalInputTokens: number;\n totalOutputTokens: number;\n totalCost: number;\n perModel: Record<\n string,\n { calls: number; inputTokens: number; outputTokens: number; cost: number }\n >;\n}\n\nexport interface AICallLogger {\n /**\n * Log a single AI call (fire and forget)\n */\n logCall(call: CreateAICallInput): void;\n\n /**\n * Log batch results (for recording batch API results)\n */\n logBatchResults(batchId: string, results: CreateAICallInput[]): Promise<void>;\n\n /**\n * Get aggregated stats for a topic prefix\n */\n getStats(topicPrefix: string): Promise<AIHelperStats>;\n\n /**\n * Check if batch results are already recorded\n */\n isRecorded(batchId: string): Promise<boolean>;\n}\n\n// ============================================================================\n// JobQueue Interface\n// ============================================================================\n\nexport interface JobQueue {\n /**\n * Add a new job to the queue\n */\n enqueue(options: EnqueueJobInput): Promise<string>;\n\n /**\n * Enqueue multiple stages in parallel (same execution group)\n */\n enqueueParallel(jobs: EnqueueJobInput[]): Promise<string[]>;\n\n /**\n * Atomically dequeue the next available job\n */\n dequeue(): Promise<DequeueResult | null>;\n\n /**\n * Mark job as completed\n */\n complete(jobId: string): Promise<void>;\n\n /**\n * Mark job as suspended (for async-batch)\n */\n suspend(jobId: string, nextPollAt: Date): Promise<void>;\n\n /**\n * Mark job as failed\n */\n fail(jobId: string, error: string, shouldRetry?: boolean): Promise<void>;\n\n /**\n * Get suspended jobs that are ready to be checked\n */\n getSuspendedJobsReadyToPoll(): 
Promise<\n Array<{ jobId: string; stageId: string; workflowRunId: string }>\n >;\n\n /**\n * Release stale locks (for crashed workers)\n */\n releaseStaleJobs(staleThresholdMs?: number): Promise<number>;\n}\n\n// ============================================================================\n// Default Implementations (lazy loaded to avoid circular deps)\n// ============================================================================\n\n// Re-export from prisma implementations for convenience\n// These will be the default implementations used when no custom persistence is provided\n"]}
@@ -0,0 +1 @@
1
+ #!/usr/bin/env node