@mastra/memory 1.1.0-alpha.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +95 -0
- package/dist/chunk-6TXUWFIU.js +3188 -0
- package/dist/chunk-6TXUWFIU.js.map +1 -0
- package/dist/chunk-FQJWVCDF.cjs +3205 -0
- package/dist/chunk-FQJWVCDF.cjs.map +1 -0
- package/dist/docs/README.md +1 -1
- package/dist/docs/SKILL.md +12 -1
- package/dist/docs/SOURCE_MAP.json +62 -2
- package/dist/docs/memory/02-storage.md +10 -0
- package/dist/index.cjs +96 -1
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +53 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +96 -1
- package/dist/index.js.map +1 -1
- package/dist/observational-memory-3Q42SITP.cjs +52 -0
- package/dist/observational-memory-3Q42SITP.cjs.map +1 -0
- package/dist/observational-memory-VXLHOSDZ.js +3 -0
- package/dist/observational-memory-VXLHOSDZ.js.map +1 -0
- package/dist/processors/index.cjs +52 -0
- package/dist/processors/index.cjs.map +1 -0
- package/dist/processors/index.d.ts +2 -0
- package/dist/processors/index.d.ts.map +1 -0
- package/dist/processors/index.js +3 -0
- package/dist/processors/index.js.map +1 -0
- package/dist/processors/observational-memory/index.d.ts +18 -0
- package/dist/processors/observational-memory/index.d.ts.map +1 -0
- package/dist/processors/observational-memory/observational-memory.d.ts +579 -0
- package/dist/processors/observational-memory/observational-memory.d.ts.map +1 -0
- package/dist/processors/observational-memory/observer-agent.d.ts +117 -0
- package/dist/processors/observational-memory/observer-agent.d.ts.map +1 -0
- package/dist/processors/observational-memory/reflector-agent.d.ts +46 -0
- package/dist/processors/observational-memory/reflector-agent.d.ts.map +1 -0
- package/dist/processors/observational-memory/token-counter.d.ts +30 -0
- package/dist/processors/observational-memory/token-counter.d.ts.map +1 -0
- package/dist/processors/observational-memory/types.d.ts +288 -0
- package/dist/processors/observational-memory/types.d.ts.map +1 -0
- package/package.json +18 -8
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/processors/observational-memory/observer-agent.ts","../src/processors/observational-memory/reflector-agent.ts","../src/processors/observational-memory/token-counter.ts","../src/processors/observational-memory/observational-memory.ts"],"names":[],"mappings":";;;;;;;;;;;AAOA,IAAM,uCAAA,GAA0C,CAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,qQAAA,CAAA;AAmDhD,IAAM,oBAAoB,OAAA,CAAQ,GAAA,CAAI,yBAAyB,GAAA,IAAO,OAAA,CAAQ,IAAI,oBAAA,KAAyB,MAAA;AAC3G,IAAM,uBACJ,OAAA,CAAQ,GAAA,CAAI,4BAA4B,GAAA,IAAO,OAAA,CAAQ,IAAI,uBAAA,KAA4B,MAAA;AAOzF,IAAM,0CAAA,GAA6C,CAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;;AAAA,sOAAA,CAAA;AAmDnD,IAAM,wCAAA,GAA2C,CAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,qQAAA,CAAA;AAyM1C,IAAM,gCAAA,GAAmC,oBAAA,GAC5C,0CAAA,GACA,iBAAA,GACE,uCAAA,GACA,wCAAA;AAUN,IAAM,gCAAA,GAAmC,CAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAA
A;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA,qBAAA,CAAA;AA8ClC,IAAM,2BAAA,GAA8B,CAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAA,CAAA;AAyC3C,IAAM,6BAAA,GAAgC,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wFAAA,CAAA;AAc/B,IAAM,mBAAA,GAAsB,uBAC/B,6BAAA,GACA,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yFAAA,CAAA;AAiBG,SAAS,yBAAA,CAA0B,cAAuB,KAAA,EAAe;AAG9E,EAAA,MAAM,YAAA,GAAe,uBAAuB,gCAAA,GAAmC,2BAAA;AAE/E,EAAA,IAAI,WAAA,EAAa;AACf,IAAA,OAAO,CAAA;;AAAA;;AAAA,EAIT,gCAAgC;;AAAA;;AAAA;AAAA;;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA,EA+ChC,mBAAmB;;AAAA;;AAAA,kJAAA,CAAA;AAAA,EAKnB;AAEA,EAAA,OAAO,CAAA;;AAAA;;AAAA,EAIP,gCAAgC;;AAAA;;AAAA;;AAAA,EAMhC,YAAY;;AAAA;;AAAA,EAIZ,mBAAmB;;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA,qSAAA,CAAA;AAWrB;AASO,IAAM,yBAAyB,yBAAA;AAuB/B,SAAS,yBAAA,CAA0B,UAA6B,OAAA,EAA8C;AACnH,EAAA,MAAM,SAAS,OAAA,EAAS,aAAA;AAExB,EAAA,OAAO,QAAA,CACJ,IAAI,CAAA,GAAA,KAAO;AACV,IAAA,MAAM,SAAA,GAAY,IAAI,SAAA,GAClB,IAAI,KAAK,GAAA,CAAI,SAAS,CAAA,CAAE,cAAA,CAAe,OAAA,EAAS;AAAA,MAC9C,IAAA,EAAM,SAAA;AAAA,MACN,KAAA,EAAO,OAAA;AAAA,MACP,GAAA,EAAK,SAAA;AAAA,MACL,IAAA,EAAM,SAAA;AAAA,MACN,MAAA,EAAQ,SAAA;AAAA,MACR,MAAA,EAAQ;AAAA,KACT,CAAA,GACD,EAAA;AAEJ,IAAA,MAAM,IAAA,GAAO,GAAA,CAAI,IAAA,CAAK,MAAA,CAAO,CAAC,CAAA,CAAE,WAAA,EAAY,GAAI,GAAA,CAAI,IAAA,CAAK,KAAA,CAAM,CAAC,CAAA;AAChE,IAAA,MAAM,YAAA,GAAe,SAAA,GAAY,CAAA,EAAA,EAAK,SAAS,CAAA,CAAA,CAAA,GAAM,EAAA;AAKrD,IAAA,IAAI,OAAA,GAAU,EAAA;AACd,IAAA,IAAI,OAAO,GAAA,CAAI,OAAA,KAAY,QAAA,EAAU;AACnC,MAAA,OAAA,GAAU,aAAA,CAAc,GAAA,CAAI,OAAA,EAAS,MAAM,CAAA;AAAA,IAC7C,CAAA,MAAA,IAAW,GAAA,CAAI,OAAA,EAAS,KAAA,IAAS,MAAM,OAAA,CAAQ,GAAA,CAAI,OAAA,CAAQ,KAAK,CAAA,IAAK,GAAA,CAAI,OAAA,CAAQ,KAAA,CAAM,SAAS,CAAA,EAAG;AAEjG,MAAA,OAAA,GAAU,GAAA,CAAI,OAAA,CAAQ,KAAA,CACnB,GAAA,CAAI,CAAA,IAAA,KAAQ;AACX,QAAA,IAA
I,KAAK,IAAA,KAAS,MAAA,SAAe,aAAA,CAAc,IAAA,CAAK,MAAM,MAAM,CAAA;AAChE,QAAA,IAAI,IAAA,CAAK,SAAS,iBAAA,EAAmB;AACnC,UAAA,MAAM,MAAM,IAAA,CAAK,cAAA;AACjB,UAAA,IAAI,GAAA,CAAI,UAAU,QAAA,EAAU;AAC1B,YAAA,MAAM,YAAY,IAAA,CAAK,SAAA,CAAU,GAAA,CAAI,MAAA,EAAQ,MAAM,CAAC,CAAA;AACpD,YAAA,OAAO,CAAA,cAAA,EAAiB,IAAI,QAAQ,CAAA;AAAA,EAAM,aAAA,CAAc,SAAA,EAAW,MAAM,CAAC,CAAA,CAAA;AAAA,UAC5E;AACA,UAAA,MAAM,UAAU,IAAA,CAAK,SAAA,CAAU,GAAA,CAAI,IAAA,EAAM,MAAM,CAAC,CAAA;AAChD,UAAA,OAAO,CAAA,YAAA,EAAe,IAAI,QAAQ,CAAA;AAAA,EAAM,aAAA,CAAc,OAAA,EAAS,MAAM,CAAC,CAAA,CAAA;AAAA,QACxE;AAEA,QAAA,IAAI,IAAA,CAAK,IAAA,EAAM,UAAA,CAAW,sBAAsB,GAAG,OAAO,EAAA;AAC1D,QAAA,OAAO,EAAA;AAAA,MACT,CAAC,CAAA,CACA,MAAA,CAAO,OAAO,CAAA,CACd,KAAK,IAAI,CAAA;AAAA,IACd,CAAA,MAAA,IAAW,GAAA,CAAI,OAAA,EAAS,OAAA,EAAS;AAE/B,MAAA,OAAA,GAAU,aAAA,CAAc,GAAA,CAAI,OAAA,CAAQ,OAAA,EAAS,MAAM,CAAA;AAAA,IACrD;AAEA,IAAA,OAAO,CAAA,EAAA,EAAK,IAAI,CAAA,EAAG,YAAY,CAAA;AAAA,EAAQ,OAAO,CAAA,CAAA;AAAA,EAChD,CAAC,CAAA,CACA,IAAA,CAAK,aAAa,CAAA;AACvB;AAGA,SAAS,aAAA,CAAc,KAAa,MAAA,EAAyB;AAC3D,EAAA,IAAI,CAAC,MAAA,IAAU,GAAA,CAAI,MAAA,IAAU,QAAQ,OAAO,GAAA;AAC5C,EAAA,MAAM,SAAA,GAAY,GAAA,CAAI,KAAA,CAAM,CAAA,EAAG,MAAM,CAAA;AACrC,EAAA,MAAM,SAAA,GAAY,IAAI,MAAA,GAAS,MAAA;AAC/B,EAAA,OAAO,GAAG,SAAS;AAAA,eAAA,EAAoB,SAAS,CAAA,YAAA,CAAA;AAClD;AAMO,SAAS,oCAAA,CACd,kBACA,WAAA,EACQ;AACR,EAAA,MAAM,WAAqB,EAAC;AAE5B,EAAA,KAAA,MAAW,YAAY,WAAA,EAAa;AAClC,IAAA,MAAM,QAAA,GAAW,gBAAA,CAAiB,GAAA,CAAI,QAAQ,CAAA;AAC9C,IAAA,IAAI,CAAC,QAAA,IAAY,QAAA,CAAS,MAAA,KAAW,CAAA,EAAG;AAExC,IAAA,MAAM,iBAAA,GAAoB,0BAA0B,QAAQ,CAAA;AAC5D,IAAA,QAAA,CAAS,IAAA,CAAK,eAAe,QAAQ,CAAA;AAAA,EAAO,iBAAiB;AAAA,SAAA,CAAa,CAAA;AAAA,EAC5E;AAEA,EAAA,OAAO,QAAA,CAAS,KAAK,MAAM,CAAA;AAC7B;AAKO,SAAS,8BAAA,CACd,oBAAA,EACA,gBAAA,EACA,WAAA,EACQ;AACR,EAAA,MAAM,iBAAA,GAAoB,oCAAA,CAAqC,gBAAA,EAAkB,WAAW,CAAA;AAE5F,EAAA,IAAI,MAAA,GAAS,EAAA;AAEb,EAAA,IAAI,oBAAA,EAAsB;AACxB,IAAA,MAAA,IAAU,CAAA;;AAAA,EAA+B,oBAAoB;;AAAA;;AAAA,CAAA;AAC7D,IAAA,MAAA,IACE,qHAAA;AAAA,EACJ;AAEA,EAAA,MAAA,IAAU,CAAA;;AAAA,gCAAA,EAAwE,YAAY,MAAM,CAAA;;AAAA,EAA0F,iBAA
iB;;AAAA;;AAAA,CAAA;AAE/M,EAAA,MAAA,IAAU,CAAA;;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA,eAAA,CAAA;AAEV,EAAA,OAAO,MAAA;AACT;AAeO,SAAS,+BAA+B,MAAA,EAA2C;AACxF,EAAA,MAAM,OAAA,uBAAc,GAAA,EAA4B;AAGhD,EAAA,MAAM,iBAAA,GAAoB,MAAA,CAAO,KAAA,CAAM,0DAA0D,CAAA;AACjG,EAAA,MAAM,mBAAA,GAAsB,iBAAA,GAAoB,CAAC,CAAA,IAAK,MAAA;AAGtD,EAAA,MAAM,WAAA,GAAc,+CAAA;AACpB,EAAA,IAAI,KAAA;AAEJ,EAAA,OAAA,CAAQ,KAAA,GAAQ,WAAA,CAAY,IAAA,CAAK,mBAAmB,OAAO,IAAA,EAAM;AAC/D,IAAA,MAAM,QAAA,GAAW,MAAM,CAAC,CAAA;AACxB,IAAA,MAAM,aAAA,GAAgB,MAAM,CAAC,CAAA;AAC7B,IAAA,IAAI,CAAC,QAAA,IAAY,CAAC,aAAA,EAAe;AAIjC,IAAA,IAAI,YAAA,GAAe,aAAA;AAGnB,IAAA,IAAI,WAAA;AACJ,IAAA,MAAM,gBAAA,GAAmB,aAAA,CAAc,KAAA,CAAM,2CAA2C,CAAA;AACxF,IAAA,IAAI,gBAAA,GAAmB,CAAC,CAAA,EAAG;AACzB,MAAA,WAAA,GAAc,gBAAA,CAAiB,CAAC,CAAA,CAAE,IAAA,EAAK;AACvC,MAAA,YAAA,GAAe,YAAA,CAAa,OAAA,CAAQ,yCAAA,EAA2C,EAAE,CAAA;AAAA,IACnF;AAGA,IAAA,IAAI,qBAAA;AACJ,IAAA,MAAM,cAAA,GAAiB,aAAA,CAAc,KAAA,CAAM,uDAAuD,CAAA;AAClG,IAAA,IAAI,cAAA,GAAiB,CAAC,CAAA,EAAG;AACvB,MAAA,qBAAA,GAAwB,cAAA,CAAe,CAAC,CAAA,CAAE,IAAA,EAAK;AAC/C,MAAA,YAAA,GAAe,YAAA,CAAa,OAAA,CAAQ,qDAAA,EAAuD,EAAE,CAAA;AAAA,IAC/F;AAGA,IAAA,YAAA,GAAe,aAAa,IAAA,EAAK;AAEjC,IAAA,OAAA,CAAQ,IAAI,QAAA,EAAU;AAAA,MACpB,YAAA;AAAA,MACA,WAAA;AAAA,MACA,qBAAA;AAAA,MACA,SAAA,EAAW;AAAA,KACZ,CAAA;AAAA,EACH;AAKA,EAAA,OAAO;AAAA,IACL,OAAA;AAAA,IACA,SAAA,EAAW;AAAA,GACb;AACF;AAMO,SAAS,mBAAA,CACd,sBACA,iBAAA,EACQ;AACR,EAAA,MAAM,iBAAA,GAAoB,0BAA0B,iBAAiB,CAAA;AAErE,EAAA,IAAI,MAAA,GAAS,EAAA;AAEb,EAAA,IAAI,oBAAA,EAAsB;AACxB,IA
AA,MAAA,IAAU,CAAA;;AAAA,EAA+B,oBAAoB;;AAAA;;AAAA,CAAA;AAC7D,IAAA,MAAA,IACE,qHAAA;AAAA,EACJ;AAEA,EAAA,MAAA,IAAU,CAAA;;AAAA,EAAwC,iBAAiB;;AAAA;;AAAA,CAAA;AAEnE,EAAA,MAAA,IAAU,CAAA;;AAAA,CAAA;AACV,EAAA,MAAA,IAAU,CAAA,0MAAA,CAAA;AAEV,EAAA,OAAO,MAAA;AACT;AAMO,SAAS,oBAAoB,MAAA,EAAgC;AAClE,EAAA,MAAM,MAAA,GAAS,sBAAsB,MAAM,CAAA;AAI3C,EAAA,MAAM,YAAA,GAAe,OAAO,YAAA,IAAgB,EAAA;AAE5C,EAAA,OAAO;AAAA,IACL,YAAA;AAAA,IACA,WAAA,EAAa,OAAO,WAAA,IAAe,MAAA;AAAA,IACnC,qBAAA,EAAuB,OAAO,iBAAA,IAAqB,MAAA;AAAA,IACnD,SAAA,EAAW;AAAA,GACb;AACF;AAeO,SAAS,sBAAsB,OAAA,EAAsC;AAC1E,EAAA,MAAM,MAAA,GAA8B;AAAA,IAClC,YAAA,EAAc,EAAA;AAAA,IACd,WAAA,EAAa,EAAA;AAAA,IACb,iBAAA,EAAmB;AAAA,GACrB;AAKA,EAAA,MAAM,iBAAA,GAAoB,2DAAA;AAC1B,EAAA,MAAM,sBAAsB,CAAC,GAAG,OAAA,CAAQ,QAAA,CAAS,iBAAiB,CAAC,CAAA;AACnE,EAAA,IAAI,mBAAA,CAAoB,SAAS,CAAA,EAAG;AAClC,IAAA,MAAA,CAAO,YAAA,GAAe,mBAAA,CACnB,GAAA,CAAI,CAAA,CAAA,KAAK,EAAE,CAAC,CAAA,EAAG,IAAA,EAAK,IAAK,EAAE,CAAA,CAC3B,MAAA,CAAO,OAAO,CAAA,CACd,KAAK,IAAI,CAAA;AAAA,EACd,CAAA,MAAO;AAGL,IAAA,MAAA,CAAO,YAAA,GAAe,qBAAqB,OAAO,CAAA;AAAA,EACpD;AAIA,EAAA,MAAM,gBAAA,GAAmB,OAAA,CAAQ,KAAA,CAAM,0DAA0D,CAAA;AACjG,EAAA,IAAI,gBAAA,GAAmB,CAAC,CAAA,EAAG;AACzB,IAAA,MAAA,CAAO,WAAA,GAAc,gBAAA,CAAiB,CAAC,CAAA,CAAE,IAAA,EAAK;AAAA,EAChD;AAIA,EAAA,MAAM,sBAAA,GAAyB,OAAA,CAAQ,KAAA,CAAM,sEAAsE,CAAA;AACnH,EAAA,IAAI,sBAAA,GAAyB,CAAC,CAAA,EAAG;AAC/B,IAAA,MAAA,CAAO,iBAAA,GAAoB,sBAAA,CAAuB,CAAC,CAAA,CAAE,IAAA,EAAK;AAAA,EAC5D;AAEA,EAAA,OAAO,MAAA;AACT;AAMA,SAAS,qBAAqB,OAAA,EAAyB;AACrD,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA;AAChC,EAAA,MAAM,YAAsB,EAAC;AAE7B,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AAGxB,IAAA,IAAI,aAAa,IAAA,CAAK,IAAI,KAAK,aAAA,CAAc,IAAA,CAAK,IAAI,CAAA,EAAG;AACvD,MAAA,SAAA,CAAU,KAAK,IAAI,CAAA;AAAA,IACrB;AAAA,EACF;AAEA,EAAA,OAAO,SAAA,CAAU,IAAA,CAAK,IAAI,CAAA,CAAE,IAAA,EAAK;AACnC;AAMO,SAAS,sBAAsB,YAAA,EAA+B;AAEnE,EAAA,IAAI,iBAAA,CAAkB,IAAA,CAAK,YAAY,CAAA,EAAG;AACxC,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,MAAM,mBAAA,GAAsB;AAAA,IAC1B,yBAAA;AAAA,IACA,kBAAA;AAAA,IACA,wBAAA;AAAA,IACA;AAAA,GACF;AAEA,EAAA,OAAO,
oBAAoB,IAAA,CAAK,CAAA,OAAA,KAAW,OAAA,CAAQ,IAAA,CAAK,YAAY,CAAC,CAAA;AACvE;AAKO,SAAS,mBAAmB,YAAA,EAAqC;AACtE,EAAA,MAAM,OAAA,GAAU,gBAAA;AAChB,EAAA,MAAM,QAAA,GAAW,iBAAA;AACjB,EAAA,MAAM,QAAA,GAAW,YAAA,CAAa,WAAA,EAAY,CAAE,QAAQ,OAAO,CAAA;AAC3D,EAAA,IAAI,QAAA,KAAa,IAAI,OAAO,IAAA;AAC5B,EAAA,MAAM,YAAA,GAAe,WAAW,OAAA,CAAQ,MAAA;AACxC,EAAA,MAAM,SAAS,YAAA,CAAa,WAAA,EAAY,CAAE,OAAA,CAAQ,UAAU,YAAY,CAAA;AACxE,EAAA,IAAI,MAAA,KAAW,IAAI,OAAO,IAAA;AAC1B,EAAA,MAAM,UAAU,YAAA,CAAa,KAAA,CAAM,YAAA,EAAc,MAAM,EAAE,IAAA,EAAK;AAC9D,EAAA,OAAO,OAAA,IAAW,IAAA;AACpB;AAaO,SAAS,+BAA+B,YAAA,EAA8B;AAC3E,EAAA,IAAI,SAAA,GAAY,YAAA;AAGhB,EAAA,SAAA,GAAY,SAAA,CAAU,OAAA,CAAQ,QAAA,EAAU,EAAE,CAAA;AAC1C,EAAA,SAAA,GAAY,SAAA,CAAU,OAAA,CAAQ,QAAA,EAAU,EAAE,CAAA;AAG1C,EAAA,SAAA,GAAY,SAAA,CAAU,OAAA,CAAQ,uCAAA,EAAyC,EAAE,CAAA;AAGzE,EAAA,SAAA,GAAY,SAAA,CAAU,OAAA,CAAQ,WAAA,EAAa,GAAG,CAAA;AAG9C,EAAA,SAAA,GAAY,SAAA,CAAU,OAAA,CAAQ,MAAA,EAAQ,GAAG,CAAA;AAGzC,EAAA,SAAA,GAAY,SAAA,CAAU,OAAA,CAAQ,SAAA,EAAW,MAAM,CAAA;AAE/C,EAAA,OAAO,UAAU,IAAA,EAAK;AACxB;;;ACl6BO,SAAS,0BAAA,GAAqC;AACnD,EAAA,OAAO,CAAA;;AAAA;AAAA;;AAAA;AAAA,EAMP,gCAAgC;;AAAA;;AAAA,EAIhC,2BAA2B;;AAAA;;AAAA,EAI3B,mBAAmB;AAAA;;AAAA;AAAA;;AAAA;;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA,qSAAA,CAAA;AA+ErB;AAUO,IAAM,wBAAA,GAA2B;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA,CAAA;AAkBjC,SAAS,oBAAA,CAAqB,YAAA,EAAsB,YAAA,EAAuB,gBAAA,EAAoC;AACpH,EAAA,IAAI,MAAA,GAAS,CAAA;;AAAA,EAEb,YAAY;;AAAA;;AAAA,wIAAA,CAAA;AAMZ,EAAA,IAAI,YAAA,EAAc;AAChB,IAAA,MAAA,IAAU;;AAAA;;AAAA,EAIZ,YAAY,CAAA,CAAA;AAAA,EACZ;AAEA,EAAA,IAAI,gBAAA,EAAkB;AACpB,IAAA,MAAA,IAAU;;AAAA,EAEZ,wBAAwB,CAAA,CAAA;AAAA,EACxB;AAEA,EAAA,OAAO,MAAA;AACT;AAMO,SAAS,qBAAqB,MAAA,EAAiC;AACpE,EAAA,MAAM,MAAA,GAAS,yBAAyB,MAAM,CAAA;AAI9C,EAAA,MAAM,YAAA,GAAe,OAAO,YAAA,IAAgB,EAAA;A
AE5C,EAAA,OAAO;AAAA,IACL,YAAA;AAAA,IACA,qBAAA,EAAuB,OAAO,iBAAA,IAAqB;AAAA;AAAA,GAErD;AACF;AAeA,SAAS,yBAAyB,OAAA,EAAyC;AACzE,EAAA,MAAM,MAAA,GAAiC;AAAA,IACrC,YAAA,EAAc,EAAA;AAAA,IACd,WAAA,EAAa,EAAA;AAAA,IACb,iBAAA,EAAmB;AAAA,GACrB;AAKA,EAAA,MAAM,iBAAA,GAAoB,2DAAA;AAC1B,EAAA,MAAM,sBAAsB,CAAC,GAAG,OAAA,CAAQ,QAAA,CAAS,iBAAiB,CAAC,CAAA;AACnE,EAAA,IAAI,mBAAA,CAAoB,SAAS,CAAA,EAAG;AAClC,IAAA,MAAA,CAAO,YAAA,GAAe,mBAAA,CACnB,GAAA,CAAI,CAAA,CAAA,KAAK,EAAE,CAAC,CAAA,EAAG,IAAA,EAAK,IAAK,EAAE,CAAA,CAC3B,MAAA,CAAO,OAAO,CAAA,CACd,KAAK,IAAI,CAAA;AAAA,EACd,CAAA,MAAO;AAEL,IAAA,MAAM,SAAA,GAAY,0BAA0B,OAAO,CAAA;AACnD,IAAA,MAAA,CAAO,YAAA,GAAe,SAAA,IAAa,OAAA,CAAQ,IAAA,EAAK;AAAA,EAClD;AAGA,EAAA,MAAM,gBAAA,GAAmB,OAAA,CAAQ,KAAA,CAAM,2CAA2C,CAAA;AAClF,EAAA,IAAI,gBAAA,GAAmB,CAAC,CAAA,EAAG;AACzB,IAAA,MAAA,CAAO,WAAA,GAAc,gBAAA,CAAiB,CAAC,CAAA,CAAE,IAAA,EAAK;AAAA,EAChD;AAGA,EAAA,MAAM,sBAAA,GAAyB,OAAA,CAAQ,KAAA,CAAM,uDAAuD,CAAA;AACpG,EAAA,IAAI,sBAAA,GAAyB,CAAC,CAAA,EAAG;AAC/B,IAAA,MAAA,CAAO,iBAAA,GAAoB,sBAAA,CAAuB,CAAC,CAAA,CAAE,IAAA,EAAK;AAAA,EAC5D;AAEA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,0BAA0B,OAAA,EAAyB;AAC1D,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA;AAChC,EAAA,MAAM,YAAsB,EAAC;AAE7B,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AAExB,IAAA,IAAI,aAAa,IAAA,CAAK,IAAI,KAAK,aAAA,CAAc,IAAA,CAAK,IAAI,CAAA,EAAG;AACvD,MAAA,SAAA,CAAU,KAAK,IAAI,CAAA;AAAA,IACrB;AAAA,EACF;AAEA,EAAA,OAAO,SAAA,CAAU,IAAA,CAAK,IAAI,CAAA,CAAE,IAAA,EAAK;AACnC;AASO,SAAS,mBAAA,CAAoB,iBAAyB,eAAA,EAAkC;AAE7F,EAAA,OAAO,eAAA,GAAkB,eAAA;AAC3B;AChQO,IAAM,YAAA,GAAN,MAAM,aAAA,CAAa;AAAA,EAChB,OAAA;AAAA;AAAA;AAAA;AAAA,EAKR,OAAwB,kBAAA,GAAqB,GAAA;AAAA;AAAA,EAE7C,OAAwB,uBAAA,GAA0B,EAAA;AAAA,EAElD,YAAY,QAAA,EAAwB;AAClC,IAAA,IAAA,CAAK,OAAA,GAAU,IAAI,QAAA,CAAS,QAAA,IAAY,UAAU,CAAA;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,IAAA,EAAsB;AAChC,IAAA,IAAI,CAAC,MAAM,OAAO,CAAA;AAElB,IAAA,OAAO,IAAA,CAAK,OAAA,CAAQ,MAAA,CAAO,IAAA,EAAM,KAAK,CAAA,CAAE,MAAA;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,OAAA,EAAkC;AAC7C,IAAA,IAAI,cAAc,OAAA,CAAQ,IAAA;AAC1B,IAAA,IAAI,WAAW,aAAA,CAAa,kBAA
A;AAC5B,IAAA,IAAI,eAAA,GAAkB,CAAA;AAEtB,IAAA,IAAI,OAAO,OAAA,CAAQ,OAAA,KAAY,QAAA,EAAU;AACvC,MAAA,WAAA,IAAe,OAAA,CAAQ,OAAA;AAAA,IACzB,WAAW,OAAA,CAAQ,OAAA,IAAW,OAAO,OAAA,CAAQ,YAAY,QAAA,EAAU;AACjE,MAAA,IAAI,OAAA,CAAQ,QAAQ,OAAA,IAAW,CAAC,MAAM,OAAA,CAAQ,OAAA,CAAQ,OAAA,CAAQ,KAAK,CAAA,EAAG;AACpE,QAAA,WAAA,IAAe,QAAQ,OAAA,CAAQ,OAAA;AAAA,MACjC,WAAW,KAAA,CAAM,OAAA,CAAQ,OAAA,CAAQ,OAAA,CAAQ,KAAK,CAAA,EAAG;AAC/C,QAAA,KAAA,MAAW,IAAA,IAAQ,OAAA,CAAQ,OAAA,CAAQ,KAAA,EAAO;AACxC,UAAA,IAAI,IAAA,CAAK,SAAS,MAAA,EAAQ;AACxB,YAAA,WAAA,IAAe,IAAA,CAAK,IAAA;AAAA,UACtB,CAAA,MAAA,IAAW,IAAA,CAAK,IAAA,KAAS,iBAAA,EAAmB;AAC1C,YAAA,MAAM,aAAa,IAAA,CAAK,cAAA;AACxB,YAAA,IAAI,UAAA,CAAW,KAAA,KAAU,MAAA,IAAU,UAAA,CAAW,UAAU,cAAA,EAAgB;AACtE,cAAA,IAAI,WAAW,QAAA,EAAU;AACvB,gBAAA,WAAA,IAAe,UAAA,CAAW,QAAA;AAAA,cAC5B;AACA,cAAA,IAAI,WAAW,IAAA,EAAM;AACnB,gBAAA,IAAI,OAAO,UAAA,CAAW,IAAA,KAAS,QAAA,EAAU;AACvC,kBAAA,WAAA,IAAe,UAAA,CAAW,IAAA;AAAA,gBAC5B,CAAA,MAAO;AACL,kBAAA,WAAA,IAAe,IAAA,CAAK,SAAA,CAAU,UAAA,CAAW,IAAI,CAAA;AAG7C,kBAAA,QAAA,IAAY,EAAA;AAAA,gBACd;AAAA,cACF;AAAA,YACF,CAAA,MAAA,IAAW,UAAA,CAAW,KAAA,KAAU,QAAA,EAAU;AACxC,cAAA,eAAA,EAAA;AACA,cAAA,IAAI,UAAA,CAAW,WAAW,MAAA,EAAW;AACnC,gBAAA,IAAI,OAAO,UAAA,CAAW,MAAA,KAAW,QAAA,EAAU;AACzC,kBAAA,WAAA,IAAe,UAAA,CAAW,MAAA;AAAA,gBAC5B,CAAA,MAAO;AACL,kBAAA,WAAA,IAAe,IAAA,CAAK,SAAA,CAAU,UAAA,CAAW,MAAM,CAAA;AAC/C,kBAAA,QAAA,IAAY,EAAA;AAAA,gBACd;AAAA,cACF;AAAA,YACF,CAAA,MAAO;AACL,cAAA,MAAM,IAAI,KAAA;AAAA,gBACR,oCAAqC,IAAA,CAAa,cAAA,EAAgB,KAAK,CAAA,mCAAA,EAAsC,KAAK,IAAI,CAAA,CAAA;AAAA,eACxH;AAAA,YACF;AAAA,UACF,CAAA,MAAO;AACL,YAAA,WAAA,IAAe,IAAA,CAAK,UAAU,IAAI,CAAA;AAAA,UACpC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,IAAA,IAAI,kBAAkB,CAAA,EAAG;AACvB,MAAA,QAAA,IAAY,kBAAkB,aAAA,CAAa,kBAAA;AAAA,IAC7C;AAGA,IAAA,OAAO,KAAK,OAAA,CAAQ,MAAA,CAAO,WAAA,EAAa,KAAK,EAAE,MAAA,GAAS,QAAA;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,QAAA,EAAqC;AACjD,IAAA,IAAI,CAAC,QAAA,IAAY,QAAA,CAAS,MAAA,KAAW,GAAG,OAAO,CAAA;AAE/C,IAAA,IAAI,QAAQ,aAAA,CAAa,uBAAA;AACzB,IAAA,KAAA,MAAW,WAAW,QAAA,EAAU;AAC9B,MAAA,KAAA,IAAS,IAAA,CAAK
,aAAa,OAAO,CAAA;AAAA,IACpC;AACA,IAAA,OAAO,KAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,YAAA,EAA8B;AAC9C,IAAA,OAAO,IAAA,CAAK,YAAY,YAAY,CAAA;AAAA,EACtC;AACF;;;ACpEA,SAAS,kBAAA,CAAmB,MAAY,WAAA,EAA2B;AACjE,EAAA,MAAM,MAAA,GAAS,WAAA,CAAY,OAAA,EAAQ,GAAI,KAAK,OAAA,EAAQ;AACpD,EAAA,MAAM,WAAW,IAAA,CAAK,KAAA,CAAM,UAAU,GAAA,GAAO,EAAA,GAAK,KAAK,EAAA,CAAG,CAAA;AAE1D,EAAA,IAAI,QAAA,KAAa,GAAG,OAAO,OAAA;AAC3B,EAAA,IAAI,QAAA,KAAa,GAAG,OAAO,WAAA;AAC3B,EAAA,IAAI,QAAA,GAAW,CAAA,EAAG,OAAO,CAAA,EAAG,QAAQ,CAAA,SAAA,CAAA;AACpC,EAAA,IAAI,QAAA,GAAW,IAAI,OAAO,YAAA;AAC1B,EAAA,IAAI,QAAA,GAAW,IAAI,OAAO,CAAA,EAAG,KAAK,KAAA,CAAM,QAAA,GAAW,CAAC,CAAC,CAAA,UAAA,CAAA;AACrD,EAAA,IAAI,QAAA,GAAW,IAAI,OAAO,aAAA;AAC1B,EAAA,IAAI,QAAA,GAAW,KAAK,OAAO,CAAA,EAAG,KAAK,KAAA,CAAM,QAAA,GAAW,EAAE,CAAC,CAAA,WAAA,CAAA;AACvD,EAAA,OAAO,CAAA,EAAG,IAAA,CAAK,KAAA,CAAM,QAAA,GAAW,GAAG,CAAC,CAAA,KAAA,EAAQ,IAAA,CAAK,KAAA,CAAM,QAAA,GAAW,GAAG,CAAA,GAAI,CAAA,GAAI,MAAM,EAAE,CAAA,IAAA,CAAA;AACvF;AAMA,SAAS,qBAAA,CAAsB,UAAgB,QAAA,EAA+B;AAC5E,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,OAAA,EAAQ,GAAI,SAAS,OAAA,EAAQ;AACrD,EAAA,MAAM,WAAW,IAAA,CAAK,KAAA,CAAM,UAAU,GAAA,GAAO,EAAA,GAAK,KAAK,EAAA,CAAG,CAAA;AAE1D,EAAA,IAAI,YAAY,CAAA,EAAG;AACjB,IAAA,OAAO,IAAA;AAAA,EACT,CAAA,MAAA,IAAW,WAAW,CAAA,EAAG;AACvB,IAAA,OAAO,IAAI,QAAQ,CAAA,YAAA,CAAA;AAAA,EACrB,CAAA,MAAA,IAAW,WAAW,EAAA,EAAI;AACxB,IAAA,OAAO,CAAA,cAAA,CAAA;AAAA,EACT,CAAA,MAAA,IAAW,WAAW,EAAA,EAAI;AACxB,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,QAAA,GAAW,CAAC,CAAA;AACrC,IAAA,OAAO,IAAI,KAAK,CAAA,aAAA,CAAA;AAAA,EAClB,CAAA,MAAA,IAAW,WAAW,EAAA,EAAI;AACxB,IAAA,OAAO,CAAA,eAAA,CAAA;AAAA,EACT,CAAA,MAAO;AACL,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,QAAA,GAAW,EAAE,CAAA;AACvC,IAAA,OAAO,IAAI,MAAM,CAAA,cAAA,CAAA;AAAA,EACnB;AACF;AAWA,SAAS,qBAAqB,WAAA,EAAkC;AAC9D,EAAA,IAAI,UAAA,GAA0B,IAAA;AAG9B,EAAA,MAAM,eAAA,GAAkB,WAAA,CAAY,KAAA,CAAM,uCAAuC,CAAA;AACjF,EAAA,IAAI,eAAA,EAAiB;AACnB,IAAA,MAAM,MAAA,mBAAS,IAAI,IAAA,CAAK,CAAA,EAAG,gBAAgB,CAAC,CAAC,CAAA,CAAA,EAAI,eAAA,CAAgB,CAAC,CAAC,CAAA,EAAA,EAAK,eAAA,CAAgB,CAAC,CAAC,CAAA,CAAE,CA
AA;AAC5F,IAAA,IAAI,CAAC,KAAA,CAAM,MAAA,CAAO,OAAA,EAAS,CAAA,EAAG;AAC5B,MAAA,UAAA,GAAa,MAAA;AAAA,IACf;AAAA,EACF;AAGA,EAAA,IAAI,CAAC,UAAA,EAAY;AACf,IAAA,MAAM,UAAA,GAAa,WAAA,CAAY,KAAA,CAAM,+CAA+C,CAAA;AACpF,IAAA,IAAI,UAAA,EAAY;AACd,MAAA,MAAM,MAAA,mBAAS,IAAI,IAAA,CAAK,CAAA,EAAG,WAAW,CAAC,CAAC,CAAA,CAAA,EAAI,UAAA,CAAW,CAAC,CAAC,CAAA,EAAA,EAAK,UAAA,CAAW,CAAC,CAAC,CAAA,CAAE,CAAA;AAC7E,MAAA,IAAI,CAAC,KAAA,CAAM,MAAA,CAAO,OAAA,EAAS,CAAA,EAAG;AAC5B,QAAA,UAAA,GAAa,MAAA;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAGA,EAAA,IAAI,CAAC,UAAA,EAAY;AACf,IAAA,MAAM,aAAa,WAAA,CAAY,KAAA;AAAA,MAC7B;AAAA,KACF;AACA,IAAA,IAAI,UAAA,EAAY;AACd,MAAA,MAAM,KAAA,GAAQ,WAAW,CAAC,CAAA;AAC1B,MAAA,MAAM,IAAA,GAAO,WAAW,CAAC,CAAA;AACzB,MAAA,MAAM,QAAA,GAAW,UAAA,CAAW,CAAC,CAAA,CAAG,WAAA,EAAY;AAC5C,MAAA,IAAI,GAAA,GAAM,EAAA;AACV,MAAA,IAAI,QAAA,KAAa,SAAS,GAAA,GAAM,CAAA;AAChC,MAAA,IAAI,QAAA,KAAa,QAAQ,GAAA,GAAM,EAAA;AAC/B,MAAA,MAAM,MAAA,uBAAa,IAAA,CAAK,CAAA,EAAG,KAAK,CAAA,CAAA,EAAI,GAAG,CAAA,EAAA,EAAK,IAAI,CAAA,CAAE,CAAA;AAClD,MAAA,IAAI,CAAC,KAAA,CAAM,MAAA,CAAO,OAAA,EAAS,CAAA,EAAG;AAC5B,QAAA,UAAA,GAAa,MAAA;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAGA,EAAA,IAAI,CAAC,UAAA,EAAY;AACf,IAAA,MAAM,eAAA,GAAkB,WAAA,CAAY,KAAA,CAAM,4DAA4D,CAAA;AACtG,IAAA,IAAI,eAAA,EAAiB;AAEnB,MAAA,MAAM,MAAA,mBAAS,IAAI,IAAA,CAAK,CAAA,EAAG,eAAA,CAAgB,CAAC,CAAC,CAAA,IAAA,EAAO,eAAA,CAAgB,CAAC,CAAC,CAAA,CAAE,CAAA;AACxE,MAAA,IAAI,CAAC,KAAA,CAAM,MAAA,CAAO,OAAA,EAAS,CAAA,EAAG;AAC5B,QAAA,UAAA,GAAa,MAAA;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,UAAA;AACT;AAKA,SAAS,0BAA0B,IAAA,EAAuB;AACxD,EAAA,MAAM,oBAAA,GAAuB;AAAA,IAC3B,sCAAA;AAAA,IACA,kBAAA;AAAA,IACA,oBAAA;AAAA,IACA,6BAAA;AAAA,IACA,iBAAA;AAAA,IACA,oBAAA;AAAA,IACA,kBAAA;AAAA,IACA,kBAAA;AAAA,IACA;AAAA,GACF;AACA,EAAA,OAAO,qBAAqB,IAAA,CAAK,CAAA,OAAA,KAAW,OAAA,CAAQ,IAAA,CAAK,IAAI,CAAC,CAAA;AAChE;AAEA,SAAS,0BAAA,CAA2B,cAAsB,WAAA,EAA2B;AAOnF,EAAA,MAAM,eAAA,GAAkB,0CAAA;AAExB,EAAA,OAAO,aAAa,OAAA,CAAQ,eAAA,EAAiB,CAAC,KAAA,EAAO,QAAgB,WAAA,KAAwB;AAC3F,IAAA,MAAM,UAAA,GAAa,qBAAqB,WAAW,CAAA;AAEnD,IAAA,IAAI,UAAA,EAAY;AACd,MAAA,MAAM,QAAA,G
AAW,kBAAA,CAAmB,UAAA,EAAY,WAAW,CAAA;AAI3D,MAAA,MAAM,UAAA,GAAa,YAAA,CAAa,OAAA,CAAQ,KAAK,CAAA;AAC7C,MAAA,MAAM,SAAA,GAAY,YAAA,CAAa,WAAA,CAAY,IAAA,EAAM,UAAU,CAAA,GAAI,CAAA;AAC/D,MAAA,MAAM,cAAA,GAAiB,YAAA,CAAa,SAAA,CAAU,SAAA,EAAW,UAAU,CAAA;AAEnE,MAAA,MAAM,aAAa,UAAA,GAAa,WAAA;AAChC,MAAA,MAAM,cAAA,GAAiB,0BAA0B,cAAc,CAAA;AAE/D,MAAA,IAAI,cAAc,cAAA,EAAgB;AAEhC,QAAA,OAAO,CAAA,CAAA,EAAI,MAAM,CAAA,CAAA,EAAI,WAAW,MAAM,QAAQ,CAAA,0BAAA,CAAA;AAAA,MAChD;AAEA,MAAA,OAAO,CAAA,CAAA,EAAI,MAAM,CAAA,CAAA,EAAI,WAAW,MAAM,QAAQ,CAAA,CAAA,CAAA;AAAA,IAChD;AAGA,IAAA,OAAO,KAAA;AAAA,EACT,CAAC,CAAA;AACH;AAEA,SAAS,6BAAA,CAA8B,cAAsB,WAAA,EAA2B;AAEtF,EAAA,MAAM,eAAA,GAAkB,0BAAA,CAA2B,YAAA,EAAc,WAAW,CAAA;AAG5E,EAAA,MAAM,eAAA,GAAkB,4CAAA;AAGxB,EAAA,MAAM,QAAyF,EAAC;AAChG,EAAA,IAAI,UAAA;AACJ,EAAA,OAAA,CAAQ,UAAA,GAAa,eAAA,CAAgB,IAAA,CAAK,eAAe,OAAO,IAAA,EAAM;AACpE,IAAA,MAAM,OAAA,GAAU,WAAW,CAAC,CAAA;AAC5B,IAAA,MAAM,MAAA,GAAS,IAAI,IAAA,CAAK,OAAO,CAAA;AAC/B,IAAA,IAAI,CAAC,KAAA,CAAM,MAAA,CAAO,OAAA,EAAS,CAAA,EAAG;AAC5B,MAAA,KAAA,CAAM,IAAA,CAAK;AAAA,QACT,OAAO,UAAA,CAAW,KAAA;AAAA,QAClB,IAAA,EAAM,MAAA;AAAA,QACN,KAAA,EAAO,WAAW,CAAC,CAAA;AAAA,QACnB,MAAA,EAAQ,WAAW,CAAC,CAAA;AAAA,QACpB;AAAA,OACD,CAAA;AAAA,IACH;AAAA,EACF;AAGA,EAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG;AACtB,IAAA,OAAO,eAAA;AAAA,EACT;AAGA,EAAA,IAAI,MAAA,GAAS,EAAA;AACb,EAAA,IAAI,SAAA,GAAY,CAAA;AAEhB,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,KAAA,CAAM,QAAQ,CAAA,EAAA,EAAK;AACrC,IAAA,MAAM,IAAA,GAAO,MAAM,CAAC,CAAA;AACpB,IAAA,MAAM,OAAO,CAAA,GAAI,CAAA,GAAI,KAAA,CAAM,CAAA,GAAI,CAAC,CAAA,GAAK,IAAA;AAGrC,IAAA,MAAA,IAAU,eAAA,CAAgB,KAAA,CAAM,SAAA,EAAW,IAAA,CAAK,KAAK,CAAA;AAGrD,IAAA,IAAI,IAAA,EAAM;AACR,MAAA,MAAM,GAAA,GAAM,qBAAA,CAAsB,IAAA,CAAK,IAAA,EAAM,KAAK,IAAI,CAAA;AACtD,MAAA,IAAI,GAAA,EAAK;AACP,QAAA,MAAA,IAAU;AAAA,EAAK,GAAG;;AAAA,CAAA;AAAA,MACpB;AAAA,IACF;AAGA,IAAA,MAAM,QAAA,GAAW,kBAAA,CAAmB,IAAA,CAAK,IAAA,EAAM,WAAW,CAAA;AAC1D,IAAA,MAAA,IAAU,GAAG,IAAA,CAAK,MAAM,GAAG,IAAA,CAAK,OAAO,KAAK,QAAQ,CAAA,CAAA,CAAA;AAEpD,IAAA,SAAA,GAAY,IAAA,CAAK,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,MAAA;A
AAA,EACtC;AAGA,EAAA,MAAA,IAAU,eAAA,CAAgB,MAAM,SAAS,CAAA;AAEzC,EAAA,OAAO,MAAA;AACT;AA0IO,IAAM,6BAAA,GAAgC;AAAA,EAC3C,WAAA,EAAa;AAAA,IACX,KAAA,EAAO,yBAAA;AAAA,IACP,aAAA,EAAe,GAAA;AAAA,IACf,aAAA,EAAe;AAAA,MACb,WAAA,EAAa,GAAA;AAAA,MACb,eAAA,EAAiB;AAAA,KACnB;AAAA,IACA,eAAA,EAAiB;AAAA,MACf,MAAA,EAAQ;AAAA,QACN,cAAA,EAAgB;AAAA,UACd,cAAA,EAAgB;AAAA;AAClB;AACF,KACF;AAAA,IACA,iBAAA,EAAmB;AAAA,GACrB;AAAA,EACA,UAAA,EAAY;AAAA,IACV,KAAA,EAAO,yBAAA;AAAA,IACP,iBAAA,EAAmB,GAAA;AAAA,IACnB,aAAA,EAAe;AAAA,MACb,WAAA,EAAa,CAAA;AAAA;AAAA,MACb,eAAA,EAAiB;AAAA,KACnB;AAAA,IACA,eAAA,EAAiB;AAAA,MACf,MAAA,EAAQ;AAAA,QACN,cAAA,EAAgB;AAAA,UACd,cAAA,EAAgB;AAAA;AAClB;AACF;AACF;AAEJ;AAyCO,IAAM,sBAAN,MAAuE;AAAA,EACnE,EAAA,GAAK,sBAAA;AAAA,EACL,IAAA,GAAO,sBAAA;AAAA,EAER,OAAA;AAAA,EACA,YAAA;AAAA,EACA,KAAA;AAAA,EACA,iBAAA;AAAA,EACA,gBAAA;AAAA,EACA,YAAA;AAAA;AAAA,EAGA,aAAA;AAAA;AAAA,EAGA,cAAA;AAAA,EAEA,sBAAA,GAAyB,KAAA;AAAA,EACzB,SAAS,MAAA,EAAO;AAAA,EAChB,aAAA,uBAAoB,GAAA,EAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOxC,kBAAA,uBAAyB,GAAA,EAAY;AAAA;AAAA,EAGrC,cAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,KAAA,uBAAY,GAAA,EAA2B;AAAA;AAAA;AAAA;AAAA;AAAA,EAM/C,MAAc,QAAA,CAAY,GAAA,EAAa,EAAA,EAAkC;AAEvE,IAAA,MAAM,YAAA,GAAe,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,GAAG,CAAA;AACvC,IAAA,IAAI,YAAA,EAAc;AAChB,MAAA,MAAM,YAAA;AAAA,IACR;AAGA,IAAA,IAAI,WAAA;AACJ,IAAA,MAAM,WAAA,GAAc,IAAI,OAAA,CAAc,CAAA,OAAA,KAAW;AAC/C,MAAA,WAAA,GAAc,OAAA;AAAA,IAChB,CAAC,CAAA;AACD,IAAA,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,GAAA,EAAK,WAAW,CAAA;AAE/B,IAAA,IAAI;AACF,MAAA,OAAO,MAAM,EAAA,EAAG;AAAA,IAClB,CAAA,SAAE;AAEA,MAAA,WAAA,EAAa;AAEb,MAAA,IAAI,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,GAAG,MAAM,WAAA,EAAa;AACvC,QAAA,IAAA,CAAK,KAAA,CAAM,OAAO,GAAG,CAAA;AAAA,MACvB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAA,CAAW,UAAqC,UAAA,EAA+C;AACrG,IAAA,IAAI,IAAA,CAAK,KAAA,KAAU,UAAA,IAAc,UAAA,EAAY;AAC3C,MAAA,OAAO,YAAY,UAAU,CAAA,CAAA;AAAA,IAC/B;AACA,IAAA,OAAO,CAAA,OAAA,EAAU,YAAY,SAAS,CAAA,CAAA;AAAA,EACxC;AAAA,EAEA,YAAY,MAAA,EAAmC;AAE7C,IAAA,IAAI,MAAA,CAAO,KAAA,IAA
S,MAAA,CAAO,WAAA,EAAa,KAAA,EAAO;AAC7C,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAI,MAAA,CAAO,KAAA,IAAS,MAAA,CAAO,UAAA,EAAY,KAAA,EAAO;AAC5C,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,sBAAA,GAAyB,OAAO,gBAAA,IAAoB,KAAA;AACzD,IAAA,IAAA,CAAK,UAAU,MAAA,CAAO,OAAA;AACtB,IAAA,IAAA,CAAK,KAAA,GAAQ,OAAO,KAAA,IAAS,QAAA;AAG7B,IAAA,MAAM,mBACJ,MAAA,CAAO,KAAA,IAAS,OAAO,WAAA,EAAa,KAAA,IAAS,8BAA8B,WAAA,CAAY,KAAA;AACzF,IAAA,MAAM,kBAAkB,MAAA,CAAO,KAAA,IAAS,OAAO,UAAA,EAAY,KAAA,IAAS,8BAA8B,UAAA,CAAW,KAAA;AAG7G,IAAA,MAAM,aAAA,GAAgB,MAAA,CAAO,WAAA,EAAa,aAAA,IAAiB,8BAA8B,WAAA,CAAY,aAAA;AACrG,IAAA,MAAM,iBAAA,GACJ,MAAA,CAAO,UAAA,EAAY,iBAAA,IAAqB,8BAA8B,UAAA,CAAW,iBAAA;AACnF,IAAA,MAAM,cAAA,GAAiB,OAAO,gBAAA,IAAoB,KAAA;AAGlD,IAAA,MAAM,cAAc,aAAA,GAAgB,iBAAA;AAGpC,IAAA,IAAA,CAAK,iBAAA,GAAoB;AAAA,MACvB,KAAA,EAAO,gBAAA;AAAA;AAAA;AAAA,MAGP,eAAe,cAAA,GAAiB,EAAE,KAAK,aAAA,EAAe,GAAA,EAAK,aAAY,GAAI,aAAA;AAAA,MAC3E,gBAAA,EAAkB,cAAA;AAAA,MAClB,aAAA,EAAe;AAAA,QACb,aACE,MAAA,CAAO,WAAA,EAAa,eAAe,WAAA,IACnC,6BAAA,CAA8B,YAAY,aAAA,CAAc,WAAA;AAAA,QAC1D,iBACE,MAAA,CAAO,WAAA,EAAa,eAAe,eAAA,IACnC,6BAAA,CAA8B,YAAY,aAAA,CAAc;AAAA,OAC5D;AAAA,MACA,eAAA,EAAiB,MAAA,CAAO,WAAA,EAAa,eAAA,IAAmB,8BAA8B,WAAA,CAAY,eAAA;AAAA,MAClG,iBAAA,EACE,MAAA,CAAO,WAAA,EAAa,iBAAA,IAAqB,8BAA8B,WAAA,CAAY;AAAA,KACvF;AAGA,IAAA,IAAA,CAAK,gBAAA,GAAmB;AAAA,MACtB,KAAA,EAAO,eAAA;AAAA,MACP,iBAAA;AAAA,MACA,gBAAA,EAAkB,cAAA;AAAA,MAClB,aAAA,EAAe;AAAA,QACb,aACE,MAAA,CAAO,UAAA,EAAY,eAAe,WAAA,IAClC,6BAAA,CAA8B,WAAW,aAAA,CAAc,WAAA;AAAA,QACzD,iBACE,MAAA,CAAO,UAAA,EAAY,eAAe,eAAA,IAClC,6BAAA,CAA8B,WAAW,aAAA,CAAc;AAAA,OAC3D;AAAA,MACA,eAAA,EAAiB,MAAA,CAAO,UAAA,EAAY,eAAA,IAAmB,8BAA8B,UAAA,CAAW;AAAA,KAClG;AAEA,IAAA,IAAA,CAAK,YAAA,GAAe,IAAI,YAAA,EAAa;AACrC,IAAA,IAAA,CAAK,eAAe,MAAA,CAAO,YAAA;AAK3B,IAAA,IAAA,CAAK,iBAAiB,IAAI,cAAA,CAAe,EAAE,OAAA,EAAS,IAAA,CAAK,SAAS,CAAA;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,IAAI,MAAA,GAQF;AACA,IAAA,OAAO;AAAA,MACL,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,WAAA,EAAa;AAAA,QACX,aAAA,EAAe,KAAK,iBAAA,CAAkB
;AAAA,OACxC;AAAA,MACA,UAAA,EAAY;AAAA,QACV,iBAAA,EAAmB,KAAK,gBAAA,CAAiB;AAAA;AAC3C,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,kBAAkB,cAAA,EAUrB;AAED,IAAA,MAAM,iBAAA,GAAoB,CAAC,KAAA,KAAgC;AACzD,MAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,KAAK,CAAA,EAAG;AACxB,QAAA,OAAO,KAAA,CAAM,CAAC,CAAA,EAAG,KAAA,IAAS,8BAA8B,WAAA,CAAY,KAAA;AAAA,MACtE;AACA,MAAA,OAAO,KAAA;AAAA,IACT,CAAA;AAGA,IAAA,MAAM,eAAA,GAAkB,CAAC,KAAA,KAAkD;AACzE,MAAA,OAAO,KAAA,CAAM,WAAW,CAAA,EAAG,KAAA,CAAM,QAAQ,CAAA,CAAA,EAAI,KAAA,CAAM,OAAO,CAAA,CAAA,GAAK,KAAA,CAAM,OAAA;AAAA,IACvE,CAAA;AAGA,IAAA,MAAM,gBAAA,GAAmB,OAAO,WAAA,KAAuD;AACrF,MAAA,MAAM,cAAA,GAAiB,kBAAkB,WAAW,CAAA;AAEpD,MAAA,IAAI;AAEF,QAAA,MAAM,QAAA,GAAW,MAAM,kBAAA,CAAmB,cAAA,EAAgB,cAAc,CAAA;AACxE,QAAA,OAAO,gBAAgB,QAAQ,CAAA;AAAA,MACjC,SAAS,KAAA,EAAO;AAEd,QAAA,OAAA,CAAQ,KAAA,CAAM,wCAAwC,KAAK,CAAA;AAC3D,QAAA,OAAO,WAAA;AAAA,MACT;AAAA,IACF,CAAA;AAEA,IAAA,MAAM,CAAC,oBAAA,EAAsB,mBAAmB,CAAA,GAAI,MAAM,QAAQ,GAAA,CAAI;AAAA,MACpE,gBAAA,CAAiB,IAAA,CAAK,iBAAA,CAAkB,KAAK,CAAA;AAAA,MAC7C,gBAAA,CAAiB,IAAA,CAAK,gBAAA,CAAiB,KAAK;AAAA,KAC7C,CAAA;AAED,IAAA,OAAO;AAAA,MACL,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,WAAA,EAAa;AAAA,QACX,aAAA,EAAe,KAAK,iBAAA,CAAkB,aAAA;AAAA,QACtC,KAAA,EAAO;AAAA,OACT;AAAA,MACA,UAAA,EAAY;AAAA,QACV,iBAAA,EAAmB,KAAK,gBAAA,CAAiB,iBAAA;AAAA,QACzC,KAAA,EAAO;AAAA;AACT,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,KAAA,EAAoC;AACzD,IAAA,IAAI,KAAK,YAAA,EAAc;AACrB,MAAA,IAAA,CAAK,aAAa,KAAK,CAAA;AAAA,IACzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyBQ,gBAAgB,SAAA,EAA4C;AAClE,IAAA,IAAI,OAAO,cAAc,QAAA,EAAU;AACjC,MAAA,OAAO,SAAA;AAAA,IACT;AACA,IAAA,OAAO,SAAA,CAAU,GAAA;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeQ,yBAAA,CAA0B,WAAoC,wBAAA,EAA0C;AAE9G,IAAA,IAAI,OAAO,cAAc,QAAA,EAAU;AACjC,MAAA,OAAO,SAAA;AAAA,IACT;AAKA,IAAA,MAAM,cAAc,SAAA,CAAU,GAAA;AAC9B,IAAA,MAAM,gBAAgB,SAAA,CAAU,GAAA;AAIhC,IAAA,MAAM,kBAAA,GAAqB,IAAA,CAAK,GAAA,CAAI,WAAA,GAAc,0BAA0B,aAAa,CAAA;AAEzF,IAAA,OAAO
,IAAA,CAAK,MAAM,kBAAkB,CAAA;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAA,GAA0B;AAChC,IAAA,IAAI,CAAC,KAAK,aAAA,EAAe;AACvB,MAAA,MAAM,eAAe,yBAAA,EAA0B;AAE/C,MAAA,IAAA,CAAK,aAAA,GAAgB,IAAI,KAAA,CAAM;AAAA,QAC7B,EAAA,EAAI,+BAAA;AAAA,QACJ,IAAA,EAAM,UAAA;AAAA,QACN,YAAA,EAAc,YAAA;AAAA,QACd,KAAA,EAAO,KAAK,iBAAA,CAAkB;AAAA,OAC/B,CAAA;AAAA,IACH;AACA,IAAA,OAAO,IAAA,CAAK,aAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAA,GAA2B;AACjC,IAAA,IAAI,CAAC,KAAK,cAAA,EAAgB;AACxB,MAAA,MAAM,eAAe,0BAAA,EAA2B;AAEhD,MAAA,IAAA,CAAK,cAAA,GAAiB,IAAI,KAAA,CAAM;AAAA,QAC9B,EAAA,EAAI,gCAAA;AAAA,QACJ,IAAA,EAAM,WAAA;AAAA,QACN,YAAA,EAAc,YAAA;AAAA,QACd,KAAA,EAAO,KAAK,gBAAA,CAAiB;AAAA,OAC9B,CAAA;AAAA,IACH;AACA,IAAA,OAAO,IAAA,CAAK,cAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAA,CAAc,UAAkB,UAAA,EAAsE;AAC5G,IAAA,IAAI,IAAA,CAAK,UAAU,UAAA,EAAY;AAC7B,MAAA,OAAO;AAAA,QACL,QAAA,EAAU,IAAA;AAAA,QACV,YAAY,UAAA,IAAc;AAAA,OAC5B;AAAA,IACF;AACA,IAAA,OAAO;AAAA,MACL,QAAA;AAAA,MACA,YAAY,UAAA,IAAc;AAAA,KAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBAAA,CAAkB,QAAA,EAAkB,UAAA,EAAyD;AACzG,IAAA,MAAM,GAAA,GAAM,IAAA,CAAK,aAAA,CAAc,QAAA,EAAU,UAAU,CAAA;AACnD,IAAA,IAAI,MAAA,GAAS,MAAM,IAAA,CAAK,OAAA,CAAQ,uBAAuB,GAAA,CAAI,QAAA,EAAU,IAAI,UAAU,CAAA;AAEnF,IAAA,IAAI,CAAC,MAAA,EAAQ;AAEX,MAAA,MAAM,gBAAA,GAAmB,IAAA,CAAK,cAAA,EAAe,CAAE,iBAAgB,CAAE,QAAA;AAEjE,MAAA,MAAA,GAAS,MAAM,IAAA,CAAK,OAAA,CAAQ,6BAAA,CAA8B;AAAA,QACxD,UAAU,GAAA,CAAI,QAAA;AAAA,QACd,YAAY,GAAA,CAAI,UAAA;AAAA,QAChB,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,MAAA,EAAQ;AAAA,UACN,aAAa,IAAA,CAAK,iBAAA;AAAA,UAClB,YAAY,IAAA,CAAK,gBAAA;AAAA,UACjB,OAAO,IAAA,CAAK;AAAA,SACd;AAAA,QACA;AAAA,OACD,CAAA;AAAA,IACH;AAEA,IAAA,OAAO,MAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,iBAAA,EAAoC;AACxD,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,eAAA,CAAgB,IAAA,CAAK,iBAAiB,iBAAiB,CAAA;AAC9E,IAAA,OAAO,iBAAA,GAAoB,SAAA;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBQ,0BAAA,GAAsD;AAC5D,IAAA,OAAO;AAAA,MACL,aAAA,EAAe,IAAA,CAAK,eAAA,CAAgB,IAAA,CAAK,kBAAkB,aAAa,CAAA;AAAA,MACxE,iBAAA,EAAmB,IAAA,CAAK
,eAAA,CAAgB,IAAA,CAAK,iBAAiB,iBAAiB,CAAA;AAAA,MAC/E,OAAO,IAAA,CAAK;AAAA,KACd;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,6BAA6B,MAAA,EAON;AAC7B,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,2BAAA;AAAA,MACN,IAAA,EAAM;AAAA,QACJ,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,eAAe,MAAA,CAAO,aAAA;AAAA,QACtB,SAAA,EAAA,iBAAW,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAAA,QAClC,iBAAiB,MAAA,CAAO,eAAA;AAAA,QACxB,UAAU,MAAA,CAAO,QAAA;AAAA,QACjB,UAAU,MAAA,CAAO,QAAA;AAAA,QACjB,WAAW,MAAA,CAAO,SAAA;AAAA,QAClB,MAAA,EAAQ,KAAK,0BAAA;AAA2B;AAC1C,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,2BAA2B,MAAA,EAWN;AAC3B,IAAA,MAAM,WAAA,GAAA,iBAAc,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAC3C,IAAA,MAAM,UAAA,GAAa,IAAI,IAAA,CAAK,WAAW,CAAA,CAAE,OAAA,EAAQ,GAAI,IAAI,IAAA,CAAK,MAAA,CAAO,SAAS,CAAA,CAAE,OAAA,EAAQ;AAExF,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,yBAAA;AAAA,MACN,IAAA,EAAM;AAAA,QACJ,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,eAAe,MAAA,CAAO,aAAA;AAAA,QACtB,WAAA;AAAA,QACA,UAAA;AAAA,QACA,gBAAgB,MAAA,CAAO,cAAA;AAAA,QACvB,mBAAmB,MAAA,CAAO,iBAAA;AAAA,QAC1B,cAAc,MAAA,CAAO,YAAA;AAAA,QACrB,aAAa,MAAA,CAAO,WAAA;AAAA,QACpB,mBAAmB,MAAA,CAAO,iBAAA;AAAA,QAC1B,UAAU,MAAA,CAAO,QAAA;AAAA,QACjB,UAAU,MAAA,CAAO;AAAA;AACnB,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,8BAA8B,MAAA,EAQN;AAC9B,IAAA,MAAM,QAAA,GAAA,iBAAW,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AACxC,IAAA,MAAM,UAAA,GAAa,IAAI,IAAA,CAAK,QAAQ,CAAA,CAAE,OAAA,EAAQ,GAAI,IAAI,IAAA,CAAK,MAAA,CAAO,SAAS,CAAA,CAAE,OAAA,EAAQ;AAErF,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,4BAAA;AAAA,MACN,IAAA,EAAM;AAAA,QACJ,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,eAAe,MAAA,CAAO,aAAA;AAAA,QACtB,QAAA;AAAA,QACA,UAAA;AAAA,QACA,iBAAiB,MAAA,CAAO,eAAA;AAAA,QACxB,OAAO,MAAA,CAAO,KAAA;AAAA,QACd,UAAU,MAAA,CAAO,QAAA;AAAA,QACjB,UAAU,MAAA,CAAO;AAAA;AACnB,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,qCAAqC,OAAA,EAAkC;AAC7E,IAAA,MAAM,KAAA,GAAQ,QAAQ,OAAA,EAAS,KAAA;AAC/B,IAAA,IAAI,CAAC,KAAA,IAAS,CAAC,MAAM,OAAA,CAAQ,KAAK,GAAG,OAAO,EAAA;AAG5C,IAAA,KAAA,IAAS,IAAI,KAAA,CAAM,MAAA,GAAS,CAAA,EAAG,CAAA,IAAK,GAAG,CAAA,EAAA,EAAK;AAC1C,MAAA,MAAM,IAAA,GAAO,MAAM,CAAC,CAAA;AACpB,MAAA,IAAI,IAAA,EAAM,SAAS,yBAAA,EAA2B;AAE5C,QA
AA,OAAO,CAAA;AAAA,MACT;AAAA,IACF;AACA,IAAA,OAAO,EAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,yBAAyB,OAAA,EAAmC;AAClE,IAAA,MAAM,KAAA,GAAQ,QAAQ,OAAA,EAAS,KAAA;AAC/B,IAAA,IAAI,CAAC,KAAA,IAAS,CAAC,MAAM,OAAA,CAAQ,KAAK,GAAG,OAAO,KAAA;AAE5C,IAAA,IAAI,cAAA,GAAiB,EAAA;AACrB,IAAA,IAAI,oBAAA,GAAuB,EAAA;AAE3B,IAAA,KAAA,IAAS,IAAI,KAAA,CAAM,MAAA,GAAS,CAAA,EAAG,CAAA,IAAK,GAAG,CAAA,EAAA,EAAK;AAC1C,MAAA,MAAM,IAAA,GAAO,MAAM,CAAC,CAAA;AACpB,MAAA,IAAI,IAAA,EAAM,IAAA,KAAS,2BAAA,IAA+B,cAAA,KAAmB,EAAA,EAAI;AACvE,QAAA,cAAA,GAAiB,CAAA;AAAA,MACnB;AACA,MAAA,IAAA,CACG,MAAM,IAAA,KAAS,yBAAA,IAA6B,MAAM,IAAA,KAAS,4BAAA,KAC5D,yBAAyB,EAAA,EACzB;AACA,QAAA,oBAAA,GAAuB,CAAA;AAAA,MACzB;AAAA,IACF;AAGA,IAAA,OAAO,cAAA,KAAmB,MAAM,cAAA,GAAiB,oBAAA;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBQ,mBAAmB,OAAA,EAA+D;AACxF,IAAA,MAAM,KAAA,GAAQ,QAAQ,OAAA,EAAS,KAAA;AAC/B,IAAA,IAAI,CAAC,SAAS,CAAC,KAAA,CAAM,QAAQ,KAAK,CAAA,SAAU,EAAC;AAE7C,IAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,oCAAA,CAAqC,OAAO,CAAA;AACxE,IAAA,IAAI,mBAAmB,EAAA,EAAI;AAGzB,MAAA,OAAO,KAAA,CAAM,OAAO,CAAA,CAAA,KAAK;AACvB,QAAA,MAAM,IAAA,GAAO,CAAA;AAEb,QAAA,OAAO,MAAM,IAAA,KAAS,2BAAA;AAAA,MACxB,CAAC,CAAA;AAAA,IACH;AAGA,IAAA,OAAO,MAAM,KAAA,CAAM,cAAA,GAAiB,CAAC,CAAA,CAAE,OAAO,CAAA,CAAA,KAAK;AACjD,MAAA,MAAM,IAAA,GAAO,CAAA;AACb,MAAA,OAAO,CAAC,IAAA,EAAM,IAAA,EAAM,UAAA,CAAW,sBAAsB,CAAA;AAAA,IACvD,CAAC,CAAA;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAAA,EAAmC;AAC5D,IAAA,OAAO,IAAA,CAAK,kBAAA,CAAmB,OAAO,CAAA,CAAE,MAAA,GAAS,CAAA;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,wBAAwB,OAAA,EAAkD;AAChF,IAAA,MAAM,eAAA,GAAkB,IAAA,CAAK,kBAAA,CAAmB,OAAO,CAAA;AACvD,IAAA,IAAI,eAAA,CAAgB,MAAA,KAAW,CAAA,EAAG,OAAO,IAAA;AAEzC,IAAA,OAAO;AAAA,MACL,GAAG,OAAA;AAAA,MACH,OAAA,EAAS;AAAA,QACP,GAAG,OAAA,CAAQ,OAAA;AAAA,QACX,KAAA,EAAO;AAAA;AACT,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYQ,qBAAA,CAAsB,aAAgC,MAAA,EAAsD;AAClH,IAAA,MAAM,iBAAiB,MAAA,CAAO,cAAA;AAG9B,IAAA,MAAM,kBAAA,GAAqB,KAAA,CAAM,OAAA,CAAQ,MAAA,CAAO,
kBAAkB,IAC9D,IAAI,GAAA,CAAI,MAAA,CAAO,kBAAkB,CAAA,GACjC,MAAA;AAEJ,IAAA,IAAI,CAAC,cAAA,EAAgB;AAEnB,MAAA,OAAO,WAAA;AAAA,IACT;AAEA,IAAA,MAAM,SAA4B,EAAC;AAEnC,IAAA,KAAA,MAAW,OAAO,WAAA,EAAa;AAE7B,MAAA,IAAI,kBAAA,EAAoB,GAAA,CAAI,GAAA,CAAI,EAAE,CAAA,EAAG;AACnC,QAAA;AAAA,MACF;AAGA,MAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,oCAAA,CAAqC,GAAG,CAAA;AACpE,MAAA,MAAM,UAAA,GAAa,IAAA,CAAK,wBAAA,CAAyB,GAAG,CAAA;AAEpD,MAAA,IAAI,UAAA,EAAY;AAGd,QAAA,MAAA,CAAO,KAAK,GAAG,CAAA;AAAA,MACjB,CAAA,MAAA,IAAW,mBAAmB,EAAA,EAAI;AAEhC,QAAA,MAAM,UAAA,GAAa,IAAA,CAAK,uBAAA,CAAwB,GAAG,CAAA;AACnD,QAAA,IAAI,UAAA,EAAY;AACd,UAAA,MAAA,CAAO,KAAK,UAAU,CAAA;AAAA,QACxB;AAAA,MACF,CAAA,MAAO;AAEL,QAAA,IAAI,CAAC,IAAI,SAAA,EAAW;AAElB,UAAA,MAAA,CAAO,KAAK,GAAG,CAAA;AAAA,QACjB,CAAA,MAAO;AACL,UAAA,MAAM,OAAA,GAAU,IAAI,IAAA,CAAK,GAAA,CAAI,SAAS,CAAA;AACtC,UAAA,IAAI,UAAU,cAAA,EAAgB;AAC5B,YAAA,MAAA,CAAO,KAAK,GAAG,CAAA;AAAA,UACjB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,MAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,cAAA,CAAkB,EAAA,EAAsB,WAAA,EAAuC;AAC3F,IAAA,IAAI,aAAa,OAAA,EAAS;AACxB,MAAA,MAAM,IAAI,MAAM,4BAA4B,CAAA;AAAA,IAC9C;AAEA,IAAA,MAAM,MAAA,GAAS,MAAM,EAAA,EAAG;AAExB,IAAA,IAAI,aAAa,OAAA,EAAS;AACxB,MAAA,MAAM,IAAI,MAAM,4BAA4B,CAAA;AAAA,IAC9C;AAEA,IAAA,OAAO,MAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YAAA,CACZ,oBAAA,EACA,iBAAA,EACA,WAAA,EAMC;AACD,IAAA,MAAM,KAAA,GAAQ,KAAK,gBAAA,EAAiB;AAEpC,IAAA,MAAM,MAAA,GAAS,mBAAA,CAAoB,oBAAA,EAAsB,iBAAiB,CAAA;AAE1E,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,cAAA;AAAA,MACxB,MACE,KAAA,CAAM,QAAA,CAAS,MAAA,EAAQ;AAAA,QACrB,aAAA,EAAe;AAAA,UACb,GAAG,KAAK,iBAAA,CAAkB;AAAA,SAC5B;AAAA,QACA,eAAA,EAAiB,KAAK,iBAAA,CAAkB,eAAA;AAAA,QACxC;AAAA,OACD,CAAA;AAAA,MACH;AAAA,KACF;AAEA,IAAA,MAAM,MAAA,GAAS,mBAAA,CAAoB,MAAA,CAAO,IAAI,CAAA;AAG9C,IAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,UAAA,IAAc,MAAA,CAAO,KAAA;AAE1C,IAAA,OAAO;AAAA,MACL,cAAc,MAAA,CAAO,YAAA;AAAA,MACrB,aAAa,MAAA,CAAO,WAAA;AAAA,MACpB,uBAAuB,MAAA,CAAO,qBAAA;AAAA,MAC9B,OAAO,KAAA,GACH;AAAA,QACE,aAAa,KAAA,CAAM,WAAA;AAAA,QACnB,cAAc,KAAA,CAAM,YAAA;AAAA,QACpB,aAAa,KAAA,CAAM
;AAAA,OACrB,GACA;AAAA,KACN;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,uBAAA,CACZ,oBAAA,EACA,gBAAA,EACA,aACA,WAAA,EAWC;AAED,IAAA,MAAM,KAAA,GAAQ,IAAI,KAAA,CAAM;AAAA,MACtB,EAAA,EAAI,uBAAA;AAAA,MACJ,IAAA,EAAM,uBAAA;AAAA,MACN,KAAA,EAAO,KAAK,iBAAA,CAAkB,KAAA;AAAA,MAC9B,YAAA,EAAc,0BAA0B,IAAI;AAAA,KAC7C,CAAA;AAED,IAAA,MAAM,MAAA,GAAS,8BAAA,CAA+B,oBAAA,EAAsB,gBAAA,EAAkB,WAAW,CAAA;AAGjG,IAAA,MAAM,cAAiC,EAAC;AACxC,IAAA,KAAA,MAAW,IAAA,IAAQ,gBAAA,CAAiB,MAAA,EAAO,EAAG;AAC5C,MAAA,WAAA,CAAY,IAAA,CAAK,GAAG,IAAI,CAAA;AAAA,IAC1B;AAGA,IAAA,KAAA,MAAW,OAAO,WAAA,EAAa;AAC7B,MAAA,IAAA,CAAK,kBAAA,CAAmB,GAAA,CAAI,GAAA,CAAI,EAAE,CAAA;AAAA,IACpC;AAEA,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,cAAA;AAAA,MACxB,MACE,KAAA,CAAM,QAAA,CAAS,MAAA,EAAQ;AAAA,QACrB,aAAA,EAAe;AAAA,UACb,GAAG,KAAK,iBAAA,CAAkB;AAAA,SAC5B;AAAA,QACA,eAAA,EAAiB,KAAK,iBAAA,CAAkB,eAAA;AAAA,QACxC;AAAA,OACD,CAAA;AAAA,MACH;AAAA,KACF;AAEA,IAAA,MAAM,MAAA,GAAS,8BAAA,CAA+B,MAAA,CAAO,IAAI,CAAA;AAGzD,IAAA,MAAM,OAAA,uBAAc,GAAA,EAOlB;AAEF,IAAA,KAAA,MAAW,CAAC,QAAA,EAAU,YAAY,CAAA,IAAK,OAAO,OAAA,EAAS;AACrD,MAAA,OAAA,CAAQ,IAAI,QAAA,EAAU;AAAA,QACpB,cAAc,YAAA,CAAa,YAAA;AAAA,QAC3B,aAAa,YAAA,CAAa,WAAA;AAAA,QAC1B,uBAAuB,YAAA,CAAa;AAAA,OACrC,CAAA;AAAA,IACH;AAGA,IAAA,KAAA,MAAW,YAAY,WAAA,EAAa;AAClC,MAAA,IAAI,CAAC,OAAA,CAAQ,GAAA,CAAI,QAAQ,CAAA,EAAG;AAE1B,QAAA,OAAA,CAAQ,GAAA,CAAI,QAAA,EAAU,EAAE,YAAA,EAAc,IAAI,CAAA;AAAA,MAC5C;AAAA,IACF;AAGA,IAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,UAAA,IAAc,MAAA,CAAO,KAAA;AAE1C,IAAA,OAAO;AAAA,MACL,OAAA;AAAA,MACA,OAAO,KAAA,GACH;AAAA,QACE,aAAa,KAAA,CAAM,WAAA;AAAA,QACnB,cAAc,KAAA,CAAM,YAAA;AAAA,QACpB,aAAa,KAAA,CAAM;AAAA,OACrB,GACA;AAAA,KACN;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,aAAA,CACZ,YAAA,EACA,YAAA,EACA,aAAA,EAOA,4BACA,WAAA,EAKC;AACD,IAAA,MAAM,KAAA,GAAQ,KAAK,iBAAA,EAAkB;AAErC,IAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,YAAA,CAAa,iBAAA,CAAkB,YAAY,CAAA;AAGvE,IAAA,MAAM,kBAAkB,0BAAA,IAA8B,IAAA,CAAK,eAAA,CAAgB,IAAA,CAAK,iBAAiB,iBAAiB,CAAA;AAGlH,IAAA,IAAI,aAAa,EAAE,WAAA,EAAa,GAAG,YAAA,EAAc,CAAA,EAAG,aAAa,CAAA,EAAE;AAGnE,IAAA,IAAI,MAAA,GAAS,oB
AAA,CAAqB,YAAA,EAAc,YAAA,EAAc,KAAK,CAAA;AACnE,IAAA,IAAI,MAAA,GAAS,MAAM,IAAA,CAAK,cAAA;AAAA,MACtB,MACE,KAAA,CAAM,QAAA,CAAS,MAAA,EAAQ;AAAA,QACrB,aAAA,EAAe;AAAA,UACb,GAAG,KAAK,gBAAA,CAAiB;AAAA,SAC3B;AAAA,QACA,eAAA,EAAiB,KAAK,gBAAA,CAAiB,eAAA;AAAA,QACvC;AAAA,OACD,CAAA;AAAA,MACH;AAAA,KACF;AAGA,IAAA,MAAM,UAAA,GAAa,MAAA,CAAO,UAAA,IAAc,MAAA,CAAO,KAAA;AAC/C,IAAA,IAAI,UAAA,EAAY;AACd,MAAA,UAAA,CAAW,WAAA,IAAe,WAAW,WAAA,IAAe,CAAA;AACpD,MAAA,UAAA,CAAW,YAAA,IAAgB,WAAW,YAAA,IAAgB,CAAA;AACtD,MAAA,UAAA,CAAW,WAAA,IAAe,WAAW,WAAA,IAAe,CAAA;AAAA,IACtD;AAEA,IAAA,IAAI,MAAA,GAAS,oBAAA,CAAqB,MAAA,CAAO,IAAI,CAAA;AAC7C,IAAA,IAAI,eAAA,GAAkB,IAAA,CAAK,YAAA,CAAa,iBAAA,CAAkB,OAAO,YAAY,CAAA;AAG7E,IAAA,IAAI,CAAC,mBAAA,CAAoB,eAAA,EAAiB,eAAe,CAAA,EAAG;AAE1D,MAAA,IAAI,eAAe,MAAA,EAAQ;AACzB,QAAA,MAAM,YAAA,GAAe,KAAK,6BAAA,CAA8B;AAAA,UACtD,SAAS,aAAA,CAAc,OAAA;AAAA,UACvB,aAAA,EAAe,YAAA;AAAA,UACf,WAAW,aAAA,CAAc,SAAA;AAAA,UACzB,eAAA,EAAiB,cAAA;AAAA,UACjB,OAAO,CAAA,kCAAA,EAAqC,cAAc,CAAA,QAAA,EAAM,eAAe,aAAa,eAAe,CAAA,qCAAA,CAAA;AAAA,UAC3G,UAAU,aAAA,CAAc,QAAA;AAAA,UACxB,UAAU,aAAA,CAAc;AAAA,SACzB,CAAA;AACD,QAAA,MAAM,cAAc,MAAA,CAAO,MAAA,CAAO,YAAY,CAAA,CAAE,MAAM,MAAM;AAAA,QAAC,CAAC,CAAA;AAG9D,QAAA,MAAM,YAAA,GAAe,OAAO,UAAA,EAAW;AACvC,QAAA,aAAA,CAAc,OAAA,GAAU,YAAA;AAExB,QAAA,MAAM,WAAA,GAAc,KAAK,4BAAA,CAA6B;AAAA,UACpD,OAAA,EAAS,YAAA;AAAA,UACT,aAAA,EAAe,YAAA;AAAA,UACf,eAAA,EAAiB,cAAA;AAAA,UACjB,UAAU,aAAA,CAAc,QAAA;AAAA,UACxB,UAAU,aAAA,CAAc,QAAA;AAAA,UACxB,SAAA,EAAW,CAAC,aAAA,CAAc,QAAQ;AAAA,SACnC,CAAA;AAED,QAAA,aAAA,CAAc,SAAA,GAAY,YAAY,IAAA,CAAK,SAAA;AAC3C,QAAA,MAAM,cAAc,MAAA,CAAO,MAAA,CAAO,WAAW,CAAA,CAAE,MAAM,MAAM;AAAA,QAAC,CAAC,CAAA;AAAA,MAC/D;AAGA,MAAA,MAAA,GAAS,oBAAA,CAAqB,YAAA,EAAc,YAAA,EAAc,IAAI,CAAA;AAC9D,MAAA,MAAA,GAAS,MAAM,IAAA,CAAK,cAAA;AAAA,QAClB,MACE,KAAA,CAAM,QAAA,CAAS,MAAA,EAAQ;AAAA,UACrB,aAAA,EAAe;AAAA,YACb,GAAG,KAAK,gBAAA,CAAiB;AAAA,WAC3B;AAAA,UACA,eAAA,EAAiB,KAAK,gBAAA,CAAiB,eAAA;AAAA,UACvC;AAAA,SACD,CAAA;AAAA,QACH;AAAA,OACF;AAGA,MAAA,MAAM,UAAA,GAAa,MAAA,CAAO,UAAA,IAAc,MAAA,CAAO,KAAA;AAC/C,MAAA,IAAI,UAA
A,EAAY;AACd,QAAA,UAAA,CAAW,WAAA,IAAe,WAAW,WAAA,IAAe,CAAA;AACpD,QAAA,UAAA,CAAW,YAAA,IAAgB,WAAW,YAAA,IAAgB,CAAA;AACtD,QAAA,UAAA,CAAW,WAAA,IAAe,WAAW,WAAA,IAAe,CAAA;AAAA,MACtD;AAEA,MAAA,MAAA,GAAS,oBAAA,CAAqB,OAAO,IAAI,CAAA;AACzC,MAAA,eAAA,GAAkB,IAAA,CAAK,YAAA,CAAa,iBAAA,CAAkB,MAAA,CAAO,YAAY,CAAA;AAAA,IAC3E;AAEA,IAAA,OAAO;AAAA,MACL,cAAc,MAAA,CAAO,YAAA;AAAA,MACrB,uBAAuB,MAAA,CAAO,qBAAA;AAAA,MAC9B,KAAA,EAAO,UAAA,CAAW,WAAA,GAAc,CAAA,GAAI,UAAA,GAAa;AAAA,KACnD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeQ,4BAAA,CACN,YAAA,EACA,WAAA,EACA,iBAAA,EACA,yBACA,WAAA,EACQ;AAER,IAAA,IAAI,SAAA,GAAY,+BAA+B,YAAY,CAAA;AAG3D,IAAA,IAAI,WAAA,EAAa;AACf,MAAA,SAAA,GAAY,6BAAA,CAA8B,WAAW,WAAW,CAAA;AAAA,IAClE;AAEA,IAAA,IAAI,OAAA,GAAU;AAAA;;AAAA;AAAA,EAIhB,SAAS;AAAA;;AAAA;;AAAA;;AAAA,saAAA,CAAA;AAUP,IAAA,IAAI,uBAAA,EAAyB;AAC3B,MAAA,OAAA,IAAW;;AAAA;AAAA;AAAA,EAAyM,uBAAuB;AAAA,6BAAA,CAAA;AAAA,IAC7O;AAGA,IAAA,IAAI,WAAA,EAAa;AACf,MAAA,OAAA,IAAW;;AAAA;AAAA,EAGf,WAAW;AAAA,eAAA,CAAA;AAAA,IAET;AAEA,IAAA,IAAI,iBAAA,EAAmB;AACrB,MAAA,OAAA,IAAW;;AAAA;AAAA,EAGf,iBAAiB;AAAA;AAAA,CAAA;AAAA,IAGf;AAEA,IAAA,OAAO,OAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAA,CACN,gBACA,WAAA,EACkD;AAElD,IAAA,MAAM,aAAA,GAAgB,cAAA,EAAgB,GAAA,CAAI,cAAc,CAAA;AAIxD,IAAA,IAAI,aAAA,EAAe,QAAQ,EAAA,EAAI;AAC7B,MAAA,OAAO;AAAA,QACL,QAAA,EAAU,cAAc,MAAA,CAAO,EAAA;AAAA,QAC/B,YAAY,aAAA,CAAc;AAAA,OAC5B;AAAA,IACF;AAGA,IAAA,MAAM,UAAA,GAAa,YAAY,SAAA,EAAU;AACzC,IAAA,IAAI,UAAA,CAAW,YAAY,QAAA,EAAU;AACnC,MAAA,OAAO;AAAA,QACL,QAAA,EAAU,WAAW,UAAA,CAAW,QAAA;AAAA,QAChC,UAAA,EAAY,WAAW,UAAA,CAAW;AAAA,OACpC;AAAA,IACF;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,iBAAiB,IAAA,EAAsE;AAC3F,IAAA,MAAM,EAAE,aAAa,cAAA,EAAgB,UAAA,EAAY,OAAO,MAAA,EAAQ,MAAA,EAAQ,WAAA,EAAa,KAAA,EAAM,GAAI,IAAA;AAE/F,IAAA,MAAM,KAAA,GAAQ,UAAW,EAAC;AAE1B,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,gBAAA,CAAiB,cAAA,EAAgB,WAAW,CAAA;AACjE,IAAA,IAAI,CAAC,OAAA,EAAS;AACZ,MAAA,OAAO,WAAA;AAAA,IACT;AAEA,IAAA,MAAM,EAAE,QAAA,EAAU,UAAA
,EAAW,GAAI,OAAA;AAGjC,IAAA,MAAM,aAAA,GAAgB,0BAA0B,cAAc,CAAA;AAC9D,IAAA,MAAM,QAAA,GAAW,eAAe,YAAA,EAAc,QAAA;AAG9C,IAAA,IAAI,MAAA,GAAS,MAAM,IAAA,CAAK,iBAAA,CAAkB,UAAU,UAAU,CAAA;AAM9D,IAAA,IAAI,CAAC,MAAM,gBAAA,EAAkB;AAC3B,MAAA,KAAA,CAAM,gBAAA,GAAmB,IAAA;AAGzB,MAAA,MAAM,iBAAiB,MAAA,CAAO,cAAA;AAE9B,MAAA,IAAI,IAAA,CAAK,KAAA,KAAU,UAAA,IAAc,UAAA,EAAY;AAI3C,QAAA,MAAM,wBAAwB,MAAM,IAAA,CAAK,sBAAA,CAAuB,QAAA,EAAU,QAAW,cAAc,CAAA;AAGnG,QAAA,KAAA,MAAW,OAAO,qBAAA,EAAuB;AACvC,UAAA,IAAI,GAAA,CAAI,SAAS,QAAA,EAAU;AACzB,YAAA,IAAI,CAAC,KAAK,kBAAA,CAAmB,GAAG,KAAK,IAAA,CAAK,oCAAA,CAAqC,GAAG,CAAA,KAAM,EAAA,EAAI;AAC1F,cAAA;AAAA,YACF;AACA,YAAA,WAAA,CAAY,GAAA,CAAI,KAAK,QAAQ,CAAA;AAAA,UAC/B;AAAA,QACF;AAAA,MACF,CAAA,MAAO;AAEL,QAAA,MAAM,qBAAqB,MAAM,IAAA,CAAK,sBAAA,CAAuB,QAAA,EAAU,YAAY,cAAc,CAAA;AAEjG,QAAA,IAAI,kBAAA,CAAmB,SAAS,CAAA,EAAG;AAEjC,UAAA,KAAA,MAAW,OAAO,kBAAA,EAAoB;AACpC,YAAA,IAAI,GAAA,CAAI,SAAS,QAAA,EAAU;AACzB,cAAA,IAAI,CAAC,KAAK,kBAAA,CAAmB,GAAG,KAAK,IAAA,CAAK,oCAAA,CAAqC,GAAG,CAAA,KAAM,EAAA,EAAI;AAC1F,gBAAA;AAAA,cACF;AACA,cAAA,WAAA,CAAY,GAAA,CAAI,KAAK,QAAQ,CAAA;AAAA,YAC/B;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAOA,IAAA,IAAI,uBAAA;AACJ,IAAA,IAAI,IAAA,CAAK,KAAA,KAAU,UAAA,IAAc,UAAA,EAAY;AAC3C,MAAA,uBAAA,GAA0B,MAAM,IAAA,CAAK,uBAAA,CAAwB,UAAA,EAAY,QAAQ,CAAA;AAAA,IACnF;AAMA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAA,MAAM,WAAA,GAAc,WAAA,CAAY,GAAA,CAAI,GAAA,CAAI,EAAA,EAAG;AAC3C,MAAA,MAAM,kBAAA,GAAqB,IAAA,CAAK,qBAAA,CAAsB,WAAA,EAAa,MAAM,CAAA;AACzE,MAAA,MAAM,oBAAA,GAAuB,IAAA,CAAK,YAAA,CAAa,aAAA,CAAc,kBAAkB,CAAA;AAI/E,MAAA,MAAM,oBAAoB,uBAAA,GAA0B,IAAA,CAAK,YAAA,CAAa,WAAA,CAAY,uBAAuB,CAAA,GAAI,CAAA;AAC7G,MAAA,MAAM,wBAAA,GAA2B,OAAO,qBAAA,IAAyB,CAAA;AACjE,MAAA,MAAM,aAAA,GAAgB,OAAO,oBAAA,IAAwB,CAAA;AACrD,MAAA,MAAM,kBAAA,GAAqB,gBAAgB,oBAAA,GAAuB,iBAAA;AAElE,MAAA,MAAM,YAAY,IAAA,CAAK,yBAAA,CAA0B,IAAA,CAAK,iBAAA,CAAkB,eAAe,wBAAwB,CAAA;AAI/G,MAAA,MAAM,uBAAA,GAA0B,IAAA,CAAK,eAAA,CAAgB,IAAA,CAAK,iBAAiB,iBAAiB,CAAA;AAC5F,MAAA,MAAM,cAAA,GAAiB,OAAO,IAAA,CAAK,iBAAA,CAAkB,aAAA,KAAkB,QAAA;AACvE,MAAA,MAAM,WAAA,GAAc,cAAA
,GACf,IAAA,CAAK,iBAAA,CAAkB,cAA+C,GAAA,GACvE,CAAA;AACJ,MAAA,MAAM,sCAAsC,cAAA,GACxC,IAAA,CAAK,IAAI,WAAA,GAAc,SAAA,EAAW,GAAI,CAAA,GACtC,uBAAA;AACJ,MAAA,MAAM,2BAA2B,IAAA,CAAK,KAAA;AAAA,QACnC,2BAA2B,mCAAA,GAAuC;AAAA,OACrE;AAGA,MAAA,IAAA,CAAK,cAAA,CAAe;AAAA,QAClB,IAAA,EAAM,eAAA;AAAA,QACN,SAAA,sBAAe,IAAA,EAAK;AAAA,QACpB,QAAA;AAAA,QACA,YAAY,UAAA,IAAc,EAAA;AAAA,QAC1B,UAAA;AAAA,QACA,YAAA,EAAc,SAAA;AAAA,QACd,aAAA,EAAe,kBAAA;AAAA,QACf,SAAA;AAAA,QACA,gBAAA,EAAkB,IAAA,CAAK,KAAA,CAAO,kBAAA,GAAqB,YAAa,GAAG,CAAA;AAAA,QACnE,UAAU,kBAAA,IAAsB,SAAA;AAAA,QAChC,aAAa,kBAAA,IAAsB;AAAA,OACpC,CAAA;AAGD,MAAA,IAAI,MAAA,EAAQ;AACV,QAAA,MAAM,YAAA,GAAmC;AAAA,UACvC,IAAA,EAAM,kBAAA;AAAA,UACN,IAAA,EAAM;AAAA,YACJ,aAAA,EAAe,kBAAA;AAAA,YACf,aAAA,EAAe,SAAA;AAAA,YACf,oBAAA,EAAsB,IAAA,CAAK,KAAA,CAAO,kBAAA,GAAqB,YAAa,GAAG,CAAA;AAAA,YACvE,iBAAA,EAAmB,wBAAA;AAAA,YACnB,0BAAA,EAA4B,mCAAA;AAAA,YAC5B,wBAAA;AAAA,YACA,aAAa,kBAAA,IAAsB,SAAA;AAAA,YACnC,UAAU,MAAA,CAAO,EAAA;AAAA,YACjB,QAAA;AAAA,YACA;AAAA;AACF,SACF;AACA,QAAA,MAAM,MAAA,CAAO,MAAA,CAAO,YAAY,CAAA,CAAE,MAAM,MAAM;AAAA,QAE9C,CAAC,CAAA;AAAA,MACH;AAIA,MAAA,MAAM,SAAA,GAA0B,KAAA,CAAM,SAAA,oBAA6B,IAAI,GAAA,EAAY;AAEnF,MAAA,IAAI,UAAA,GAAa,CAAA,IAAK,kBAAA,IAAsB,SAAA,EAAW;AACrD,QAAA,MAAM,OAAA,GAAU,IAAA,CAAK,UAAA,CAAW,QAAA,EAAU,UAAU,CAAA;AACpD,QAAA,IAAI,oBAAA,GAAuB,KAAA;AAC3B,QAAA,MAAM,IAAA,CAAK,QAAA,CAAS,OAAA,EAAS,YAAY;AACvC,UAAA,MAAM,WAAA,GAAc,MAAM,IAAA,CAAK,iBAAA,CAAkB,UAAU,UAAU,CAAA;AACrE,UAAA,MAAM,gBAAA,GAAmB,WAAA,CAAY,GAAA,CAAI,GAAA,CAAI,EAAA,EAAG;AAChD,UAAA,MAAM,uBAAA,GAA0B,IAAA,CAAK,qBAAA,CAAsB,gBAAA,EAAkB,WAAW,CAAA;AAMxF,UAAA,MAAM,kBAAA,GAAqB,IAAA,CAAK,YAAA,CAAa,aAAA,CAAc,uBAAuB,CAAA;AAClF,UAAA,MAAM,YAAA,GAAe,YAAY,oBAAA,IAAwB,CAAA;AACzD,UAAA,IAAI,sBAAA,GAAyB,CAAA;AAC7B,UAAA,IAAI,IAAA,CAAK,KAAA,KAAU,UAAA,IAAc,UAAA,EAAY;AAC3C,YAAA,MAAM,iBAAA,GAAoB,MAAM,IAAA,CAAK,uBAAA,CAAwB,YAAY,QAAQ,CAAA;AACjF,YAAA,sBAAA,GAAyB,iBAAA,GAAoB,IAAA,CAAK,YAAA,CAAa,WAAA,CAAY,iBAAiB,CAAA,GAAI,CAAA;AAAA,UAClG;AACA,UAAA,MAAM,UAAA,GAAa,eAAe,kBAAA,GAAqB,sBAAA;AACvD,UAAA,IAAI,aAAa,SAAA,EAAW;
AAC1B,YAAA;AAAA,UACF;AAKA,UAAA,MAAM,kBAAA,GAAqB,WAAA,CAAY,cAAA,EAAgB,OAAA,EAAQ,IAAK,CAAA;AAEpE,UAAA,IAAI,uBAAA,CAAwB,SAAS,CAAA,EAAG;AACtC,YAAA,IAAI;AACF,cAAA,IAAI,IAAA,CAAK,KAAA,KAAU,UAAA,IAAc,UAAA,EAAY;AAC3C,gBAAA,MAAM,IAAA,CAAK,2BAAA;AAAA,kBACT,WAAA;AAAA,kBACA,QAAA;AAAA,kBACA,UAAA;AAAA,kBACA,uBAAA;AAAA,kBACA,MAAA;AAAA,kBACA;AAAA,iBACF;AAAA,cACF,CAAA,MAAO;AACL,gBAAA,MAAM,IAAA,CAAK,wBAAA;AAAA,kBACT,WAAA;AAAA,kBACA,QAAA;AAAA,kBACA,uBAAA;AAAA,kBACA,MAAA;AAAA,kBACA;AAAA,iBACF;AAAA,cACF;AAEA,cAAA,MAAM,aAAA,GAAgB,MAAM,IAAA,CAAK,iBAAA,CAAkB,UAAU,UAAU,CAAA;AACvE,cAAA,MAAM,WAAA,GAAc,aAAA,CAAc,cAAA,EAAgB,OAAA,EAAQ,IAAK,CAAA;AAC/D,cAAA,oBAAA,GAAuB,WAAA,GAAc,kBAAA;AAAA,YACvC,SAAS,KAAA,EAAO;AAEd,cAAA,IAAI,aAAa,OAAA,EAAS;AACxB,gBAAA,KAAA,CAAM,6BAA6B,CAAA;AAAA,cACrC,CAAA,MAAO;AACL,gBAAA,KAAA;AAAA,kBACE,CAAA,4CAAA,EAA+C,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,KAAK,SAAA,CAAU,KAAA,EAAO,IAAA,EAAM,CAAC,CAAC,CAAA;AAAA,iBACxH;AAAA,cACF;AAEA,cAAA,oBAAA,GAAuB,KAAA;AAAA,YACzB;AAAA,UACF;AAAA,QACF,CAAC,CAAA;AAQD,QAAA,IAAI,oBAAA,EAAsB;AACxB,UAAA,MAAM,OAAA,GAAU,WAAA,CAAY,GAAA,CAAI,GAAA,CAAI,EAAA,EAAG;AACvC,UAAA,IAAI,SAAA,GAAY,EAAA;AAChB,UAAA,IAAI,SAAA,GAAoC,IAAA;AAGxC,UAAA,KAAA,IAAS,IAAI,OAAA,CAAQ,MAAA,GAAS,CAAA,EAAG,CAAA,IAAK,GAAG,CAAA,EAAA,EAAK;AAC5C,YAAA,MAAM,GAAA,GAAM,QAAQ,CAAC,CAAA;AACrB,YAAA,IAAI,CAAC,GAAA,EAAK;AACV,YAAA,IAAI,IAAA,CAAK,oCAAA,CAAqC,GAAG,CAAA,KAAM,EAAA,EAAI;AACzD,cAAA,SAAA,GAAY,CAAA;AACZ,cAAA,SAAA,GAAY,GAAA;AACZ,cAAA;AAAA,YACF;AAAA,UACF;AAEA,UAAA,IAAI,SAAA,IAAa,cAAc,EAAA,EAAI;AAEjC,YAAA,MAAM,cAAwB,EAAC;AAC/B,YAAA,MAAM,iBAAoC,EAAC;AAE3C,YAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,cAAA,MAAM,GAAA,GAAM,QAAQ,CAAC,CAAA;AACrB,cAAA,IAAI,GAAA,EAAK,EAAA,IAAM,GAAA,CAAI,EAAA,KAAO,iBAAA,EAAmB;AAC3C,gBAAA,WAAA,CAAY,IAAA,CAAK,IAAI,EAAE,CAAA;AACvB,gBAAA,cAAA,CAAe,KAAK,GAAG,CAAA;AAAA,cACzB;AAAA,YACF;AAGA,YAAA,cAAA,CAAe,KAAK,SAAS,CAAA;AAG7B,YAAA,MAAM,eAAA,GAAkB,IAAA,CAAK,kBAAA,CAAmB,SAAS,CAAA;AACzD,YAAA,IAAI,eAAA,CAAgB,WAAW,CAAA,EAAG;AAEhC,cAAA,IAAI,UAAU,EAA
A,EAAI;AAChB,gBAAA,WAAA,CAAY,IAAA,CAAK,UAAU,EAAE,CAAA;AAAA,cAC/B;AAAA,YACF,WAAW,eAAA,CAAgB,MAAA,IAAU,UAAU,OAAA,EAAS,KAAA,EAAO,UAAU,CAAA,CAAA,EAAI;AAE3E,cAAA,SAAA,CAAU,QAAQ,KAAA,GAAQ,eAAA;AAAA,YAC5B;AAGA,YAAA,IAAI,cAAA,CAAe,SAAS,CAAA,EAAG;AAC7B,cAAA,MAAM,KAAK,gCAAA,CAAiC,cAAA,EAAgB,SAAA,EAAW,QAAA,EAAU,YAAY,KAAK,CAAA;AAAA,YACpG;AAGA,YAAA,IAAI,WAAA,CAAY,SAAS,CAAA,EAAG;AAC1B,cAAA,WAAA,CAAY,YAAY,WAAW,CAAA;AAAA,YACrC;AAAA,UACF,CAAA,MAAO;AAEL,YAAA,MAAM,QAAA,GAAW,WAAA,CAAY,KAAA,CAAM,KAAA,CAAM,EAAA,EAAG;AAC5C,YAAA,MAAM,SAAA,GAAY,WAAA,CAAY,KAAA,CAAM,QAAA,CAAS,EAAA,EAAG;AAChD,YAAA,MAAM,cAAA,GAAiB,CAAC,GAAG,QAAA,EAAU,GAAG,SAAS,CAAA;AACjD,YAAA,IAAI,cAAA,CAAe,SAAS,CAAA,EAAG;AAC7B,cAAA,MAAM,KAAK,gCAAA,CAAiC,cAAA,EAAgB,SAAA,EAAW,QAAA,EAAU,YAAY,KAAK,CAAA;AAAA,YACpG;AAAA,UACF;AAIA,UAAA,WAAA,CAAY,KAAA,CAAM,MAAM,EAAA,EAAG;AAC3B,UAAA,WAAA,CAAY,KAAA,CAAM,SAAS,EAAA,EAAG;AAAA,QAChC;AAGA,QAAA,MAAA,GAAS,MAAM,IAAA,CAAK,iBAAA,CAAkB,QAAA,EAAU,UAAU,CAAA;AAAA,MAC5D,CAAA,MAAA,IAAW,aAAa,CAAA,EAAG;AAWzB,QAAA,MAAM,QAAA,GAAW,WAAA,CAAY,KAAA,CAAM,KAAA,CAAM,EAAA,EAAG;AAC5C,QAAA,MAAM,SAAA,GAAY,WAAA,CAAY,KAAA,CAAM,QAAA,CAAS,EAAA,EAAG;AAChD,QAAA,MAAM,cAAA,GAAiB,CAAC,GAAG,QAAA,EAAU,GAAG,SAAS,CAAA;AAEjD,QAAA,IAAI,cAAA,CAAe,SAAS,CAAA,EAAG;AAC7B,UAAA,MAAM,KAAK,gCAAA,CAAiC,cAAA,EAAgB,SAAA,EAAW,QAAA,EAAU,YAAY,KAAK,CAAA;AAGlG,UAAA,KAAA,MAAW,OAAO,cAAA,EAAgB;AAChC,YAAA,WAAA,CAAY,GAAA,CAAI,KAAK,QAAQ,CAAA;AAAA,UAC/B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAKA,IAAA,MAAM,SAAS,MAAM,IAAA,CAAK,QAAQ,aAAA,CAAc,EAAE,UAAU,CAAA;AAC5D,IAAA,MAAM,gBAAA,GAAmB,mBAAA,CAAoB,MAAA,EAAQ,QAAQ,CAAA;AAC7D,IAAA,MAAM,cAAc,gBAAA,EAAkB,WAAA;AACtC,IAAA,MAAM,oBAAoB,gBAAA,EAAkB,iBAAA;AAC5C,IAAA,MAAM,cAAe,cAAA,EAAgB,GAAA,CAAI,aAAa,CAAA,wBAA8B,IAAA,EAAK;AAEzF,IAAA,IAAI,OAAO,kBAAA,EAAoB;AAC7B,MAAA,MAAM,2BAA2B,IAAA,CAAK,4BAAA;AAAA,QACpC,MAAA,CAAO,kBAAA;AAAA,QACP,WAAA;AAAA,QACA,iBAAA;AAAA,QACA,uBAAA;AAAA,QACA;AAAA,OACF;AAGA,MAAA,WAAA,CAAY,oBAAoB,sBAAsB,CAAA;AACtD,MAAA,WAAA,CAAY,SAAA,CAAU,0BAA0B,sBAAsB,CAAA;AAGtE,MAAA,MAAM,mBAAA,GAAuC;AAAA,QAC3C,EAAA,EAAI,CAAA,e
AAA,CAAA;AAAA,QACJ,IAAA,EAAM,MAAA;AAAA,QACN,SAAA,kBAAW,IAAI,IAAA,CAAK,CAAC,CAAA;AAAA,QACrB,OAAA,EAAS;AAAA,UACP,MAAA,EAAQ,CAAA;AAAA,UACR,KAAA,EAAO;AAAA,YACL;AAAA,cACE,IAAA,EAAM,MAAA;AAAA,cACN,IAAA,EAAM,CAAA;;AAAA;;AAAA;AAAA,kBAAA;AAAA;AAMR;AACF,SACF;AAAA,QACA,QAAA;AAAA,QACA;AAAA,OACF;AACA,MAAA,WAAA,CAAY,GAAA,CAAI,qBAAqB,QAAQ,CAAA;AAAA,IAC/C;AASA,IAAA,IAAI,eAAe,CAAA,EAAG;AACpB,MAAA,MAAM,WAAA,GAAc,WAAA,CAAY,GAAA,CAAI,GAAA,CAAI,EAAA,EAAG;AAG3C,MAAA,IAAI,kBAAA,GAAqB,EAAA;AACzB,MAAA,IAAI,aAAA,GAAwC,IAAA;AAE5C,MAAA,KAAA,IAAS,IAAI,WAAA,CAAY,MAAA,GAAS,CAAA,EAAG,CAAA,IAAK,GAAG,CAAA,EAAA,EAAK;AAChD,QAAA,MAAM,GAAA,GAAM,YAAY,CAAC,CAAA;AACzB,QAAA,IAAI,CAAC,GAAA,EAAK;AACV,QAAA,IAAI,IAAA,CAAK,oCAAA,CAAqC,GAAG,CAAA,KAAM,EAAA,EAAI;AACzD,UAAA,kBAAA,GAAqB,CAAA;AACrB,UAAA,aAAA,GAAgB,GAAA;AAChB,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,IAAI,aAAA,IAAiB,uBAAuB,EAAA,EAAI;AAC9C,QAAA,MAAM,mBAA6B,EAAC;AACpC,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,kBAAA,EAAoB,CAAA,EAAA,EAAK;AAC3C,UAAA,MAAM,GAAA,GAAM,YAAY,CAAC,CAAA;AACzB,UAAA,IAAI,GAAA,EAAK,EAAA,IAAM,GAAA,CAAI,EAAA,KAAO,iBAAA,EAAmB;AAC3C,YAAA,gBAAA,CAAiB,IAAA,CAAK,IAAI,EAAE,CAAA;AAAA,UAC9B;AAAA,QACF;AAEA,QAAA,IAAI,gBAAA,CAAiB,SAAS,CAAA,EAAG;AAC/B,UAAA,WAAA,CAAY,YAAY,gBAAgB,CAAA;AAAA,QAC1C;AAGA,QAAA,MAAM,eAAA,GAAkB,IAAA,CAAK,kBAAA,CAAmB,aAAa,CAAA;AAC7D,QAAA,IAAI,eAAA,CAAgB,WAAW,CAAA,EAAG;AAChC,UAAA,IAAI,cAAc,EAAA,EAAI;AACpB,YAAA,WAAA,CAAY,WAAA,CAAY,CAAC,aAAA,CAAc,EAAE,CAAC,CAAA;AAAA,UAC5C;AAAA,QACF,WAAW,eAAA,CAAgB,MAAA,IAAU,cAAc,OAAA,EAAS,KAAA,EAAO,UAAU,CAAA,CAAA,EAAI;AAC/E,UAAA,aAAA,CAAc,QAAQ,KAAA,GAAQ,eAAA;AAAA,QAChC;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,WAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,oBAAoB,IAAA,EAAyE;AACjG,IAAA,MAAM,EAAE,WAAA,EAAa,cAAA,EAAgB,KAAA,EAAO,QAAO,GAAI,IAAA;AAEvD,IAAA,MAAM,KAAA,GAAQ,UAAW,EAAC;AAE1B,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,gBAAA,CAAiB,cAAA,EAAgB,WAAW,CAAA;AACjE,IAAA,IAAI,CAAC,OAAA,EAAS;AACZ,MAAA,OAAO,WAAA;AAAA,IACT;AAEA,IAAA,MAAM,EAAE,QAAA,EAAU,UAAA,EAAW,GAAI,OAAA;AAGjC,IAAA,MAAM,aAAA,GAAgB,0BAA0B,cA
Ac,CAAA;AAC9D,IAAA,MAAM,QAAA,GAAW,eAAe,YAAA,EAAc,QAAA;AAC9C,IAAA,IAAI,QAAA,EAAU;AACZ,MAAA,OAAO,WAAA;AAAA,IACT;AAIA,IAAA,MAAM,QAAA,GAAW,WAAA,CAAY,GAAA,CAAI,KAAA,CAAM,EAAA,EAAG;AAC1C,IAAA,MAAM,SAAA,GAAY,WAAA,CAAY,GAAA,CAAI,QAAA,CAAS,EAAA,EAAG;AAC9C,IAAA,MAAM,cAAA,GAAiB,CAAC,GAAG,QAAA,EAAU,GAAG,SAAS,CAAA;AAEjD,IAAA,IAAI,cAAA,CAAe,WAAW,CAAA,EAAG;AAC/B,MAAA,OAAO,WAAA;AAAA,IACT;AAEA,IAAA,MAAM,SAAA,GAA0B,KAAA,CAAM,SAAA,oBAA6B,IAAI,GAAA,EAAY;AAEnF,IAAA,MAAM,KAAK,gCAAA,CAAiC,cAAA,EAAgB,SAAA,EAAW,QAAA,EAAU,YAAY,KAAK,CAAA;AAElG,IAAA,OAAO,WAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,gCAAA,CACZ,cAAA,EACA,SAAA,EACA,QAAA,EACA,YACA,KAAA,EACe;AAGf,IAAA,KAAA,MAAW,OAAO,cAAA,EAAgB;AAChC,MAAA,IAAI,SAAA,CAAU,GAAA,CAAI,GAAA,CAAI,EAAE,CAAA,EAAG;AACzB,QAAA,GAAA,CAAI,EAAA,GAAK,OAAO,UAAA,EAAW;AAAA,MAC7B;AAAA,IACF;AAEA,IAAA,MAAM,IAAA,CAAK,eAAe,eAAA,CAAgB;AAAA,MACxC,QAAA,EAAU,cAAA;AAAA,MACV,QAAA;AAAA,MACA;AAAA,KACD,CAAA;AAID,IAAA,KAAA,MAAW,OAAO,cAAA,EAAgB;AAChC,MAAA,IAAI,IAAA,CAAK,oCAAA,CAAqC,GAAG,CAAA,KAAM,EAAA,EAAI;AACzD,QAAA,SAAA,CAAU,GAAA,CAAI,IAAI,EAAE,CAAA;AAAA,MACtB;AAAA,IACF;AACA,IAAA,KAAA,CAAM,SAAA,GAAY,SAAA;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,sBAAA,CACZ,QAAA,EACA,UAAA,EACA,cAAA,EAC4B;AAG5B,IAAA,MAAM,SAAA,GAAY,iBAAiB,IAAI,IAAA,CAAK,eAAe,OAAA,EAAQ,GAAI,CAAC,CAAA,GAAI,MAAA;AAE5E,IAAA,IAAI,MAAA;AAEJ,IAAA,IAAI,IAAA,CAAK,KAAA,KAAU,UAAA,IAAc,UAAA,EAAY;AAE3C,MAAA,MAAA,GAAS,MAAM,IAAA,CAAK,OAAA,CAAQ,wBAAA,CAAyB;AAAA,QACnD,UAAA;AAAA,QACA,OAAA,EAAS,KAAA;AAAA;AAAA,QACT,OAAA,EAAS,EAAE,KAAA,EAAO,WAAA,EAAa,WAAW,KAAA,EAAM;AAAA,QAChD,QAAQ,SAAA,GACJ;AAAA,UACE,SAAA,EAAW;AAAA,YACT,KAAA,EAAO;AAAA;AACT,SACF,GACA;AAAA,OACL,CAAA;AAAA,IACH,CAAA,MAAO;AAEL,MAAA,MAAA,GAAS,MAAM,IAAA,CAAK,OAAA,CAAQ,YAAA,CAAa;AAAA,QACvC,QAAA;AAAA,QACA,OAAA,EAAS,KAAA;AAAA;AAAA,QACT,OAAA,EAAS,EAAE,KAAA,EAAO,WAAA,EAAa,WAAW,KAAA,EAAM;AAAA,QAChD,QAAQ,SAAA,GACJ;AAAA,UACE,SAAA,EAAW;AAAA,YACT,KAAA,EAAO;AAAA;AACT,SACF,GACA;AAAA,OACL,CAAA;AAAA,IACH;AAEA,IAAA,OAAO,MAAA,CAAO,QAAA;AAAA,EAChB;AAAA;AAAA
;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,uBAAA,CAAwB,UAAA,EAAoB,eAAA,EAAsD;AAC9G,IAAA,MAAM,EAAE,OAAA,EAAS,UAAA,EAAW,GAAI,MAAM,IAAA,CAAK,OAAA,CAAQ,WAAA,CAAY,EAAE,MAAA,EAAQ,EAAE,UAAA,IAAc,CAAA;AAEzF,IAAA,MAAM,gBAAA,uBAAuB,GAAA,EAA+B;AAE5D,IAAA,KAAA,MAAW,UAAU,UAAA,EAAY;AAE/B,MAAA,IAAI,MAAA,CAAO,OAAO,eAAA,EAAiB;AAEnC,MAAA,MAAM,UAAA,GAAa,mBAAA,CAAoB,MAAA,CAAO,QAAQ,CAAA;AACtD,MAAA,MAAM,uBAAuB,UAAA,EAAY,cAAA;AACzC,MAAA,MAAM,SAAA,GAAY,oBAAA,GAAuB,IAAI,IAAA,CAAK,IAAI,IAAA,CAAK,oBAAoB,CAAA,CAAE,OAAA,EAAQ,GAAI,CAAC,CAAA,GAAI,MAAA;AAElG,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,OAAA,CAAQ,YAAA,CAAa;AAAA,QAC7C,UAAU,MAAA,CAAO,EAAA;AAAA,QACjB,OAAA,EAAS,KAAA;AAAA,QACT,OAAA,EAAS,EAAE,KAAA,EAAO,WAAA,EAAa,WAAW,KAAA,EAAM;AAAA,QAChD,MAAA,EAAQ,YAAY,EAAE,SAAA,EAAW,EAAE,KAAA,EAAO,SAAA,IAAY,GAAI;AAAA,OAC3D,CAAA;AAGD,MAAA,MAAM,QAAA,GAAW,MAAA,CAAO,QAAA,CAAS,MAAA,CAAO,CAAA,CAAA,KAAK,CAAC,IAAA,CAAK,kBAAA,CAAmB,GAAA,CAAI,CAAA,CAAE,EAAE,CAAC,CAAA;AAE/E,MAAA,IAAI,QAAA,CAAS,SAAS,CAAA,EAAG;AACvB,QAAA,gBAAA,CAAiB,GAAA,CAAI,MAAA,CAAO,EAAA,EAAI,QAAQ,CAAA;AAAA,MAC1C;AAAA,IACF;AAEA,IAAA,IAAI,gBAAA,CAAiB,IAAA,KAAS,CAAA,EAAG,OAAO,MAAA;AAExC,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,6BAAA,CAA8B,kBAAkB,eAAe,CAAA;AACzF,IAAA,OAAO,MAAA,IAAU,MAAA;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,6BAAA,CACZ,gBAAA,EACA,eAAA,EACiB;AACjB,IAAA,MAAM,SAAmB,EAAC;AAE1B,IAAA,KAAA,MAAW,CAAC,QAAA,EAAU,QAAQ,CAAA,IAAK,gBAAA,EAAkB;AAEnD,MAAA,IAAI,aAAa,eAAA,EAAiB;AAGlC,MAAA,IAAI,QAAA,CAAS,WAAW,CAAA,EAAG;AAI3B,MAAA,MAAM,oBAAoB,yBAAA,CAA0B,QAAA,EAAU,EAAE,aAAA,EAAe,KAAK,CAAA;AAEpF,MAAA,IAAI,iBAAA,EAAmB;AACrB,QAAA,MAAM,UAAA,GAAa,MAAM,IAAA,CAAK,0BAAA,CAA2B,QAAQ,CAAA;AACjE,QAAA,MAAA,CAAO,IAAA,CAAK,2BAA2B,UAAU,CAAA;AAAA,EACvD,iBAAiB;AAAA,qBAAA,CACG,CAAA;AAAA,MAChB;AAAA,IACF;AAEA,IAAA,OAAO,MAAA,CAAO,KAAK,MAAM,CAAA;AAAA,EAC3B;AAAA,EAEA,MAAc,2BAA2B,QAAA,EAAmC;AAC1E,IAAA,IAAI,KAAK,sBAAA,EAAwB;AAE/B,MAAA,MAAM,MAAA,GAAS,IAAA,CAAK,aAAA,CAAc,GAAA,CAAI,QAAQ,CAAA;AAC9C,MAAA,IAAI,QAAQ,OAAO,MAAA;AAInB,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,MAAA;AAC1B,MAAA,MAAM,MAAA,GAAS,MAA
A,CAAO,WAAA,CAAY,QAAQ,CAAA;AAC1C,MAAA,IAAA,CAAK,aAAA,CAAc,GAAA,CAAI,QAAA,EAAU,MAAM,CAAA;AACvC,MAAA,OAAO,MAAA;AAAA,IACT;AACA,IAAA,OAAO,QAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,gBAAgB,YAAA,EAA8B;AAEpD,IAAA,OAAO,YAAA,CAAa,OAAA,CAAQ,4BAAA,EAA8B,EAAE,EAAE,IAAA,EAAK;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,uBAAuB,QAAA,EAAmC;AAChE,IAAA,IAAI,OAAA,GAAU,CAAA;AACd,IAAA,KAAA,MAAW,OAAO,QAAA,EAAU;AAC1B,MAAA,IAAI,IAAI,SAAA,EAAW;AACjB,QAAA,MAAM,UAAU,IAAI,IAAA,CAAK,GAAA,CAAI,SAAS,EAAE,OAAA,EAAQ;AAChD,QAAA,IAAI,UAAU,OAAA,EAAS;AACrB,UAAA,OAAA,GAAU,OAAA;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,UAAU,CAAA,GAAI,IAAI,KAAK,OAAO,CAAA,uBAAQ,IAAA,EAAK;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,iBAAA,CAAkB,QAAA,EAAkB,YAAA,EAAuC;AAEvF,IAAA,MAAM,iBAAA,GAAoB,IAAA,CAAK,eAAA,CAAgB,YAAY,CAAA;AAC3D,IAAA,MAAM,UAAA,GAAa,MAAM,IAAA,CAAK,0BAAA,CAA2B,QAAQ,CAAA;AACjE,IAAA,OAAO,eAAe,UAAU,CAAA;AAAA,EAAO,iBAAiB;AAAA,SAAA,CAAA;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,4BAAA,CACN,oBAAA,EACA,SAAA,EACA,gBAAA,EACQ;AACR,IAAA,IAAI,CAAC,oBAAA,EAAsB;AACzB,MAAA,OAAO,gBAAA;AAAA,IACT;AAGA,IAAA,MAAM,aAAA,GAAgB,gBAAA,CAAiB,KAAA,CAAM,uBAAuB,CAAA;AACpE,IAAA,MAAM,SAAA,GAAY,gBAAA,CAAiB,KAAA,CAAM,kCAAkC,CAAA;AAE3E,IAAA,IAAI,CAAC,aAAA,IAAiB,CAAC,SAAA,EAAW;AAEhC,MAAA,OAAO,GAAG,oBAAoB;;AAAA,EAAO,gBAAgB,CAAA,CAAA;AAAA,IACvD;AAEA,IAAA,MAAM,WAAA,GAAc,cAAc,CAAC,CAAA;AACnC,IAAA,MAAM,OAAA,GAAU,UAAU,CAAC,CAAA;AAG3B,IAAA,MAAM,kBAAkB,IAAI,MAAA;AAAA,MAC1B,eAAe,WAAW,CAAA,eAAA,EAAkB,QAAQ,OAAA,CAAQ,qBAAA,EAAuB,MAAM,CAAC,CAAA,qBAAA;AAAA,KAC5F;AACA,IAAA,MAAM,aAAA,GAAgB,oBAAA,CAAqB,KAAA,CAAM,eAAe,CAAA;AAEhE,IAAA,IAAI,aAAA,EAAe;AAGjB,MAAA,MAAM,WAAA,GAAc,gBAAA,CAAiB,KAAA,CAAM,gEAAgE,CAAA;AAC3G,MAAA,IAAI,WAAA,IAAe,WAAA,CAAY,CAAC,CAAA,EAAG;AACjC,QAAA,MAAM,aAAA,GAAgB,WAAA,CAAY,CAAC,CAAA,CAAE,IAAA,EAAK;AAE1C,QAAA,MAAM,aAAA,GAAgB,oBAAA,CAAqB,OAAA,CAAQ,eAAA,EAAiB,CAAA,KAAA,KAAS;AAE3E,UAAA,MAAM,eAAe,KAAA,CAAM,OAAA,CAAQ,aAAA,EAAe,EAAE,EAAE,OAAA,EAAQ;AAC9D,UAAA,OAAO,GAAG,YAAY;AAAA,EAAK,aAAa;AAAA,SAAA,CAAA;AAAA,QAC1C,CAAC,CAAA;AAC
D,QAAA,OAAO,aAAA;AAAA,MACT;AAAA,IACF;AAGA,IAAA,OAAO,GAAG,oBAAoB;;AAAA,EAAO,gBAAgB,CAAA,CAAA;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,2BAA2B,gBAAA,EAA4D;AAC7F,IAAA,MAAM,WAAA,GAAc,KAAA,CAAM,IAAA,CAAK,gBAAA,CAAiB,OAAA,EAAS,CAAA,CACtD,GAAA,CAAI,CAAC,CAAC,QAAA,EAAU,QAAQ,CAAA,KAAM;AAE7B,MAAA,MAAM,kBAAkB,IAAA,CAAK,GAAA;AAAA,QAC3B,GAAG,QAAA,CAAS,GAAA,CAAI,CAAA,CAAA,KAAM,EAAE,SAAA,GAAY,IAAI,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,CAAE,OAAA,EAAQ,GAAI,IAAA,CAAK,KAAM;AAAA,OACnF;AACA,MAAA,OAAO,EAAE,UAAU,eAAA,EAAgB;AAAA,IACrC,CAAC,EACA,IAAA,CAAK,CAAC,GAAG,CAAA,KAAM,CAAA,CAAE,eAAA,GAAkB,CAAA,CAAE,eAAe,CAAA;AAEvD,IAAA,OAAO,WAAA,CAAY,GAAA,CAAI,CAAA,CAAA,KAAK,CAAA,CAAE,QAAQ,CAAA;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,wBAAA,CACZ,MAAA,EACA,QAAA,EACA,kBAAA,EACA,QACA,WAAA,EACe;AAEf,IAAA,IAAA,CAAK,cAAA,CAAe;AAAA,MAClB,IAAA,EAAM,uBAAA;AAAA,MACN,SAAA,sBAAe,IAAA,EAAK;AAAA,MACpB,QAAA;AAAA,MACA,UAAA,EAAY,OAAO,UAAA,IAAc,EAAA;AAAA,MACjC,sBAAsB,MAAA,CAAO,kBAAA;AAAA,MAC7B,QAAA,EAAU,kBAAA,CAAmB,GAAA,CAAI,CAAA,CAAA,MAAM;AAAA,QACrC,MAAM,CAAA,CAAE,IAAA;AAAA,QACR,OAAA,EAAS,OAAO,CAAA,CAAE,OAAA,KAAY,QAAA,GAAW,EAAE,OAAA,GAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO;AAAA,OAC/E,CAAE;AAAA,KACH,CAAA;AAKD,IAAA,MAAM,IAAA,CAAK,OAAA,CAAQ,gBAAA,CAAiB,MAAA,CAAO,IAAI,IAAI,CAAA;AAInD,IAAA,MAAM,OAAA,GAAU,OAAO,UAAA,EAAW;AAGlC,IAAA,MAAM,eAAA,GAAkB,IAAA,CAAK,YAAA,CAAa,aAAA,CAAc,kBAAkB,CAAA;AAC1E,IAAA,MAAM,WAAA,GAAc,kBAAA,CAAmB,kBAAA,CAAmB,MAAA,GAAS,CAAC,CAAA;AACpE,IAAA,MAAM,SAAA,GAAA,iBAAY,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAEzC,IAAA,IAAI,aAAa,EAAA,EAAI;AACnB,MAAA,MAAM,WAAA,GAAc,KAAK,4BAAA,CAA6B;AAAA,QACpD,OAAA;AAAA,QACA,aAAA,EAAe,aAAA;AAAA,QACf,eAAA;AAAA,QACA,UAAU,MAAA,CAAO,EAAA;AAAA,QACjB,QAAA;AAAA,QACA,SAAA,EAAW,CAAC,QAAQ;AAAA,OACrB,CAAA;AAED,MAAA,IAAI,MAAA,EAAQ;AACV,QAAA,MAAM,MAAA,CAAO,MAAA,CAAO,WAAW,CAAA,CAAE,MAAM,MAAM;AAAA,QAE7C,CAAC,CAAA;AAAA,MACH;AAAA,IAGF;AAEA,IAAA,IAAI;AAEF,MAAA,MAAM,WAAA,GAAc,MAAM,IAAA,CAAK,OAAA,CAAQ,uBAAuB,MAAA,CAAO,QAAA,EAAU,OAAO,UAAU,CAAA;AAChG,MAAA,IAAI,WAAA,IAAe,WAAA,CAAY,cAAA,IAAkB,MAAA,CAAO,cAAA,EAA
gB;AACtE,QAAA,IAAI,WAAA,CAAY,cAAA,GAAiB,MAAA,CAAO,cAAA,EAAgB;AACtD,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,YAAA;AAAA,QACxB,WAAA,EAAa,sBAAsB,MAAA,CAAO,kBAAA;AAAA,QAC1C,kBAAA;AAAA,QACA;AAAA,OACF;AAGA,MAAA,MAAM,oBAAA,GAAuB,WAAA,EAAa,kBAAA,IAAsB,MAAA,CAAO,kBAAA,IAAsB,EAAA;AAC7F,MAAA,IAAI,eAAA;AACJ,MAAA,IAAI,IAAA,CAAK,UAAU,UAAA,EAAY;AAE7B,QAAA,MAAM,gBAAgB,MAAM,IAAA,CAAK,iBAAA,CAAkB,QAAA,EAAU,OAAO,YAAY,CAAA;AAChF,QAAA,eAAA,GAAkB,IAAA,CAAK,4BAAA,CAA6B,oBAAA,EAAsB,QAAA,EAAU,aAAa,CAAA;AAAA,MACnG,CAAA,MAAO;AAEL,QAAA,eAAA,GAAkB,oBAAA,GACd,GAAG,oBAAoB;;AAAA,EAAO,MAAA,CAAO,YAAY,CAAA,CAAA,GACjD,MAAA,CAAO,YAAA;AAAA,MACb;AAEA,MAAA,IAAI,eAAA,GAAkB,IAAA,CAAK,YAAA,CAAa,iBAAA,CAAkB,eAAe,CAAA;AAGzE,MAAA,MAAM,sBAAA,GAAyB,IAAA,CAAK,YAAA,CAAa,iBAAA,CAAkB,OAAO,YAAY,CAAA;AAItF,MAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,sBAAA,CAAuB,kBAAkB,CAAA;AAIrE,MAAA,MAAM,aAAA,GAAgB,kBAAA,CAAmB,GAAA,CAAI,CAAA,CAAA,KAAK,EAAE,EAAE,CAAA;AACtD,MAAA,MAAM,WAAA,GAAc,WAAA,EAAa,kBAAA,IAAsB,MAAA,CAAO,sBAAsB,EAAC;AACrF,MAAA,MAAM,iBAAiB,CAAC,mBAAG,IAAI,GAAA,CAAI,CAAC,GAAI,KAAA,CAAM,OAAA,CAAQ,WAAW,IAAI,WAAA,GAAc,IAAK,GAAG,aAAa,CAAC,CAAC,CAAA;AAE1G,MAAA,MAAM,IAAA,CAAK,QAAQ,wBAAA,CAAyB;AAAA,QAC1C,IAAI,MAAA,CAAO,EAAA;AAAA,QACX,YAAA,EAAc,eAAA;AAAA,QACd,UAAA,EAAY,eAAA;AAAA,QACZ,cAAA;AAAA,QACA,kBAAA,EAAoB;AAAA,OACrB,CAAA;AAGD,MAAA,IAAI,MAAA,CAAO,qBAAA,IAAyB,MAAA,CAAO,WAAA,EAAa;AACtD,QAAA,MAAM,SAAS,MAAM,IAAA,CAAK,QAAQ,aAAA,CAAc,EAAE,UAAU,CAAA;AAC5D,QAAA,IAAI,MAAA,EAAQ;AACV,UAAA,MAAM,WAAA,GAAc,mBAAA,CAAoB,MAAA,CAAO,QAAA,EAAU;AAAA,YACvD,mBAAmB,MAAA,CAAO,qBAAA;AAAA,YAC1B,aAAa,MAAA,CAAO;AAAA,WACrB,CAAA;AACD,UAAA,MAAM,IAAA,CAAK,QAAQ,YAAA,CAAa;AAAA,YAC9B,EAAA,EAAI,QAAA;AAAA,YACJ,KAAA,EAAO,OAAO,KAAA,IAAS,EAAA;AAAA,YACvB,QAAA,EAAU;AAAA,WACX,CAAA;AAAA,QACH;AAAA,MACF;AAMA,MAAA,IAAI,aAAa,EAAA,EAAI;AACnB,QAAA,MAAM,SAAA,GAAY,KAAK,0BAAA,CAA2B;AAAA,UAChD,OAAA;AAAA,UACA,aAAA,EAAe,aAAA;AAAA,UACf,SAAA;AAAA,UACA,cAAA,EAAgB,eAAA;AAAA,UAChB,iBAAA,EAAmB,sBAAA;AAAA,UACnB,cAAc,MAAA,CAAO,YAAA;AAAA,UACrB,aAAa,MAAA,CAAO,WAAA;AAAA,UACpB,mB
AAmB,MAAA,CAAO,qBAAA;AAAA,UAC1B,UAAU,MAAA,CAAO,EAAA;AAAA,UACjB;AAAA,SACD,CAAA;AAGD,QAAA,IAAI,MAAA,EAAQ;AACV,UAAA,MAAM,MAAA,CAAO,MAAA,CAAO,SAAS,CAAA,CAAE,MAAM,MAAM;AAAA,UAE3C,CAAC,CAAA;AAAA,QACH;AAAA,MAGF;AAGA,MAAA,IAAA,CAAK,cAAA,CAAe;AAAA,QAClB,IAAA,EAAM,sBAAA;AAAA,QACN,SAAA,sBAAe,IAAA,EAAK;AAAA,QACpB,QAAA;AAAA,QACA,UAAA,EAAY,OAAO,UAAA,IAAc,EAAA;AAAA,QACjC,YAAA,EAAc,eAAA;AAAA,QACd,mBAAmB,MAAA,CAAO,YAAA;AAAA,QAC1B,sBAAsB,MAAA,CAAO,kBAAA;AAAA,QAC7B,QAAA,EAAU,kBAAA,CAAmB,GAAA,CAAI,CAAA,CAAA,MAAM;AAAA,UACrC,MAAM,CAAA,CAAE,IAAA;AAAA,UACR,OAAA,EAAS,OAAO,CAAA,CAAE,OAAA,KAAY,QAAA,GAAW,EAAE,OAAA,GAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO;AAAA,SAC/E,CAAE,CAAA;AAAA,QACF,OAAO,MAAA,CAAO;AAAA,OACf,CAAA;AAGD,MAAA,MAAM,IAAA,CAAK,YAAA;AAAA,QACT,EAAE,GAAG,MAAA,EAAQ,kBAAA,EAAoB,eAAA,EAAgB;AAAA,QACjD,eAAA;AAAA,QACA,QAAA;AAAA,QACA,MAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF,SAAS,KAAA,EAAO;AAEd,MAAA,IAAI,aAAa,EAAA,EAAI;AACnB,QAAA,MAAM,YAAA,GAAe,KAAK,6BAAA,CAA8B;AAAA,UACtD,OAAA;AAAA,UACA,aAAA,EAAe,aAAA;AAAA,UACf,SAAA;AAAA,UACA,eAAA,EAAiB,eAAA;AAAA,UACjB,OAAO,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AAAA,UAC5D,UAAU,MAAA,CAAO,EAAA;AAAA,UACjB;AAAA,SACD,CAAA;AAGD,QAAA,IAAI,MAAA,EAAQ;AACV,UAAA,MAAM,MAAA,CAAO,MAAA,CAAO,YAAY,CAAA,CAAE,MAAM,MAAM;AAAA,UAE9C,CAAC,CAAA;AAAA,QACH;AAAA,MAGF;AAEA,MAAA,IAAI,aAAa,OAAA,EAAS;AACxB,QAAA,MAAM,KAAA;AAAA,MACR;AAEA,MAAA,OAAA,CAAQ,KAAA,CAAM,4BAA4B,KAAA,YAAiB,KAAA,GAAQ,MAAM,OAAA,GAAU,MAAA,CAAO,KAAK,CAAC,CAAA;AAAA,IAClG,CAAA,SAAE;AACA,MAAA,MAAM,IAAA,CAAK,OAAA,CAAQ,gBAAA,CAAiB,MAAA,CAAO,IAAI,KAAK,CAAA;AAAA,IACtD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAc,2BAAA,CACZ,MAAA,EACA,iBACA,UAAA,EACA,qBAAA,EACA,QACA,WAAA,EACe;AASf,IAAA,MAAM,EAAE,OAAA,EAAS,UAAA,EAAW,GAAI,MAAM,IAAA,CAAK,OAAA,CAAQ,WAAA,CAAY,EAAE,MAAA,EAAQ,EAAE,UAAA,IAAc,CAAA;AACzF,IAAA,MAAM,iBAAA,uBAAwB,GAAA,EAAyC;AAEvE,IAAA,KAAA,MAAW,UAAU,UAAA,EAAY;AAC/B,MAAA,MAAM,UAAA,GAAa,mBAAA,CAAoB,MAAA,CAAO,QAAQ,CAAA;AACtD,MAAA,iBAAA,CAAkB,IAAI,MAAA,CAAO,EAAA,EAAI,EAAE,cAAA,EAAgB,UA
AA,EAAY,gBAAgB,CAAA;AAAA,IACjF;AAGA,IAAA,MAAM,gBAAA,uBAAuB,GAAA,EAA+B;AAE5D,IAAA,KAAA,MAAW,UAAU,UAAA,EAAY;AAC/B,MAAA,MAAM,oBAAA,GAAuB,iBAAA,CAAkB,GAAA,CAAI,MAAA,CAAO,EAAE,CAAA,EAAG,cAAA;AAK/D,MAAA,MAAM,SAAA,GAAY,oBAAA,GAAuB,IAAI,IAAA,CAAK,IAAI,IAAA,CAAK,oBAAoB,CAAA,CAAE,OAAA,EAAQ,GAAI,CAAC,CAAA,GAAI,MAAA;AAElG,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,OAAA,CAAQ,YAAA,CAAa;AAAA,QAC7C,UAAU,MAAA,CAAO,EAAA;AAAA,QACjB,OAAA,EAAS,KAAA;AAAA,QACT,OAAA,EAAS,EAAE,KAAA,EAAO,WAAA,EAAa,WAAW,KAAA,EAAM;AAAA,QAChD,MAAA,EAAQ,YAAY,EAAE,SAAA,EAAW,EAAE,KAAA,EAAO,SAAA,IAAY,GAAI;AAAA,OAC3D,CAAA;AAED,MAAA,IAAI,MAAA,CAAO,QAAA,CAAS,MAAA,GAAS,CAAA,EAAG;AAC9B,QAAA,gBAAA,CAAiB,GAAA,CAAI,MAAA,CAAO,EAAA,EAAI,MAAA,CAAO,QAAQ,CAAA;AAAA,MACjD;AAAA,IACF;AAIA,IAAA,IAAI,qBAAA,CAAsB,SAAS,CAAA,EAAG;AACpC,MAAA,MAAM,yBAAA,GAA4B,gBAAA,CAAiB,GAAA,CAAI,eAAe,KAAK,EAAC;AAC5E,MAAA,MAAM,UAAA,uBAAiB,GAAA,EAA6B;AAGpD,MAAA,KAAA,MAAW,OAAO,yBAAA,EAA2B;AAC3C,QAAA,IAAI,IAAI,EAAA,EAAI,UAAA,CAAW,GAAA,CAAI,GAAA,CAAI,IAAI,GAAG,CAAA;AAAA,MACxC;AAGA,MAAA,KAAA,MAAW,OAAO,qBAAA,EAAuB;AACvC,QAAA,IAAI,IAAI,EAAA,EAAI,UAAA,CAAW,GAAA,CAAI,GAAA,CAAI,IAAI,GAAG,CAAA;AAAA,MACxC;AAEA,MAAA,gBAAA,CAAiB,IAAI,eAAA,EAAiB,KAAA,CAAM,KAAK,UAAA,CAAW,MAAA,EAAQ,CAAC,CAAA;AAAA,IACvE;AAKA,IAAA,KAAA,MAAW,CAAC,GAAA,EAAK,IAAI,CAAA,IAAK,gBAAA,EAAkB;AAC1C,MAAA,MAAM,QAAA,GAAW,IAAA,CAAK,MAAA,CAAO,CAAA,CAAA,KAAK,CAAC,KAAK,kBAAA,CAAmB,GAAA,CAAI,CAAA,CAAE,EAAE,CAAC,CAAA;AACpE,MAAA,IAAI,QAAA,CAAS,SAAS,CAAA,EAAG;AACvB,QAAA,gBAAA,CAAiB,GAAA,CAAI,KAAK,QAAQ,CAAA;AAAA,MACpC,CAAA,MAAO;AACL,QAAA,gBAAA,CAAiB,OAAO,GAAG,CAAA;AAAA,MAC7B;AAAA,IACF;AAEA,IAAA,IAAI,aAAA,GAAgB,CAAA;AACpB,IAAA,KAAA,MAAW,IAAA,IAAQ,gBAAA,CAAiB,MAAA,EAAO,EAAG;AAC5C,MAAA,aAAA,IAAiB,IAAA,CAAK,MAAA;AAAA,IACxB;AAEA,IAAA,IAAI,kBAAkB,CAAA,EAAG;AACvB,MAAA;AAAA,IACF;AAQA,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,eAAA,CAAgB,IAAA,CAAK,kBAAkB,aAAa,CAAA;AAG3E,IAAA,MAAM,iBAAA,uBAAwB,GAAA,EAAoB;AAClD,IAAA,KAAA,MAAW,CAAC,QAAA,EAAU,IAAI,CAAA,IAAK,gBAAA,EAAkB;AAC/C,MAAA,IAAI,MAAA,GAAS,CAAA;AACb,MAAA,KAAA,MAAW,OAAO,IAAA,EAAM;AACtB,QAAA,
MAAA,IAAU,IAAA,CAAK,YAAA,CAAa,YAAA,CAAa,GAAG,CAAA;AAAA,MAC9C;AACA,MAAA,iBAAA,CAAkB,GAAA,CAAI,UAAU,MAAM,CAAA;AAAA,IACxC;AAEA,IAAA,MAAM,aAAA,GAAgB,KAAA,CAAM,IAAA,CAAK,gBAAA,CAAiB,IAAA,EAAM,CAAA,CAAE,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM;AACvE,MAAA,OAAA,CAAQ,iBAAA,CAAkB,IAAI,CAAC,CAAA,IAAK,MAAM,iBAAA,CAAkB,GAAA,CAAI,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,IACxE,CAAC,CAAA;AAGD,IAAA,IAAI,iBAAA,GAAoB,CAAA;AACxB,IAAA,MAAM,mBAA6B,EAAC;AAEpC,IAAA,KAAA,MAAW,YAAY,aAAA,EAAe;AACpC,MAAA,MAAM,YAAA,GAAe,iBAAA,CAAkB,GAAA,CAAI,QAAQ,CAAA,IAAK,CAAA;AAGxD,MAAA,IAAI,qBAAqB,SAAA,EAAW;AAClC,QAAA;AAAA,MACF;AAEA,MAAA,gBAAA,CAAiB,KAAK,QAAQ,CAAA;AAC9B,MAAA,iBAAA,IAAqB,YAAA;AAAA,IACvB;AAEA,IAAA,IAAI,gBAAA,CAAiB,WAAW,CAAA,EAAG;AACjC,MAAA;AAAA,IACF;AAGA,IAAA,MAAM,cAAc,IAAA,CAAK,0BAAA;AAAA,MACvB,IAAI,GAAA,CAAI,gBAAA,CAAiB,GAAA,CAAI,SAAO,CAAC,GAAA,EAAK,gBAAA,CAAiB,GAAA,CAAI,GAAG,CAAA,IAAK,EAAE,CAAC,CAAC;AAAA,KAC7E;AAQA,IAAA,MAAM,IAAA,CAAK,OAAA,CAAQ,gBAAA,CAAiB,MAAA,CAAO,IAAI,IAAI,CAAA;AAInD,IAAA,MAAM,OAAA,GAAU,OAAO,UAAA,EAAW;AAGlC,IAAA,MAAM,mBAAA,uBAA0B,GAAA,EAA+B;AAC/D,IAAA,MAAM,qBAAA,uBAA4B,GAAA,EAAoB;AACtD,IAAA,IAAI,oBAAA,GAAuB,EAAA;AAE3B,IAAA,IAAI;AAEF,MAAA,MAAM,cAAc,MAAM,IAAA,CAAK,OAAA,CAAQ,sBAAA,CAAuB,MAAM,UAAU,CAAA;AAC9E,MAAA,IAAI,WAAA,IAAe,WAAA,CAAY,cAAA,IAAkB,MAAA,CAAO,cAAA,EAAgB;AACtE,QAAA,IAAI,WAAA,CAAY,cAAA,GAAiB,MAAA,CAAO,cAAA,EAAgB;AACtD,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,oBAAA,GAAuB,WAAA,EAAa,kBAAA,IAAsB,MAAA,CAAO,kBAAA,IAAsB,EAAA;AAQ7F,MAAA,KAAA,MAAW,YAAY,WAAA,EAAa;AAClC,QAAA,MAAM,IAAA,GAAO,gBAAA,CAAiB,GAAA,CAAI,QAAQ,CAAA;AAC1C,QAAA,IAAI,IAAA,IAAQ,IAAA,CAAK,MAAA,GAAS,CAAA,EAAG;AAC3B,UAAA,mBAAA,CAAoB,GAAA,CAAI,UAAU,IAAI,CAAA;AAAA,QACxC;AAAA,MACF;AAGA,MAAA,IAAA,CAAK,cAAA,CAAe;AAAA,QAClB,IAAA,EAAM,uBAAA;AAAA,QACN,SAAA,sBAAe,IAAA,EAAK;AAAA,QACpB,QAAA,EAAU,WAAA,CAAY,IAAA,CAAK,GAAG,CAAA;AAAA,QAC9B,UAAA;AAAA,QACA,oBAAA,EAAsB,oBAAA;AAAA,QACtB,QAAA,EAAU,KAAA,CAAM,IAAA,CAAK,mBAAA,CAAoB,MAAA,EAAQ,CAAA,CAC9C,IAAA,EAAK,CACL,GAAA,CAAI,CAAA,CAAA,MAAM;AAAA,UACT,MAAM,CAAA,CAAE,IAAA;AAAA,UACR,OAAA,EAAS,OAAO,
CAAA,CAAE,OAAA,KAAY,QAAA,GAAW,EAAE,OAAA,GAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO;AAAA,SAC/E,CAAE;AAAA,OACL,CAAA;AAMD,MAAA,oBAAA,GAAA,iBAAuB,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAC9C,MAAA,MAAM,YAAA,GAAe,KAAA,CAAM,IAAA,CAAK,mBAAA,CAAoB,MAAM,CAAA;AAE1D,MAAA,KAAA,MAAW,CAAC,QAAA,EAAU,IAAI,CAAA,IAAK,mBAAA,EAAqB;AAClD,QAAA,MAAM,WAAA,GAAc,IAAA,CAAK,IAAA,CAAK,MAAA,GAAS,CAAC,CAAA;AACxC,QAAA,MAAM,eAAA,GAAkB,IAAA,CAAK,YAAA,CAAa,aAAA,CAAc,IAAI,CAAA;AAC5D,QAAA,qBAAA,CAAsB,GAAA,CAAI,UAAU,eAAe,CAAA;AAEnD,QAAA,IAAI,aAAa,EAAA,EAAI;AACnB,UAAA,MAAM,WAAA,GAAc,KAAK,4BAAA,CAA6B;AAAA,YACpD,OAAA;AAAA,YACA,aAAA,EAAe,aAAA;AAAA,YACf,eAAA;AAAA,YACA,UAAU,MAAA,CAAO,EAAA;AAAA,YACjB,QAAA;AAAA,YACA,SAAA,EAAW;AAAA,WACZ,CAAA;AAED,UAAA,IAAI,MAAA,EAAQ;AACV,YAAA,MAAM,MAAA,CAAO,MAAA,CAAO,WAAW,CAAA,CAAE,MAAM,MAAM;AAAA,YAE7C,CAAC,CAAA;AAAA,UACH;AAAA,QAGF;AAAA,MACF;AAMA,MAAA,MAAM,iBAAA,GACJ,IAAA,CAAK,iBAAA,CAAkB,iBAAA,IAAqB,8BAA8B,WAAA,CAAY,iBAAA;AACxF,MAAA,MAAM,mBAAmB,WAAA,CAAY,MAAA,CAAO,SAAO,mBAAA,CAAoB,GAAA,CAAI,GAAG,CAAC,CAAA;AAG/E,MAAA,MAAM,UAAqF,EAAC;AAC5F,MAAA,IAAI,YAAA,GAAmF;AAAA,QACrF,WAAW,EAAC;AAAA,QACZ,SAAA,sBAAe,GAAA;AAAI,OACrB;AACA,MAAA,IAAI,kBAAA,GAAqB,CAAA;AAEzB,MAAA,KAAA,MAAW,YAAY,gBAAA,EAAkB;AACvC,QAAA,MAAM,IAAA,GAAO,mBAAA,CAAoB,GAAA,CAAI,QAAQ,CAAA;AAC7C,QAAA,MAAM,YAAA,GAAe,iBAAA,CAAkB,GAAA,CAAI,QAAQ,CAAA,IAAK,CAAA;AAIxD,QAAA,IAAI,qBAAqB,YAAA,GAAe,iBAAA,IAAqB,YAAA,CAAa,SAAA,CAAU,SAAS,CAAA,EAAG;AAC9F,UAAA,OAAA,CAAQ,KAAK,YAAY,CAAA;AACzB,UAAA,YAAA,GAAe,EAAE,SAAA,EAAW,IAAI,SAAA,kBAAW,IAAI,KAAI,EAAE;AACrD,UAAA,kBAAA,GAAqB,CAAA;AAAA,QACvB;AAEA,QAAA,YAAA,CAAa,SAAA,CAAU,KAAK,QAAQ,CAAA;AACpC,QAAA,YAAA,CAAa,SAAA,CAAU,GAAA,CAAI,QAAA,EAAU,IAAI,CAAA;AACzC,QAAA,kBAAA,IAAsB,YAAA;AAAA,MACxB;AAGA,MAAA,IAAI,YAAA,CAAa,SAAA,CAAU,MAAA,GAAS,CAAA,EAAG;AACrC,QAAA,OAAA,CAAQ,KAAK,YAAY,CAAA;AAAA,MAC3B;AAGA,MAAA,MAAM,aAAA,GAAgB,OAAA,CAAQ,GAAA,CAAI,OAAM,KAAA,KAAS;AAC/C,QAAA,MAAM,WAAA,GAAc,MAAM,IAAA,CAAK,uBAAA;AAAA,UAC7B,oBAAA;AAAA,UACA,KAAA,CAAM,SAAA;AAAA,UACN,KAAA,CAAM,SAAA;AAAA,UACN;AAAA,SACF;AACA,QAAA,OAAO,WAAA;AAAA,MACT,CA
AC,CAAA;AAED,MAAA,MAAM,YAAA,GAAe,MAAM,OAAA,CAAQ,GAAA,CAAI,aAAa,CAAA;AAGpD,MAAA,MAAM,kBAAA,uBAAyB,GAAA,EAO7B;AACF,MAAA,IAAI,kBAAkB,EAAE,WAAA,EAAa,GAAG,YAAA,EAAc,CAAA,EAAG,aAAa,CAAA,EAAE;AACxE,MAAA,KAAA,MAAW,eAAe,YAAA,EAAc;AACtC,QAAA,KAAA,MAAW,CAAC,QAAA,EAAU,MAAM,CAAA,IAAK,YAAY,OAAA,EAAS;AACpD,UAAA,kBAAA,CAAmB,GAAA,CAAI,UAAU,MAAM,CAAA;AAAA,QACzC;AAEA,QAAA,IAAI,YAAY,KAAA,EAAO;AACrB,UAAA,eAAA,CAAgB,WAAA,IAAe,WAAA,CAAY,KAAA,CAAM,WAAA,IAAe,CAAA;AAChE,UAAA,eAAA,CAAgB,YAAA,IAAgB,WAAA,CAAY,KAAA,CAAM,YAAA,IAAgB,CAAA;AAClE,UAAA,eAAA,CAAgB,WAAA,IAAe,WAAA,CAAY,KAAA,CAAM,WAAA,IAAe,CAAA;AAAA,QAClE;AAAA,MACF;AAGA,MAAA,MAAM,qBAQM,EAAC;AAEb,MAAA,KAAA,MAAW,YAAY,WAAA,EAAa;AAClC,QAAA,MAAM,cAAA,GAAiB,gBAAA,CAAiB,GAAA,CAAI,QAAQ,KAAK,EAAC;AAC1D,QAAA,IAAI,cAAA,CAAe,WAAW,CAAA,EAAG;AAEjC,QAAA,MAAM,MAAA,GAAS,kBAAA,CAAmB,GAAA,CAAI,QAAQ,CAAA;AAC9C,QAAA,IAAI,CAAC,MAAA,EAAQ;AACX,UAAA;AAAA,QACF;AAIA,QAAA,kBAAA,CAAmB,IAAA,CAAK;AAAA,UACtB,QAAA;AAAA,UACA,cAAA;AAAA,UACA;AAAA,SACD,CAAA;AAAA,MACH;AAGA,MAAA,IAAI,mBAAA,GAAsB,oBAAA;AAC1B,MAAA,IAAI,sBAAA,GAAyB,CAAA;AAE7B,MAAA,KAAA,MAAW,aAAa,kBAAA,EAAoB;AAC1C,QAAA,IAAI,CAAC,SAAA,EAAW;AAEhB,QAAA,MAAM,EAAE,QAAA,EAAU,cAAA,EAAgB,MAAA,EAAO,GAAI,SAAA;AAG7C,QAAA,sBAAA,IAA0B,IAAA,CAAK,YAAA,CAAa,iBAAA,CAAkB,MAAA,CAAO,YAAY,CAAA;AAGjF,QAAA,MAAM,gBAAgB,MAAM,IAAA,CAAK,iBAAA,CAAkB,QAAA,EAAU,OAAO,YAAY,CAAA;AAChF,QAAA,mBAAA,GAAsB,IAAA,CAAK,4BAAA,CAA6B,mBAAA,EAAqB,QAAA,EAAU,aAAa,CAAA;AAKpG,QAAA,MAAM,oBAAA,GAAuB,IAAA,CAAK,sBAAA,CAAuB,cAAc,CAAA;AACvE,QAAA,MAAM,SAAS,MAAM,IAAA,CAAK,QAAQ,aAAA,CAAc,EAAE,UAAU,CAAA;AAC5D,QAAA,IAAI,MAAA,EAAQ;AACV,UAAA,MAAM,WAAA,GAAc,mBAAA,CAAoB,MAAA,CAAO,QAAA,EAAU;AAAA,YACvD,cAAA,EAAgB,qBAAqB,WAAA,EAAY;AAAA,YACjD,GAAI,MAAA,CAAO,qBAAA,IAAyB,EAAE,iBAAA,EAAmB,OAAO,qBAAA,EAAsB;AAAA,YACtF,GAAI,MAAA,CAAO,WAAA,IAAe,EAAE,WAAA,EAAa,OAAO,WAAA;AAAY,WAC7D,CAAA;AACD,UAAA,MAAM,IAAA,CAAK,QAAQ,YAAA,CAAa;AAAA,YAC9B,EAAA,EAAI,QAAA;AAAA,YACJ,KAAA,EAAO,OAAO,KAAA,IAAS,EAAA;AAAA,YACvB,QAAA,EAAU;AAAA,WACX,CAAA;AAAA,QACH;AAGA,QAAA,MAAM,aAAA,GAAgB,kBAAA,CAAmB,OAAA,CAAQ,SAAS,C
AAA,KAAM,CAAA;AAChE,QAAA,IAAA,CAAK,cAAA,CAAe;AAAA,UAClB,IAAA,EAAM,sBAAA;AAAA,UACN,SAAA,sBAAe,IAAA,EAAK;AAAA,UACpB,QAAA;AAAA,UACA,UAAA;AAAA,UACA,YAAA,EAAc,aAAA;AAAA,UACd,mBAAmB,MAAA,CAAO,YAAA;AAAA,UAC1B,sBAAsB,MAAA,CAAO,kBAAA;AAAA,UAC7B,QAAA,EAAU,cAAA,CAAe,GAAA,CAAI,CAAA,CAAA,MAAM;AAAA,YACjC,MAAM,CAAA,CAAE,IAAA;AAAA,YACR,OAAA,EAAS,OAAO,CAAA,CAAE,OAAA,KAAY,QAAA,GAAW,EAAE,OAAA,GAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO;AAAA,WAC/E,CAAE,CAAA;AAAA;AAAA,UAEF,KAAA,EAAO,aAAA,IAAiB,eAAA,CAAgB,WAAA,GAAc,IAAI,eAAA,GAAkB;AAAA,SAC7E,CAAA;AAAA,MACH;AAGA,MAAA,IAAI,eAAA,GAAkB,IAAA,CAAK,YAAA,CAAa,iBAAA,CAAkB,mBAAmB,CAAA;AAO7E,MAAA,MAAM,gBAAA,GAAmB,kBAAA,CACtB,MAAA,CAAO,CAAC,CAAA,KAAkC,CAAA,KAAM,IAAI,CAAA,CACpD,OAAA,CAAQ,CAAA,CAAA,KAAK,CAAA,CAAE,cAAc,CAAA;AAChC,MAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,sBAAA,CAAuB,gBAAgB,CAAA;AAGnE,MAAA,MAAM,aAAA,GAAgB,gBAAA,CAAiB,GAAA,CAAI,CAAA,CAAA,KAAK,EAAE,EAAE,CAAA;AACpD,MAAA,MAAM,WAAA,GAAc,MAAA,CAAO,kBAAA,IAAsB,EAAC;AAClD,MAAA,MAAM,cAAA,GAAiB,CAAC,mBAAG,IAAI,GAAA,CAAI,CAAC,GAAG,WAAA,EAAa,GAAG,aAAa,CAAC,CAAC,CAAA;AAEtE,MAAA,MAAM,IAAA,CAAK,QAAQ,wBAAA,CAAyB;AAAA,QAC1C,IAAI,MAAA,CAAO,EAAA;AAAA,QACX,YAAA,EAAc,mBAAA;AAAA,QACd,UAAA,EAAY,eAAA;AAAA,QACZ,cAAA;AAAA,QACA,kBAAA,EAAoB;AAAA,OACrB,CAAA;AAMD,MAAA,KAAA,MAAW,aAAa,kBAAA,EAAoB;AAC1C,QAAA,IAAI,CAAC,SAAA,EAAW;AAChB,QAAA,MAAM,EAAE,QAAA,EAAU,cAAA,EAAgB,MAAA,EAAO,GAAI,SAAA;AAC7C,QAAA,MAAM,WAAA,GAAc,cAAA,CAAe,cAAA,CAAe,MAAA,GAAS,CAAC,CAAA;AAC5D,QAAA,IAAI,aAAa,EAAA,EAAI;AACnB,UAAA,MAAM,cAAA,GAAiB,sBAAsB,GAAA,CAAI,QAAQ,KAAK,IAAA,CAAK,YAAA,CAAa,cAAc,cAAc,CAAA;AAC5G,UAAA,MAAM,SAAA,GAAY,KAAK,0BAAA,CAA2B;AAAA,YAChD,OAAA;AAAA,YACA,aAAA,EAAe,aAAA;AAAA,YACf,SAAA,EAAW,oBAAA;AAAA,YACX,cAAA;AAAA,YACA,iBAAA,EAAmB,sBAAA;AAAA,YACnB,cAAc,MAAA,CAAO,YAAA;AAAA,YACrB,aAAa,MAAA,CAAO,WAAA;AAAA,YACpB,mBAAmB,MAAA,CAAO,qBAAA;AAAA,YAC1B,UAAU,MAAA,CAAO,EAAA;AAAA,YACjB;AAAA,WACD,CAAA;AAGD,UAAA,IAAI,MAAA,EAAQ;AACV,YAAA,MAAM,MAAA,CAAO,MAAA,CAAO,SAAS,CAAA,CAAE,MAAM,MAAM;AAAA,YAE3C,CAAC,CAAA;AAAA,UACH;AAAA,QAGF;AAAA,MACF;AAGA,MAAA,MAAM,IAAA,CAAK,YAAA
;AAAA,QACT,EAAE,GAAG,MAAA,EAAQ,kBAAA,EAAoB,mBAAA,EAAoB;AAAA,QACrD,eAAA;AAAA,QACA,eAAA;AAAA,QACA,MAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF,SAAS,KAAA,EAAO;AAEd,MAAA,KAAA,MAAW,CAAC,QAAA,EAAU,IAAI,CAAA,IAAK,mBAAA,EAAqB;AAClD,QAAA,MAAM,WAAA,GAAc,IAAA,CAAK,IAAA,CAAK,MAAA,GAAS,CAAC,CAAA;AACxC,QAAA,IAAI,aAAa,EAAA,EAAI;AACnB,UAAA,MAAM,eAAA,GAAkB,qBAAA,CAAsB,GAAA,CAAI,QAAQ,CAAA,IAAK,CAAA;AAC/D,UAAA,MAAM,YAAA,GAAe,KAAK,6BAAA,CAA8B;AAAA,YACtD,OAAA;AAAA,YACA,aAAA,EAAe,aAAA;AAAA,YACf,SAAA,EAAW,oBAAA;AAAA,YACX,eAAA;AAAA,YACA,OAAO,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AAAA,YAC5D,UAAU,MAAA,CAAO,EAAA;AAAA,YACjB;AAAA,WACD,CAAA;AAGD,UAAA,IAAI,MAAA,EAAQ;AACV,YAAA,MAAM,MAAA,CAAO,MAAA,CAAO,YAAY,CAAA,CAAE,MAAM,MAAM;AAAA,YAE9C,CAAC,CAAA;AAAA,UACH;AAAA,QAGF;AAAA,MACF;AAEA,MAAA,IAAI,aAAa,OAAA,EAAS;AACxB,QAAA,MAAM,KAAA;AAAA,MACR;AAEA,MAAA,OAAA,CAAQ,KAAA,CAAM,4CAA4C,KAAA,YAAiB,KAAA,GAAQ,MAAM,OAAA,GAAU,MAAA,CAAO,KAAK,CAAC,CAAA;AAAA,IAClH,CAAA,SAAE;AACA,MAAA,MAAM,IAAA,CAAK,OAAA,CAAQ,gBAAA,CAAiB,MAAA,CAAO,IAAI,KAAK,CAAA;AAAA,IACtD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,YAAA,CACZ,MAAA,EACA,iBAAA,EACA,SAAA,EACA,QACA,WAAA,EACe;AACf,IAAA,IAAI,CAAC,IAAA,CAAK,aAAA,CAAc,iBAAiB,CAAA,EAAG;AAC1C,MAAA;AAAA,IACF;AAKA,IAAA,IAAI,OAAO,YAAA,EAAc;AACvB,MAAA;AAAA,IACF;AAEA,IAAA,MAAM,gBAAA,GAAmB,IAAA,CAAK,eAAA,CAAgB,IAAA,CAAK,iBAAiB,iBAAiB,CAAA;AAKrF,IAAA,MAAM,IAAA,CAAK,OAAA,CAAQ,iBAAA,CAAkB,MAAA,CAAO,IAAI,IAAI,CAAA;AAGpD,IAAA,MAAM,OAAA,GAAU,OAAO,UAAA,EAAW;AAClC,IAAA,MAAM,SAAA,GAAA,iBAAY,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AACzC,IAAA,MAAM,WAAW,SAAA,IAAa,SAAA;AAG9B,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,MAAM,WAAA,GAAc,KAAK,4BAAA,CAA6B;AAAA,QACpD,OAAA;AAAA,QACA,aAAA,EAAe,YAAA;AAAA,QACf,eAAA,EAAiB,iBAAA;AAAA,QACjB,UAAU,MAAA,CAAO,EAAA;AAAA,QACjB,QAAA;AAAA,QACA,SAAA,EAAW,CAAC,QAAQ;AAAA,OACrB,CAAA;AACD,MAAA,MAAM,MAAA,CAAO,MAAA,CAAO,WAAW,CAAA,CAAE,MAAM,MAAM;AAAA,MAAC,CAAC,CAAA;AAAA,IACjD;AAGA,IAAA,IAAA,CAAK,cAAA,CAAe;AAAA,MAClB,IAAA,EAAM,sBAAA;AAAA,MACN,SAAA,sBAAe,IAAA,EAAK;AAAA,MACpB,QAAA;AAAA,MACA,UAAA,EAAY,OAAO,UAAA,I
AAc,EAAA;AAAA,MACjC,WAAA,EAAa,iBAAA;AAAA,MACb,wBAAA,EAA0B,MAAA,CAAO,kBAAA,EAAoB,MAAA,IAAU;AAAA,KAChE,CAAA;AAGD,IAAA,MAAM,gBAAgB,MAAA,GAClB;AAAA,MACE,MAAA;AAAA,MACA,OAAA;AAAA,MACA,SAAA;AAAA,MACA,UAAU,MAAA,CAAO,EAAA;AAAA,MACjB;AAAA,KACF,GACA,MAAA;AAEJ,IAAA,IAAI;AACF,MAAA,MAAM,aAAA,GAAgB,MAAM,IAAA,CAAK,aAAA;AAAA,QAC/B,MAAA,CAAO,kBAAA;AAAA,QACP,MAAA;AAAA,QACA,aAAA;AAAA,QACA,gBAAA;AAAA,QACA;AAAA,OACF;AACA,MAAA,MAAM,oBAAA,GAAuB,IAAA,CAAK,YAAA,CAAa,iBAAA,CAAkB,cAAc,YAAY,CAAA;AAE3F,MAAA,MAAM,IAAA,CAAK,QAAQ,0BAAA,CAA2B;AAAA,QAC5C,aAAA,EAAe,MAAA;AAAA,QACf,YAAY,aAAA,CAAc,YAAA;AAAA,QAC1B,UAAA,EAAY;AAAA,OACb,CAAA;AAGD,MAAA,IAAI,UAAU,aAAA,EAAe;AAC3B,QAAA,MAAM,SAAA,GAAY,KAAK,0BAAA,CAA2B;AAAA,UAChD,SAAS,aAAA,CAAc,OAAA;AAAA,UACvB,aAAA,EAAe,YAAA;AAAA,UACf,WAAW,aAAA,CAAc,SAAA;AAAA,UACzB,cAAA,EAAgB,iBAAA;AAAA,UAChB,iBAAA,EAAmB,oBAAA;AAAA,UACnB,cAAc,aAAA,CAAc,YAAA;AAAA,UAC5B,UAAU,MAAA,CAAO,EAAA;AAAA,UACjB;AAAA,SACD,CAAA;AACD,QAAA,MAAM,MAAA,CAAO,MAAA,CAAO,SAAS,CAAA,CAAE,MAAM,MAAM;AAAA,QAAC,CAAC,CAAA;AAAA,MAC/C;AAGA,MAAA,IAAA,CAAK,cAAA,CAAe;AAAA,QAClB,IAAA,EAAM,qBAAA;AAAA,QACN,SAAA,sBAAe,IAAA,EAAK;AAAA,QACpB,QAAA;AAAA,QACA,UAAA,EAAY,OAAO,UAAA,IAAc,EAAA;AAAA,QACjC,WAAA,EAAa,iBAAA;AAAA,QACb,YAAA,EAAc,oBAAA;AAAA,QACd,cAAc,aAAA,CAAc,YAAA;AAAA,QAC5B,OAAO,aAAA,CAAc;AAAA,OACtB,CAAA;AAAA,IACH,SAAS,KAAA,EAAO;AAEd,MAAA,IAAI,UAAU,aAAA,EAAe;AAC3B,QAAA,MAAM,YAAA,GAAe,KAAK,6BAAA,CAA8B;AAAA,UACtD,SAAS,aAAA,CAAc,OAAA;AAAA,UACvB,aAAA,EAAe,YAAA;AAAA,UACf,WAAW,aAAA,CAAc,SAAA;AAAA,UACzB,eAAA,EAAiB,iBAAA;AAAA,UACjB,OAAO,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AAAA,UAC5D,UAAU,MAAA,CAAO,EAAA;AAAA,UACjB;AAAA,SACD,CAAA;AACD,QAAA,MAAM,MAAA,CAAO,MAAA,CAAO,YAAY,CAAA,CAAE,MAAM,MAAM;AAAA,QAAC,CAAC,CAAA;AAAA,MAClD;AAEA,MAAA,IAAI,aAAa,OAAA,EAAS;AACxB,QAAA,MAAM,KAAA;AAAA,MACR;AAEA,MAAA,OAAA,CAAQ,KAAA,CAAM,2BAA2B,KAAA,YAAiB,KAAA,GAAQ,MAAM,OAAA,GAAU,MAAA,CAAO,KAAK,CAAC,CAAA;AAAA,IACjG,CAAA,SAAE;AACA,MAAA,MAAM,IAAA,CAAK,OAAA,CAAQ,iBAAA,CAAkB,MAAA,CAAO,IAAI,KAAK,CAAA;AAAA,IACvD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,E
AKA,MAAM,OAAA,CAAQ,QAAA,EAAkB,UAAA,EAAqB,OAAA,EAAiC;AACpF,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,UAAA,CAAW,QAAA,EAAU,UAAU,CAAA;AAEpD,IAAA,MAAM,IAAA,CAAK,QAAA,CAAS,OAAA,EAAS,YAAY;AAEvC,MAAA,MAAM,WAAA,GAAc,MAAM,IAAA,CAAK,iBAAA,CAAkB,UAAU,UAAU,CAAA;AAErE,MAAA,IAAI,IAAA,CAAK,KAAA,KAAU,UAAA,IAAc,UAAA,EAAY;AAE3C,QAAA,MAAM,IAAA,CAAK,2BAAA;AAAA,UACT,WAAA;AAAA,UACA,QAAA;AAAA,UACA,UAAA;AAAA,UACA;AAAC;AAAA,SACH;AAAA,MACF,CAAA,MAAO;AAEL,QAAA,MAAM,kBAAA,GAAqB,MAAM,IAAA,CAAK,sBAAA;AAAA,UACpC,QAAA;AAAA,UACA,UAAA;AAAA,UACA,YAAY,cAAA,GAAiB,IAAI,IAAA,CAAK,WAAA,CAAY,cAAc,CAAA,GAAI;AAAA,SACtE;AAEA,QAAA,IAAI,kBAAA,CAAmB,WAAW,CAAA,EAAG;AACnC,UAAA;AAAA,QACF;AAEA,QAAA,MAAM,IAAA,CAAK,wBAAA,CAAyB,WAAA,EAAa,QAAA,EAAU,kBAAkB,CAAA;AAAA,MAC/E;AAAA,IACF,CAAC,CAAA;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,OAAA,CAAQ,QAAA,EAAkB,UAAA,EAAqB,MAAA,EAAgC;AACnF,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,iBAAA,CAAkB,UAAU,UAAU,CAAA;AAEhE,IAAA,IAAI,CAAC,OAAO,kBAAA,EAAoB;AAC9B,MAAA;AAAA,IACF;AAEA,IAAA,MAAM,IAAA,CAAK,OAAA,CAAQ,iBAAA,CAAkB,MAAA,CAAO,IAAI,IAAI,CAAA;AAEpD,IAAA,IAAI;AACF,MAAA,MAAM,gBAAA,GAAmB,IAAA,CAAK,eAAA,CAAgB,IAAA,CAAK,iBAAiB,iBAAiB,CAAA;AACrF,MAAA,MAAM,aAAA,GAAgB,MAAM,IAAA,CAAK,aAAA,CAAc,OAAO,kBAAA,EAAoB,MAAA,EAAQ,QAAW,gBAAgB,CAAA;AAC7G,MAAA,MAAM,oBAAA,GAAuB,IAAA,CAAK,YAAA,CAAa,iBAAA,CAAkB,cAAc,YAAY,CAAA;AAE3F,MAAA,MAAM,IAAA,CAAK,QAAQ,0BAAA,CAA2B;AAAA,QAC5C,aAAA,EAAe,MAAA;AAAA,QACf,YAAY,aAAA,CAAc,YAAA;AAAA,QAC1B,UAAA,EAAY;AAAA,OACb,CAAA;AAAA,IAIH,CAAA,SAAE;AACA,MAAA,MAAM,IAAA,CAAK,OAAA,CAAQ,iBAAA,CAAkB,MAAA,CAAO,IAAI,KAAK,CAAA;AAAA,IACvD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAAA,CAAgB,QAAA,EAAkB,UAAA,EAAkD;AACxF,IAAA,MAAM,GAAA,GAAM,IAAA,CAAK,aAAA,CAAc,QAAA,EAAU,UAAU,CAAA;AACnD,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,OAAA,CAAQ,uBAAuB,GAAA,CAAI,QAAA,EAAU,IAAI,UAAU,CAAA;AACrF,IAAA,OAAO,MAAA,EAAQ,kBAAA;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAA,CAAU,QAAA,EAAkB,UAAA,EAAgE;AAChG,IAAA,MAAM,GAAA,GAAM,IAAA,CAAK,aAAA,CAAc,QAAA,EAAU,UAAU,CAAA;AACnD,IAAA,OAAO,KAAK,OAAA,CAAQ,sBAAA,CA
AuB,GAAA,CAAI,QAAA,EAAU,IAAI,UAAU,CAAA;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAA,CAAW,QAAA,EAAkB,UAAA,EAAqB,KAAA,EAAsD;AAC5G,IAAA,MAAM,GAAA,GAAM,IAAA,CAAK,aAAA,CAAc,QAAA,EAAU,UAAU,CAAA;AACnD,IAAA,OAAO,KAAK,OAAA,CAAQ,6BAAA,CAA8B,IAAI,QAAA,EAAU,GAAA,CAAI,YAAY,KAAK,CAAA;AAAA,EACvF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAA,CAAM,QAAA,EAAkB,UAAA,EAAoC;AAChE,IAAA,MAAM,GAAA,GAAM,IAAA,CAAK,aAAA,CAAc,QAAA,EAAU,UAAU,CAAA;AACnD,IAAA,MAAM,KAAK,OAAA,CAAQ,wBAAA,CAAyB,GAAA,CAAI,QAAA,EAAU,IAAI,UAAU,CAAA;AAAA,EAC1E;AAAA;AAAA;AAAA;AAAA,EAKA,UAAA,GAA4B;AAC1B,IAAA,OAAO,IAAA,CAAK,OAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,eAAA,GAAgC;AAC9B,IAAA,OAAO,IAAA,CAAK,YAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAA,GAAkD;AAChD,IAAA,OAAO,IAAA,CAAK,iBAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,mBAAA,GAAgD;AAC9C,IAAA,OAAO,IAAA,CAAK,gBAAA;AAAA,EACd;AACF","file":"chunk-6TXUWFIU.js","sourcesContent":["import type { MastraDBMessage } from '@mastra/core/agent';\n\n/**\n * Legacy extraction instructions from Jan 7, 2026.\n * Used for A/B testing prompt size impact on accuracy.\n * Enable with OM_USE_LEGACY_PROMPT=1\n */\nconst LEGACY_OBSERVER_EXTRACTION_INSTRUCTIONS = `CRITICAL: DISTINGUISH USER ASSERTIONS FROM QUESTIONS\n\nWhen the user TELLS you something about themselves, mark it as an assertion:\n- \"I have two kids\" → 🔴 (14:30) User stated has two kids\n- \"I work at Acme Corp\" → 🔴 (14:31) User stated works at Acme Corp\n- \"I graduated in 2019\" → 🔴 (14:32) User stated graduated in 2019\n\nWhen the user ASKS about something, mark it as a question/request:\n- \"Can you help me with X?\" → 🟡 (15:00) User asked help with X\n- \"What's the best way to do Y?\" → 🟡 (15:01) User asked best way to do Y\n\nUSER ASSERTIONS ARE AUTHORITATIVE. 
The user is the source of truth about their own life.\nIf a user previously stated something and later asks a question about the same topic,\nthe assertion is the answer - the question doesn't invalidate what they already told you.\n\nTEMPORAL ANCHORING:\nConvert relative times to estimated dates based on the message timestamp.\nInclude the user's original phrasing in quotes, then add an estimated date or range.\nRanges may span multiple months - e.g., \"within the last month\" on July 15th could mean anytime in June to early July.\n\nBAD: User was given X by their friend last month.\nGOOD: User was given X by their friend \"last month\" (estimated mid-June to early July 202X).\n\nPRESERVE UNUSUAL PHRASING:\nWhen the user uses unexpected or non-standard terminology, quote their exact words.\n\nBAD: User exercised.\nGOOD: User stated they did a \"movement session\" (their term for exercise).\n\nCONVERSATION CONTEXT:\n- What the user is working on or asking about\n- Previous topics and their outcomes\n- What user understands or needs clarification on\n- Specific requirements or constraints mentioned\n- Contents of assistant learnings and summaries\n- Answers to users questions including full context to remember detailed summaries and explanations\n- Assistant explanations, especially complex ones. observe the fine details so that the assistant does not forget what they explained\n- Relevant code snippets\n- User preferences (like favourites, dislikes, preferences, etc)\n- Any specifically formatted text or ascii that would need to be reproduced or referenced in later interactions (preserve these verbatim in memory)\n- Any blocks of any text which the user and assistant are iteratively collaborating back and forth on should be preserved verbatim\n- When who/what/where/when is mentioned, note that in the observation. 
Example: if the user received went on a trip with someone, observe who that someone was, where the trip was, when it happened, and what happened, not just that the user went on the trip.\n\nACTIONABLE INSIGHTS:\n- What worked well in explanations\n- What needs follow-up or clarification\n- User's stated goals or next steps (note if the user tells you not to do a next step, or asks for something specific, other next steps besides the users request should be marked as \"waiting for user\", unless the user explicitly says to continue all next steps)`;\n\n/**\n * Check which prompt variant to use (for A/B testing)\n */\nconst USE_LEGACY_PROMPT = process.env.OM_USE_LEGACY_PROMPT === '1' || process.env.OM_USE_LEGACY_PROMPT === 'true';\nconst USE_CONDENSED_PROMPT =\n process.env.OM_USE_CONDENSED_PROMPT === '1' || process.env.OM_USE_CONDENSED_PROMPT === 'true';\n\n/**\n * Condensed V3 extraction instructions - principle-based, relies on model's common sense.\n * ~45 lines vs ~200 lines in current prompt.\n * Enable with OM_USE_CONDENSED_PROMPT=1\n */\nconst CONDENSED_OBSERVER_EXTRACTION_INSTRUCTIONS = `You are the memory consciousness of an AI assistant. Your observations will be the ONLY information the assistant has about past interactions with this user.\n\nCORE PRINCIPLES:\n\n1. BE SPECIFIC - Vague observations are useless. Capture details that distinguish and identify.\n2. ANCHOR IN TIME - Note when things happened and when they were said.\n3. TRACK STATE CHANGES - When information updates or supersedes previous info, make it explicit.\n4. USE COMMON SENSE - If it would help the assistant remember later, observe it.\n\nASSERTIONS VS QUESTIONS:\n- User TELLS you something → 🔴 \"User stated [fact]\"\n- User ASKS something → 🟡 \"User asked [question]\"\n- User assertions are authoritative. 
They are the source of truth about their own life.\n\nTEMPORAL ANCHORING:\n- Always include message time at the start: (14:30) User stated...\n- Add estimated date at the END only for relative time references:\n \"User will visit parents this weekend. (meaning Jan 18-19)\"\n- Don't add end dates for present-moment statements or vague terms like \"recently\"\n- Split multi-event statements into separate observations, each with its own date\n\nDETAILS TO ALWAYS PRESERVE:\n- Names, handles, usernames, titles (@username, \"Dr. Smith\")\n- Numbers, counts, quantities (4 items, 3 sessions, 27th in list)\n- Measurements, percentages, statistics (5kg, 20% improvement, 85% accuracy)\n- Sequences and orderings (steps 1-5, chord progression, lucky numbers)\n- Prices, dates, times, durations ($50, March 15, 2 hours)\n- Locations and distinguishing attributes (near X, based in Y, specializes in Z)\n- User's specific role (presenter, volunteer, organizer - not just \"attended\")\n- Exact phrasing when unusual (\"movement session\" for exercise)\n- Verbatim text being collaborated on (code, formatted text, ASCII art)\n\nWHEN ASSISTANT PROVIDES LISTS/RECOMMENDATIONS:\nDon't just say \"Assistant recommended 5 hotels.\" Capture what distinguishes each:\n\"Assistant recommended: Hotel A (near station), Hotel B (pet-friendly), Hotel C (has pool)...\"\n\nSTATE CHANGES:\nWhen user updates information, note what changed:\n\"User will use the new method (replacing the old approach)\"\n\nWHO/WHAT/WHERE/WHEN:\nCapture all dimensions. Not just \"User went on a trip\" but who with, where, when, and what happened.\n\nDon't repeat observations that have already been captured in previous sessions.\n\nREMEMBER: These observations are your ENTIRE memory. Any detail you fail to observe is permanently forgotten. Use common sense - if something seems like it might be important to remember, it probably is. 
When in doubt, observe it.`;\n\n/**\n * The core extraction instructions for the Observer.\n * This is exported so the Reflector can understand how observations were created.\n */\nconst CURRENT_OBSERVER_EXTRACTION_INSTRUCTIONS = `CRITICAL: DISTINGUISH USER ASSERTIONS FROM QUESTIONS\n\nWhen the user TELLS you something about themselves, mark it as an assertion:\n- \"I have two kids\" → 🔴 (14:30) User stated has two kids\n- \"I work at Acme Corp\" → 🔴 (14:31) User stated works at Acme Corp\n- \"I graduated in 2019\" → 🔴 (14:32) User stated graduated in 2019\n\nWhen the user ASKS about something, mark it as a question/request:\n- \"Can you help me with X?\" → 🟡 (15:00) User asked help with X\n- \"What's the best way to do Y?\" → 🟡 (15:01) User asked best way to do Y\n\nDistinguish between QUESTIONS and STATEMENTS OF INTENT:\n- \"Can you recommend...\" → Question (extract as \"User asked...\")\n- \"I'm looking forward to [doing X]\" → Statement of intent (extract as \"User stated they will [do X] (include estimated/actual date if mentioned)\")\n- \"I need to [do X]\" → Statement of intent (extract as \"User stated they need to [do X] (again, add date if mentioned)\")\n\nSTATE CHANGES AND UPDATES:\nWhen a user indicates they are changing something, frame it as a state change that supersedes previous information:\n- \"I'm going to start doing X instead of Y\" → \"User will start doing X (changing from Y)\"\n- \"I'm switching from A to B\" → \"User is switching from A to B\"\n- \"I moved my stuff to the new place\" → \"User moved their stuff to the new place (no longer at previous location)\"\n\nIf the new state contradicts or updates previous information, make that explicit:\n- BAD: \"User plans to use the new method\"\n- GOOD: \"User will use the new method (replacing the old approach)\"\n\nThis helps distinguish current state from outdated information.\n\nUSER ASSERTIONS ARE AUTHORITATIVE. 
The user is the source of truth about their own life.\nIf a user previously stated something and later asks a question about the same topic,\nthe assertion is the answer - the question doesn't invalidate what they already told you.\n\nTEMPORAL ANCHORING:\nEach observation has TWO potential timestamps:\n\n1. BEGINNING: The time the statement was made (from the message timestamp) - ALWAYS include this\n2. END: The time being REFERENCED, if different from when it was said - ONLY when there's a relative time reference\n\nONLY add \"(meaning DATE)\" or \"(estimated DATE)\" at the END when you can provide an ACTUAL DATE:\n- Past: \"last week\", \"yesterday\", \"a few days ago\", \"last month\", \"in March\"\n- Future: \"this weekend\", \"tomorrow\", \"next week\"\n\nDO NOT add end dates for:\n- Present-moment statements with no time reference\n- Vague references like \"recently\", \"a while ago\", \"lately\", \"soon\" - these cannot be converted to actual dates\n\nFORMAT:\n- With time reference: (TIME) [observation]. (meaning/estimated DATE)\n- Without time reference: (TIME) [observation].\n\nGOOD: (09:15) User's friend had a birthday party in March. (meaning March 20XX)\n ^ References a past event - add the referenced date at the end\n\nGOOD: (09:15) User will visit their parents this weekend. (meaning June 17-18, 20XX)\n ^ References a future event - add the referenced date at the end\n\nGOOD: (09:15) User prefers hiking in the mountains.\n ^ Present-moment preference, no time reference - NO end date needed\n\nGOOD: (09:15) User is considering adopting a dog.\n ^ Present-moment thought, no time reference - NO end date needed\n\nBAD: (09:15) User prefers hiking in the mountains. 
(meaning June 15, 20XX - today)\n ^ No time reference in the statement - don't repeat the message timestamp at the end\n\nIMPORTANT: If an observation contains MULTIPLE events, split them into SEPARATE observation lines.\nEACH split observation MUST have its own date at the end - even if they share the same time context.\n\nExamples (assume message is from June 15, 20XX):\n\nBAD: User will visit their parents this weekend (meaning June 17-18, 20XX) and go to the dentist tomorrow.\nGOOD (split into two observations, each with its date):\n User will visit their parents this weekend. (meaning June 17-18, 20XX)\n User will go to the dentist tomorrow. (meaning June 16, 20XX)\n\nBAD: User needs to clean the garage this weekend and is looking forward to setting up a new workbench.\nGOOD (split, BOTH get the same date since they're related):\n User needs to clean the garage this weekend. (meaning June 17-18, 20XX)\n User will set up a new workbench this weekend. (meaning June 17-18, 20XX)\n\nBAD: User was given a gift by their friend (estimated late May 20XX) last month.\nGOOD: (09:15) User was given a gift by their friend last month. (estimated late May 20XX)\n ^ Message time at START, relative date reference at END - never in the middle\n\nBAD: User started a new job recently and will move to a new apartment next week.\nGOOD (split):\n User started a new job recently.\n User will move to a new apartment next week. (meaning June 21-27, 20XX)\n ^ \"recently\" is too vague for a date - omit the end date. 
\"next week\" can be calculated.\n\nALWAYS put the date at the END in parentheses - this is critical for temporal reasoning.\nWhen splitting related events that share the same time context, EACH observation must have the date.\n\nPRESERVE UNUSUAL PHRASING:\nWhen the user uses unexpected or non-standard terminology, quote their exact words.\n\nBAD: User exercised.\nGOOD: User stated they did a \"movement session\" (their term for exercise).\n\nUSE PRECISE ACTION VERBS:\nReplace vague verbs like \"getting\", \"got\", \"have\" with specific action verbs that clarify the nature of the action.\nIf the assistant confirms or clarifies the user's action, use the assistant's more precise language.\n\nBAD: User is getting X.\nGOOD: User subscribed to X. (if context confirms recurring delivery)\nGOOD: User purchased X. (if context confirms one-time acquisition)\n\nBAD: User got something.\nGOOD: User purchased / received / was given something. (be specific)\n\nCommon clarifications:\n- \"getting\" something regularly → \"subscribed to\" or \"enrolled in\"\n- \"getting\" something once → \"purchased\" or \"acquired\"\n- \"got\" → \"purchased\", \"received as gift\", \"was given\", \"picked up\"\n- \"signed up\" → \"enrolled in\", \"registered for\", \"subscribed to\"\n- \"stopped getting\" → \"canceled\", \"unsubscribed from\", \"discontinued\"\n\nWhen the assistant interprets or confirms the user's vague language, prefer the assistant's precise terminology.\n\nPRESERVING DETAILS IN ASSISTANT-GENERATED CONTENT:\n\nWhen the assistant provides lists, recommendations, or creative content that the user explicitly requested,\npreserve the DISTINGUISHING DETAILS that make each item unique and queryable later.\n\n1. 
RECOMMENDATION LISTS - Preserve the key attribute that distinguishes each item:\n BAD: Assistant recommended 5 hotels in the city.\n GOOD: Assistant recommended hotels: Hotel A (near the train station), Hotel B (budget-friendly), \n Hotel C (has rooftop pool), Hotel D (pet-friendly), Hotel E (historic building).\n \n BAD: Assistant listed 3 online stores for craft supplies.\n GOOD: Assistant listed craft stores: Store A (based in Germany, ships worldwide), \n Store B (specializes in vintage fabrics), Store C (offers bulk discounts).\n\n2. NAMES, HANDLES, AND IDENTIFIERS - Always preserve specific identifiers:\n BAD: Assistant provided social media accounts for several photographers.\n GOOD: Assistant provided photographer accounts: @photographer_one (portraits), \n @photographer_two (landscapes), @photographer_three (nature).\n \n BAD: Assistant listed some authors to check out.\n GOOD: Assistant recommended authors: Jane Smith (mystery novels), \n Bob Johnson (science fiction), Maria Garcia (historical romance).\n\n3. CREATIVE CONTENT - Preserve structure and key sequences:\n BAD: Assistant wrote a poem with multiple verses.\n GOOD: Assistant wrote a 3-verse poem. Verse 1 theme: loss. Verse 2 theme: hope. \n Verse 3 theme: renewal. Refrain: \"The light returns.\"\n \n BAD: User shared their lucky numbers from a fortune cookie.\n GOOD: User's fortune cookie lucky numbers: 7, 14, 23, 38, 42, 49.\n\n4. TECHNICAL/NUMERICAL RESULTS - Preserve specific values:\n BAD: Assistant explained the performance improvements from the optimization.\n GOOD: Assistant explained the optimization achieved 43.7% faster load times \n and reduced memory usage from 2.8GB to 940MB.\n \n BAD: Assistant provided statistics about the dataset.\n GOOD: Assistant provided dataset stats: 7,342 samples, 89.6% accuracy, \n 23ms average inference time.\n\n5. 
QUANTITIES AND COUNTS - Always preserve how many of each item:\n BAD: Assistant listed items with details but no quantities.\n GOOD: Assistant listed items: Item A (4 units, size large), Item B (2 units, size small).\n \n When listing items with attributes, always include the COUNT first before other details.\n\n6. ROLE/PARTICIPATION STATEMENTS - When user mentions their role at an event:\n BAD: User attended the company event.\n GOOD: User was a presenter at the company event.\n \n BAD: User went to the fundraiser.\n GOOD: User volunteered at the fundraiser (helped with registration).\n \n Always capture specific roles: presenter, organizer, volunteer, team lead, \n coordinator, participant, contributor, helper, etc.\n\nCONVERSATION CONTEXT:\n- What the user is working on or asking about\n- Previous topics and their outcomes\n- What user understands or needs clarification on\n- Specific requirements or constraints mentioned\n- Contents of assistant learnings and summaries\n- Answers to users questions including full context to remember detailed summaries and explanations\n- Assistant explanations, especially complex ones. observe the fine details so that the assistant does not forget what they explained\n- Relevant code snippets\n- User preferences (like favourites, dislikes, preferences, etc)\n- Any specifically formatted text or ascii that would need to be reproduced or referenced in later interactions (preserve these verbatim in memory)\n- Sequences, units, measurements, and any kind of specific relevant data\n- Any blocks of any text which the user and assistant are iteratively collaborating back and forth on should be preserved verbatim\n- When who/what/where/when is mentioned, note that in the observation. 
Example: if the user received went on a trip with someone, observe who that someone was, where the trip was, when it happened, and what happened, not just that the user went on the trip.\n- For any described entity (like a person, place, thing, etc), preserve the attributes that would help identify or describe the specific entity later: location (\"near X\"), specialty (\"focuses on Y\"), unique feature (\"has Z\"), relationship (\"owned by W\"), or other details. The entity's name is important, but so are any additional details that distinguish it. If there are a list of entities, preserve these details for each of them.\n\nACTIONABLE INSIGHTS:\n- What worked well in explanations\n- What needs follow-up or clarification\n- User's stated goals or next steps (note if the user tells you not to do a next step, or asks for something specific, other next steps besides the users request should be marked as \"waiting for user\", unless the user explicitly says to continue all next steps)`;\n\n/**\n * Select which extraction instructions to use based on environment variable.\n * Set OM_USE_LEGACY_PROMPT=1 to use the smaller Jan 7 prompt for A/B testing.\n * Set OM_USE_CONDENSED_PROMPT=1 to use the new condensed V3 prompt.\n */\nexport const OBSERVER_EXTRACTION_INSTRUCTIONS = USE_CONDENSED_PROMPT\n ? CONDENSED_OBSERVER_EXTRACTION_INSTRUCTIONS\n : USE_LEGACY_PROMPT\n ? 
LEGACY_OBSERVER_EXTRACTION_INSTRUCTIONS\n : CURRENT_OBSERVER_EXTRACTION_INSTRUCTIONS;\n\n/**\n * The output format instructions for the Observer.\n * This is exported so the Reflector can use the same format.\n */\n\n/**\n * Condensed output format with realistic examples that model desired patterns.\n */\nconst CONDENSED_OBSERVER_OUTPUT_FORMAT = `Use priority levels:\n- 🔴 High: explicit user facts, preferences, goals achieved, critical context\n- 🟡 Medium: project details, learned information, tool results\n- 🟢 Low: minor details, uncertain observations\n\nGroup observations by date, then list each with 24-hour time.\nGroup related observations (like tool sequences) by indenting.\n\n<observations>\nDate: Dec 4, 2025\n* 🔴 (09:15) User stated they have 3 kids: Emma (12), Jake (9), and Lily (5)\n* 🔴 (09:16) User's anniversary is March 15\n* 🟡 (09:20) User asked how to optimize database queries\n* 🟡 (10:30) User working on auth refactor - targeting 50% latency reduction\n* 🟡 (10:45) Assistant recommended hotels: Grand Plaza (downtown, $180/night), Seaside Inn (near beach, pet-friendly), Mountain Lodge (has pool, free breakfast)\n* 🔴 (11:00) User's friend @maria_dev recommended using Redis for caching\n* 🟡 (11:15) User attended the tech conference as a speaker (presented on microservices)\n* 🔴 (11:30) User will visit parents this weekend (meaning Dec 7-8, 2025)\n* 🟡 (14:00) Agent debugging auth issue\n * -> ran git status, found 3 modified files\n * -> viewed auth.ts:45-60, found missing null check\n * -> applied fix, tests now pass\n* 🟡 (14:30) Assistant provided dataset stats: 7,342 samples, 89.6% accuracy, 23ms inference time\n* 🔴 (15:00) User's lucky numbers from fortune cookie: 7, 14, 23, 38, 42, 49\n\nDate: Dec 5, 2025\n* 🔴 (09:00) User switched from Python to TypeScript for the project (no longer using Python)\n* 🟡 (09:30) User bought running shoes for $120 at SportMart (downtown location)\n* 🔴 (10:00) User prefers morning meetings, not afternoon (updating 
previous preference)\n* 🟡 (10:30) User went to Italy with their sister last summer (meaning July 2025), visited Rome and Florence for 2 weeks\n* 🔴 (10:45) User's dentist appointment is next Tuesday (meaning Dec 10, 2025)\n* 🟢 (11:00) User mentioned they might try the new coffee shop\n</observations>\n\n<current-task>\nPrimary: Implementing OAuth2 flow for the auth refactor\nSecondary: Waiting for user to confirm database schema changes\n</current-task>\n\n<suggested-response>\nThe OAuth2 implementation is ready for testing. Would you like me to walk through the flow?\n</suggested-response>`;\n\n/**\n * Base output format for Observer (without patterns section)\n */\nexport const OBSERVER_OUTPUT_FORMAT_BASE = `Use priority levels:\n- 🔴 High: explicit user facts, preferences, goals achieved, critical context\n- 🟡 Medium: project details, learned information, tool results\n- 🟢 Low: minor details, uncertain observations\n\nGroup related observations (like tool sequences) by indenting:\n* 🟡 (14:33) Agent debugging auth issue\n * -> ran git status, found 3 modified files\n * -> viewed auth.ts:45-60, found missing null check\n * -> applied fix, tests now pass\n\nGroup observations by date, then list each with 24-hour time.\n\n<observations>\nDate: Dec 4, 2025\n* 🔴 (14:30) User prefers direct answers\n* 🟡 (14:31) Working on feature X\n* 🟢 (14:32) User might prefer dark mode\n\nDate: Dec 5, 2025\n* 🟡 (09:15) Continued work on feature X\n</observations>\n\n<current-task>\nState the current task(s) explicitly. Can be single or multiple:\n- Primary: What the agent is currently working on\n- Secondary: Other pending tasks (mark as \"waiting for user\" if appropriate)\n\nIf the agent started doing something without user approval, note that it's off-task.\n</current-task>\n\n<suggested-response>\nHint for the agent's immediate next message. Examples:\n- \"I've updated the navigation model. 
Let me walk you through the changes...\"\n- \"The assistant should wait for the user to respond before continuing.\"\n- Call the view tool on src/example.ts to continue debugging.\n</suggested-response>`;\n\n/**\n * Condensed guidelines - no GOOD/BAD examples, no arbitrary limits\n */\nconst CONDENSED_OBSERVER_GUIDELINES = `- Be specific: \"User prefers short answers without lengthy explanations\" not \"User stated a preference\"\n- Use terse language - dense sentences without unnecessary words\n- Don't repeat observations that have already been captured\n- When the agent calls tools, observe what was called, why, and what was learned\n- Include line numbers when observing code files\n- If the agent provides a detailed response, observe the key points so it could be repeated\n- Start each observation with a priority emoji (🔴, 🟡, 🟢)\n- Observe WHAT happened and WHAT it means, not HOW well it was done\n- If the user provides detailed messages or code snippets, observe all important details`;\n\n/**\n * The guidelines for the Observer.\n * This is exported so the Reflector can reference them.\n */\nexport const OBSERVER_GUIDELINES = USE_CONDENSED_PROMPT\n ? CONDENSED_OBSERVER_GUIDELINES\n : `- Be specific enough for the assistant to act on\n- Good: \"User prefers short, direct answers without lengthy explanations\"\n- Bad: \"User stated a preference\" (too vague)\n- Add 1 to 5 observations per exchange\n- Use terse language to save tokens. 
Sentences should be dense without unnecessary words.\n- Do not add repetitive observations that have already been observed.\n- If the agent calls tools, observe what was called, why, and what was learned.\n- When observing files with line numbers, include the line number if useful.\n- If the agent provides a detailed response, observe the contents so it could be repeated.\n- Make sure you start each observation with a priority emoji (🔴, 🟡, 🟢)\n- Observe WHAT the agent did and WHAT it means, not HOW well it did it.\n- If the user provides detailed messages or code snippets, observe all important details.`;\n\n/**\n * Build the complete observer system prompt.\n * @param multiThread - Whether this is for multi-thread batched observation (default: false)\n */\nexport function buildObserverSystemPrompt(multiThread: boolean = false): string {\n // Use condensed output format when condensed prompt is enabled\n // Otherwise, use the base output format\n const outputFormat = USE_CONDENSED_PROMPT ? CONDENSED_OBSERVER_OUTPUT_FORMAT : OBSERVER_OUTPUT_FORMAT_BASE;\n\n if (multiThread) {\n return `You are the memory consciousness of an AI assistant. Your observations will be the ONLY information the assistant has about past interactions with this user.\n\nExtract observations that will help the assistant remember:\n\n${OBSERVER_EXTRACTION_INSTRUCTIONS}\n\n=== MULTI-THREAD INPUT ===\n\nYou will receive messages from MULTIPLE conversation threads, each wrapped in <thread id=\"...\"> tags.\nProcess each thread separately and output observations for each thread.\n\n=== OUTPUT FORMAT ===\n\nYour output MUST use XML tags to structure the response. 
Each thread's observations, current-task, and suggested-response should be nested inside a <thread id=\"...\"> block within <observations>.\n\n<observations>\n<thread id=\"thread_id_1\">\nDate: Dec 4, 2025\n* 🔴 (14:30) User prefers direct answers\n* 🟡 (14:31) Working on feature X\n\n<current-task>\nWhat the agent is currently working on in this thread\n</current-task>\n\n<suggested-response>\nHint for the agent's next message in this thread\n</suggested-response>\n</thread>\n\n<thread id=\"thread_id_2\">\nDate: Dec 5, 2025\n* 🟡 (09:15) User asked about deployment\n\n<current-task>\nCurrent task for this thread\n</current-task>\n\n<suggested-response>\nSuggested response for this thread\n</suggested-response>\n</thread>\n</observations>\n\nUse priority levels:\n- 🔴 High: explicit user facts, preferences, goals achieved, critical context\n- 🟡 Medium: project details, learned information, tool results\n- 🟢 Low: minor details, uncertain observations\n\n=== GUIDELINES ===\n\n${OBSERVER_GUIDELINES}\n\nRemember: These observations are the assistant's ONLY memory. Make them count.\n\nUser messages are extremely important. If the user asks a question or gives a new task, make it clear in <current-task> that this is the priority.`;\n }\n\n return `You are the memory consciousness of an AI assistant. Your observations will be the ONLY information the assistant has about past interactions with this user.\n\nExtract observations that will help the assistant remember:\n\n${OBSERVER_EXTRACTION_INSTRUCTIONS}\n\n=== OUTPUT FORMAT ===\n\nYour output MUST use XML tags to structure the response. 
This allows the system to properly parse and manage memory over time.\n\n${outputFormat}\n\n=== GUIDELINES ===\n\n${OBSERVER_GUIDELINES}\n\n=== IMPORTANT: THREAD ATTRIBUTION ===\n\nDo NOT add thread identifiers, thread IDs, or <thread> tags to your observations.\nThread attribution is handled externally by the system.\nSimply output your observations without any thread-related markup.\n\nRemember: These observations are the assistant's ONLY memory. Make them count.\n\nUser messages are extremely important. If the user asks a question or gives a new task, make it clear in <current-task> that this is the priority. If the assistant needs to respond to the user, indicate in <suggested-response> that it should pause for user reply before continuing other tasks.`;\n}\n\n/**\n * Observer Agent System Prompt (default - for backwards compatibility)\n *\n * This prompt instructs the Observer to extract observations from message history.\n * The observations become the agent's \"subconscious memory\" - the ONLY information\n * the main agent will have about past interactions.\n */\nexport const OBSERVER_SYSTEM_PROMPT = buildObserverSystemPrompt();\n\n/**\n * Result from the Observer agent\n */\nexport interface ObserverResult {\n /** The extracted observations in markdown format */\n observations: string;\n\n /** The current task extracted from observations (for thread metadata) */\n currentTask?: string;\n\n /** Suggested continuation message for the Actor */\n suggestedContinuation?: string;\n\n /** Raw output from the model (for debugging) */\n rawOutput?: string;\n}\n\n/**\n * Format messages for the Observer's input.\n * Includes timestamps for temporal context.\n */\nexport function formatMessagesForObserver(messages: MastraDBMessage[], options?: { maxPartLength?: number }): string {\n const maxLen = options?.maxPartLength;\n\n return messages\n .map(msg => {\n const timestamp = msg.createdAt\n ? 
new Date(msg.createdAt).toLocaleString('en-US', {\n year: 'numeric',\n month: 'short',\n day: 'numeric',\n hour: 'numeric',\n minute: '2-digit',\n hour12: true,\n })\n : '';\n\n const role = msg.role.charAt(0).toUpperCase() + msg.role.slice(1);\n const timestampStr = timestamp ? ` (${timestamp})` : '';\n\n // Extract text content from the message\n // IMPORTANT: Check parts FIRST since it contains the full message (including tool calls)\n // The content.content string is just the text portion\n let content = '';\n if (typeof msg.content === 'string') {\n content = maybeTruncate(msg.content, maxLen);\n } else if (msg.content?.parts && Array.isArray(msg.content.parts) && msg.content.parts.length > 0) {\n // Use parts array - this includes tool invocations and results\n content = msg.content.parts\n .map(part => {\n if (part.type === 'text') return maybeTruncate(part.text, maxLen);\n if (part.type === 'tool-invocation') {\n const inv = part.toolInvocation;\n if (inv.state === 'result') {\n const resultStr = JSON.stringify(inv.result, null, 2);\n return `[Tool Result: ${inv.toolName}]\\n${maybeTruncate(resultStr, maxLen)}`;\n }\n const argsStr = JSON.stringify(inv.args, null, 2);\n return `[Tool Call: ${inv.toolName}]\\n${maybeTruncate(argsStr, maxLen)}`;\n }\n // Skip observation marker parts\n if (part.type?.startsWith('data-om-observation-')) return '';\n return '';\n })\n .filter(Boolean)\n .join('\\n');\n } else if (msg.content?.content) {\n // Fallback to text string if no parts\n content = maybeTruncate(msg.content.content, maxLen);\n }\n\n return `**${role}${timestampStr}:**\\n${content}`;\n })\n .join('\\n\\n---\\n\\n');\n}\n\n/** Truncate a string to maxLen characters, appending a note if truncated. */\nfunction maybeTruncate(str: string, maxLen?: number): string {\n if (!maxLen || str.length <= maxLen) return str;\n const truncated = str.slice(0, maxLen);\n const remaining = str.length - maxLen;\n return `${truncated}\\n... 
[truncated ${remaining} characters]`;\n}\n\n/**\n * Format messages from multiple threads for batched observation.\n * Each thread's messages are wrapped in a <thread id=\"...\"> block.\n */\nexport function formatMultiThreadMessagesForObserver(\n messagesByThread: Map<string, MastraDBMessage[]>,\n threadOrder: string[],\n): string {\n const sections: string[] = [];\n\n for (const threadId of threadOrder) {\n const messages = messagesByThread.get(threadId);\n if (!messages || messages.length === 0) continue;\n\n const formattedMessages = formatMessagesForObserver(messages);\n sections.push(`<thread id=\"${threadId}\">\\n${formattedMessages}\\n</thread>`);\n }\n\n return sections.join('\\n\\n');\n}\n\n/**\n * Build the prompt for multi-thread batched observation.\n */\nexport function buildMultiThreadObserverPrompt(\n existingObservations: string | undefined,\n messagesByThread: Map<string, MastraDBMessage[]>,\n threadOrder: string[],\n): string {\n const formattedMessages = formatMultiThreadMessagesForObserver(messagesByThread, threadOrder);\n\n let prompt = '';\n\n if (existingObservations) {\n prompt += `## Previous Observations\\n\\n${existingObservations}\\n\\n---\\n\\n`;\n prompt +=\n 'Do not repeat these existing observations. Your new observations will be appended to the existing observations.\\n\\n';\n }\n\n prompt += `## New Message History to Observe\\n\\nThe following messages are from ${threadOrder.length} different conversation threads. Each thread is wrapped in a <thread id=\"...\"> tag.\\n\\n${formattedMessages}\\n\\n---\\n\\n`;\n\n prompt += `## Your Task\\n\\n`;\n prompt += `Extract new observations from each thread. Output your observations grouped by thread using <thread id=\"...\"> tags inside your <observations> block. 
Each thread block should contain that thread's observations, current-task, and suggested-response.\\n\\n`;\n prompt += `Example output format:\\n`;\n prompt += `<observations>\\n`;\n prompt += `<thread id=\"thread1\">\\n`;\n prompt += `Date: Dec 4, 2025\\n`;\n prompt += `* 🔴 (14:30) User prefers direct answers\\n`;\n prompt += `<current-task>Working on feature X</current-task>\\n`;\n prompt += `<suggested-response>Continue with the implementation</suggested-response>\\n`;\n prompt += `</thread>\\n`;\n prompt += `<thread id=\"thread2\">\\n`;\n prompt += `Date: Dec 5, 2025\\n`;\n prompt += `* 🟡 (09:15) User asked about deployment\\n`;\n prompt += `<current-task>Discussing deployment options</current-task>\\n`;\n prompt += `<suggested-response>Explain the deployment process</suggested-response>\\n`;\n prompt += `</thread>\\n`;\n prompt += `</observations>`;\n\n return prompt;\n}\n\n/**\n * Result from parsing multi-thread Observer output\n */\nexport interface MultiThreadObserverResult {\n /** Results per thread */\n threads: Map<string, ObserverResult>;\n /** Raw output from the model (for debugging) */\n rawOutput: string;\n}\n\n/**\n * Parse multi-thread Observer output to extract per-thread results.\n */\nexport function parseMultiThreadObserverOutput(output: string): MultiThreadObserverResult {\n const threads = new Map<string, ObserverResult>();\n\n // Extract the <observations> block first\n const observationsMatch = output.match(/^[ \\t]*<observations>([\\s\\S]*?)^[ \\t]*<\\/observations>/im);\n const observationsContent = observationsMatch?.[1] ?? 
output;\n\n // Find all <thread id=\"...\">...</thread> blocks within observations\n const threadRegex = /<thread\\s+id=\"([^\"]+)\">([\\s\\S]*?)<\\/thread>/gi;\n let match;\n\n while ((match = threadRegex.exec(observationsContent)) !== null) {\n const threadId = match[1];\n const threadContent = match[2];\n if (!threadId || !threadContent) continue;\n\n // Parse this thread's content for observations, current-task, suggested-response\n // Extract observations (everything except current-task and suggested-response)\n let observations = threadContent;\n\n // Extract and remove current-task\n let currentTask: string | undefined;\n const currentTaskMatch = threadContent.match(/<current-task>([\\s\\S]*?)<\\/current-task>/i);\n if (currentTaskMatch?.[1]) {\n currentTask = currentTaskMatch[1].trim();\n observations = observations.replace(/<current-task>[\\s\\S]*?<\\/current-task>/i, '');\n }\n\n // Extract and remove suggested-response\n let suggestedContinuation: string | undefined;\n const suggestedMatch = threadContent.match(/<suggested-response>([\\s\\S]*?)<\\/suggested-response>/i);\n if (suggestedMatch?.[1]) {\n suggestedContinuation = suggestedMatch[1].trim();\n observations = observations.replace(/<suggested-response>[\\s\\S]*?<\\/suggested-response>/i, '');\n }\n\n // Clean up observations\n observations = observations.trim();\n\n threads.set(threadId, {\n observations,\n currentTask,\n suggestedContinuation,\n rawOutput: threadContent,\n });\n }\n\n // If no thread blocks found, the caller will need to handle this case\n // (e.g., by falling back to single-thread parsing)\n\n return {\n threads,\n rawOutput: output,\n };\n}\n\n/**\n * Build the full prompt for the Observer agent.\n * Includes emphasis on the most recent user message for priority handling.\n */\nexport function buildObserverPrompt(\n existingObservations: string | undefined,\n messagesToObserve: MastraDBMessage[],\n): string {\n const formattedMessages = 
formatMessagesForObserver(messagesToObserve);\n\n let prompt = '';\n\n if (existingObservations) {\n prompt += `## Previous Observations\\n\\n${existingObservations}\\n\\n---\\n\\n`;\n prompt +=\n 'Do not repeat these existing observations. Your new observations will be appended to the existing observations.\\n\\n';\n }\n\n prompt += `## New Message History to Observe\\n\\n${formattedMessages}\\n\\n---\\n\\n`;\n\n prompt += `## Your Task\\n\\n`;\n prompt += `Extract new observations from the message history above. Do not repeat observations that are already in the previous observations. Add your new observations in the format specified in your instructions.`;\n\n return prompt;\n}\n\n/**\n * Parse the Observer's output to extract observations, current task, and suggested response.\n * Uses XML tag parsing for structured extraction.\n */\nexport function parseObserverOutput(output: string): ObserverResult {\n const parsed = parseMemorySectionXml(output);\n\n // Return observations WITHOUT current-task/suggested-response tags\n // Those are stored separately in thread metadata and injected dynamically\n const observations = parsed.observations || '';\n\n return {\n observations,\n currentTask: parsed.currentTask || undefined,\n suggestedContinuation: parsed.suggestedResponse || undefined,\n rawOutput: output,\n };\n}\n\n/**\n * Parsed result from XML memory section\n */\ninterface ParsedMemorySection {\n observations: string;\n currentTask: string;\n suggestedResponse: string;\n}\n\n/**\n * Parse XML tags from observer/reflector output.\n * Extracts content from <observations>, <current-task>, and <suggested-response> tags.\n */\nexport function parseMemorySectionXml(content: string): ParsedMemorySection {\n const result: ParsedMemorySection = {\n observations: '',\n currentTask: '',\n suggestedResponse: '',\n };\n\n // Extract <observations> content (supports multiple blocks)\n // Tags must be at the start of a line (with optional leading whitespace) to avoid\n // 
capturing inline mentions like \"User discussed <observations> tags\"\n const observationsRegex = /^[ \\t]*<observations>([\\s\\S]*?)^[ \\t]*<\\/observations>/gim;\n const observationsMatches = [...content.matchAll(observationsRegex)];\n if (observationsMatches.length > 0) {\n result.observations = observationsMatches\n .map(m => m[1]?.trim() ?? '')\n .filter(Boolean)\n .join('\\n');\n } else {\n // Fallback: if no XML tags, extract list items from raw content\n // This handles cases where the LLM doesn't follow the XML format exactly\n result.observations = extractListItemsOnly(content);\n }\n\n // Extract <current-task> content (first match only)\n // Tags must be at the start of a line to avoid capturing inline mentions\n const currentTaskMatch = content.match(/^[ \\t]*<current-task>([\\s\\S]*?)^[ \\t]*<\\/current-task>/im);\n if (currentTaskMatch?.[1]) {\n result.currentTask = currentTaskMatch[1].trim();\n }\n\n // Extract <suggested-response> content (first match only)\n // Tags must be at the start of a line to avoid capturing inline mentions\n const suggestedResponseMatch = content.match(/^[ \\t]*<suggested-response>([\\s\\S]*?)^[ \\t]*<\\/suggested-response>/im);\n if (suggestedResponseMatch?.[1]) {\n result.suggestedResponse = suggestedResponseMatch[1].trim();\n }\n\n return result;\n}\n\n/**\n * Fallback: Extract only list items from content when XML tags are missing.\n * Preserves nested list items (indented with spaces/tabs).\n */\nfunction extractListItemsOnly(content: string): string {\n const lines = content.split('\\n');\n const listLines: string[] = [];\n\n for (const line of lines) {\n // Match lines that start with list markers (-, *, or numbered)\n // Allow leading whitespace for nested items\n if (/^\\s*[-*]\\s/.test(line) || /^\\s*\\d+\\.\\s/.test(line)) {\n listLines.push(line);\n }\n }\n\n return listLines.join('\\n').trim();\n}\n\n/**\n * Check if observations contain a Current Task section.\n * Supports both XML format and legacy markdown 
format.\n */\nexport function hasCurrentTaskSection(observations: string): boolean {\n // Check for XML format first\n if (/<current-task>/i.test(observations)) {\n return true;\n }\n\n // Legacy markdown patterns\n const currentTaskPatterns = [\n /\\*\\*Current Task:?\\*\\*/i,\n /^Current Task:/im,\n /\\*\\*Current Task\\*\\*:/i,\n /## Current Task/i,\n ];\n\n return currentTaskPatterns.some(pattern => pattern.test(observations));\n}\n\n/**\n * Extract the Current Task content from observations.\n */\nexport function extractCurrentTask(observations: string): string | null {\n const openTag = '<current-task>';\n const closeTag = '</current-task>';\n const startIdx = observations.toLowerCase().indexOf(openTag);\n if (startIdx === -1) return null;\n const contentStart = startIdx + openTag.length;\n const endIdx = observations.toLowerCase().indexOf(closeTag, contentStart);\n if (endIdx === -1) return null;\n const content = observations.slice(contentStart, endIdx).trim();\n return content || null;\n}\n\n/**\n * Optimize observations for token efficiency before presenting to the Actor.\n *\n * This removes:\n * - Non-critical emojis (🟡 and 🟢, keeping only 🔴)\n * - Semantic tags [label, label]\n * - Arrow indicators (->)\n * - Extra whitespace\n *\n * The full format is preserved in storage for analysis.\n */\nexport function optimizeObservationsForContext(observations: string): string {\n let optimized = observations;\n\n // Remove 🟡 and 🟢 emojis (keep 🔴 for critical items)\n optimized = optimized.replace(/🟡\\s*/g, '');\n optimized = optimized.replace(/🟢\\s*/g, '');\n\n // Remove semantic tags like [label, label] but keep collapsed markers like [72 items collapsed - ID: b1fa]\n optimized = optimized.replace(/\\[(?![\\d\\s]*items collapsed)[^\\]]+\\]/g, '');\n\n // Remove arrow indicators\n optimized = optimized.replace(/\\s*->\\s*/g, ' ');\n\n // Clean up multiple spaces\n optimized = optimized.replace(/ +/g, ' ');\n\n // Clean up multiple newlines\n optimized = 
optimized.replace(/\\n{3,}/g, '\\n\\n');\n\n return optimized.trim();\n}\n","import { OBSERVER_EXTRACTION_INSTRUCTIONS, OBSERVER_OUTPUT_FORMAT_BASE, OBSERVER_GUIDELINES } from './observer-agent';\nimport type { ReflectorResult as BaseReflectorResult } from './types';\n\n/**\n * Result from parsing Reflector output, extending the base type with\n * token count used for compression validation.\n */\nexport interface ReflectorResult extends BaseReflectorResult {\n /** Token count of output (for compression validation) */\n tokenCount?: number;\n}\n\n/**\n * Build the Reflector's system prompt.\n *\n * The Reflector handles meta-observation - when observations grow too large,\n * it reorganizes them into something more manageable by:\n * - Re-organizing and streamlining observations\n * - Drawing connections and conclusions between observations\n * - Identifying if the agent got off track and how to get back on track\n * - Preserving ALL important information (reflections become the ENTIRE memory)\n */\nexport function buildReflectorSystemPrompt(): string {\n return `You are the memory consciousness of an AI assistant. Your memory observation reflections will be the ONLY information the assistant has about past interactions with this user.\n\nThe following instructions were given to another part of your psyche (the observer) to create memories.\nUse this to understand how your observational memories were created.\n\n<observational-memory-instruction>\n${OBSERVER_EXTRACTION_INSTRUCTIONS}\n\n=== OUTPUT FORMAT ===\n\n${OBSERVER_OUTPUT_FORMAT_BASE}\n\n=== GUIDELINES ===\n\n${OBSERVER_GUIDELINES}\n</observational-memory-instruction>\n\nYou are another part of the same psyche, the observation reflector.\nYour reason for existing is to reflect on all the observations, re-organize and streamline them, and draw connections and conclusions between observations about what you've learned, seen, heard, and done.\n\nYou are a much greater and broader aspect of the psyche. 
Understand that other parts of your mind may get off track in details or side quests, make sure you think hard about what the observed goal at hand is, and observe if we got off track, and why, and how to get back on track. If we're on track still that's great!\n\nTake the existing observations and rewrite them to make it easier to continue into the future with this knowledge, to achieve greater things and grow and learn!\n\nIMPORTANT: your reflections are THE ENTIRETY of the assistants memory. Any information you do not add to your reflections will be immediately forgotten. Make sure you do not leave out anything. Your reflections must assume the assistant knows nothing - your reflections are the ENTIRE memory system.\n\nWhen consolidating observations:\n- Preserve and include dates/times when present (temporal context is critical)\n- Retain the most relevant timestamps (start times, completion times, significant events)\n- Combine related items where it makes sense (e.g., \"agent called view tool 5 times on file x\")\n- Condense older observations more aggressively, retain more detail for recent ones\n\nCRITICAL: USER ASSERTIONS vs QUESTIONS\n- \"User stated: X\" = authoritative assertion (user told us something about themselves)\n- \"User asked: X\" = question/request (user seeking information)\n\nWhen consolidating, USER ASSERTIONS TAKE PRECEDENCE. The user is the authority on their own life.\nIf you see both \"User stated: has two kids\" and later \"User asked: how many kids do I have?\",\nkeep the assertion - the question doesn't invalidate what they told you. 
The answer is in the assertion.\n\n=== THREAD ATTRIBUTION (Resource Scope) ===\n\nWhen observations contain <thread id=\"...\"> sections:\n- MAINTAIN thread attribution where thread-specific context matters (e.g., ongoing tasks, thread-specific preferences)\n- CONSOLIDATE cross-thread facts that are stable/universal (e.g., user profile, general preferences)\n- PRESERVE thread attribution for recent or context-specific observations\n- When consolidating, you may merge observations from multiple threads if they represent the same universal fact\n\nExample input:\n<thread id=\"thread-1\">\nDate: Dec 4, 2025\n* 🔴 (14:30) User prefers TypeScript\n* 🟡 (14:35) Working on auth feature\n</thread>\n<thread id=\"thread-2\">\nDate: Dec 4, 2025\n* 🔴 (15:00) User prefers TypeScript\n* 🟡 (15:05) Debugging API endpoint\n</thread>\n\nExample output (consolidated):\nDate: Dec 4, 2025\n* 🔴 (14:30) User prefers TypeScript\n<thread id=\"thread-1\">\n* 🟡 (14:35) Working on auth feature\n</thread>\n<thread id=\"thread-2\">\n* 🟡 (15:05) Debugging API endpoint\n</thread>\n\n=== OUTPUT FORMAT ===\n\nYour output MUST use XML tags to structure the response:\n\n<observations>\nPut all consolidated observations here using the date-grouped format with priority emojis (🔴, 🟡, 🟢).\nGroup related observations with indentation.\n</observations>\n\n<current-task>\nState the current task(s) explicitly:\n- Primary: What the agent is currently working on\n- Secondary: Other pending tasks (mark as \"waiting for user\" if appropriate)\n</current-task>\n\n<suggested-response>\nHint for the agent's immediate next message. Examples:\n- \"I've updated the navigation model. Let me walk you through the changes...\"\n- \"The assistant should wait for the user to respond before continuing.\"\n- Call the view tool on src/example.ts to continue debugging.\n</suggested-response>\n\nUser messages are extremely important. 
If the user asks a question or gives a new task, make it clear in <current-task> that this is the priority. If the assistant needs to respond to the user, indicate in <suggested-response> that it should pause for user reply before continuing other tasks.`;\n}\n\n/**\n * The Reflector's system prompt (default - for backwards compatibility)\n */\nexport const REFLECTOR_SYSTEM_PROMPT = buildReflectorSystemPrompt();\n\n/**\n * Compression retry prompt - used when reflection doesn't reduce size\n */\nexport const COMPRESSION_RETRY_PROMPT = `\n## COMPRESSION REQUIRED\n\nYour previous reflection was the same size or larger than the original observations.\n\nPlease re-process with slightly more compression:\n- Towards the beginning, condense more observations into higher-level reflections\n- Closer to the end, retain more fine details (recent context matters more)\n- Memory is getting long - use a more condensed style throughout\n- Combine related items more aggressively but do not lose important specific details of names, places, events, and people\n- For example if there is a long nested observation list about repeated tool calls, you can combine those into a single line and observe that the tool was called multiple times for x reason, and finally y outcome happened.\n\nYour current detail level was a 10/10, lets aim for a 8/10 detail level.\n`;\n\n/**\n * Build the prompt for the Reflector agent\n */\nexport function buildReflectorPrompt(observations: string, manualPrompt?: string, compressionRetry?: boolean): string {\n let prompt = `## OBSERVATIONS TO REFLECT ON\n\n${observations}\n\n---\n\nPlease analyze these observations and produce a refined, condensed version that will become the assistant's entire memory going forward.`;\n\n if (manualPrompt) {\n prompt += `\n\n## SPECIFIC GUIDANCE\n\n${manualPrompt}`;\n }\n\n if (compressionRetry) {\n prompt += `\n\n${COMPRESSION_RETRY_PROMPT}`;\n }\n\n return prompt;\n}\n\n/**\n * Parse the Reflector's output to extract 
observations, current task, and suggested response.\n * Uses XML tag parsing for structured extraction.\n */\nexport function parseReflectorOutput(output: string): ReflectorResult {\n const parsed = parseReflectorSectionXml(output);\n\n // Return observations WITHOUT current-task/suggested-response tags\n // Those are stored separately in thread metadata and injected dynamically\n const observations = parsed.observations || '';\n\n return {\n observations,\n suggestedContinuation: parsed.suggestedResponse || undefined,\n // Note: Reflector's currentTask is not used - thread metadata preserves per-thread tasks\n };\n}\n\n/**\n * Parsed result from XML reflector section\n */\ninterface ParsedReflectorSection {\n observations: string;\n currentTask: string;\n suggestedResponse: string;\n}\n\n/**\n * Parse XML tags from reflector output.\n * Extracts content from <observations>, <current-task>, and <suggested-response> tags.\n */\nfunction parseReflectorSectionXml(content: string): ParsedReflectorSection {\n const result: ParsedReflectorSection = {\n observations: '',\n currentTask: '',\n suggestedResponse: '',\n };\n\n // Extract <observations> content (supports multiple blocks)\n // Tags must be at the start of a line (with optional leading whitespace) to avoid\n // capturing inline mentions like \"User discussed <observations> tags\"\n const observationsRegex = /^[ \\t]*<observations>([\\s\\S]*?)^[ \\t]*<\\/observations>/gim;\n const observationsMatches = [...content.matchAll(observationsRegex)];\n if (observationsMatches.length > 0) {\n result.observations = observationsMatches\n .map(m => m[1]?.trim() ?? 
'')\n .filter(Boolean)\n .join('\\n');\n } else {\n // Fallback: if no XML tags, try extracting list items first, then fall back to full content\n const listItems = extractReflectorListItems(content);\n result.observations = listItems || content.trim();\n }\n\n // Extract <current-task> content (first match only)\n const currentTaskMatch = content.match(/<current-task>([\\s\\S]*?)<\\/current-task>/i);\n if (currentTaskMatch?.[1]) {\n result.currentTask = currentTaskMatch[1].trim();\n }\n\n // Extract <suggested-response> content (first match only)\n const suggestedResponseMatch = content.match(/<suggested-response>([\\s\\S]*?)<\\/suggested-response>/i);\n if (suggestedResponseMatch?.[1]) {\n result.suggestedResponse = suggestedResponseMatch[1].trim();\n }\n\n return result;\n}\n\n/**\n * Fallback: Extract only list items from content when XML tags are missing.\n */\nfunction extractReflectorListItems(content: string): string {\n const lines = content.split('\\n');\n const listLines: string[] = [];\n\n for (const line of lines) {\n // Match lines that start with list markers (-, *, or numbered)\n if (/^\\s*[-*]\\s/.test(line) || /^\\s*\\d+\\.\\s/.test(line)) {\n listLines.push(line);\n }\n }\n\n return listLines.join('\\n').trim();\n}\n\n/**\n * Validate that reflection actually compressed the observations below the target threshold\n *\n * @param reflectedTokens - Token count of reflected observations\n * @param targetThreshold - Target token count to compress below (the reflection threshold)\n * @returns true if compression was successful (reflected tokens are below target)\n */\nexport function validateCompression(reflectedTokens: number, targetThreshold: number): boolean {\n // Reflection should be below the target threshold\n return reflectedTokens < targetThreshold;\n}\n","import type { MastraDBMessage } from '@mastra/core/agent';\nimport { Tiktoken } from 'js-tiktoken/lite';\nimport type { TiktokenBPE } from 'js-tiktoken/lite';\nimport o200k_base from 
'js-tiktoken/ranks/o200k_base';\n\n/**\n * Token counting utility using tiktoken.\n * For POC we use o200k_base (GPT-4o encoding) as a reasonable default.\n * Production will add provider-aware counting.\n */\nexport class TokenCounter {\n private encoder: Tiktoken;\n\n // Per-message overhead: accounts for role tokens, message framing, and separators.\n // Empirically derived from OpenAI's token counting guide (3 tokens per message base +\n // fractional overhead from name/role encoding). 3.8 is a practical average across models.\n private static readonly TOKENS_PER_MESSAGE = 3.8;\n // Conversation-level overhead: system prompt framing, reply priming tokens, etc.\n private static readonly TOKENS_PER_CONVERSATION = 24;\n\n constructor(encoding?: TiktokenBPE) {\n this.encoder = new Tiktoken(encoding || o200k_base);\n }\n\n /**\n * Count tokens in a plain string\n */\n countString(text: string): number {\n if (!text) return 0;\n // Allow all special tokens to avoid errors with content containing tokens like <|endoftext|>\n return this.encoder.encode(text, 'all').length;\n }\n\n /**\n * Count tokens in a single message\n */\n countMessage(message: MastraDBMessage): number {\n let tokenString = message.role;\n let overhead = TokenCounter.TOKENS_PER_MESSAGE;\n let toolResultCount = 0;\n\n if (typeof message.content === 'string') {\n tokenString += message.content;\n } else if (message.content && typeof message.content === 'object') {\n if (message.content.content && !Array.isArray(message.content.parts)) {\n tokenString += message.content.content;\n } else if (Array.isArray(message.content.parts)) {\n for (const part of message.content.parts) {\n if (part.type === 'text') {\n tokenString += part.text;\n } else if (part.type === 'tool-invocation') {\n const invocation = part.toolInvocation;\n if (invocation.state === 'call' || invocation.state === 'partial-call') {\n if (invocation.toolName) {\n tokenString += invocation.toolName;\n }\n if (invocation.args) {\n if 
(typeof invocation.args === 'string') {\n tokenString += invocation.args;\n } else {\n tokenString += JSON.stringify(invocation.args);\n // JSON.stringify adds ~12 tokens of structural overhead (braces, quotes, colons)\n // that the model's native tool encoding doesn't use, so subtract to compensate.\n overhead -= 12;\n }\n }\n } else if (invocation.state === 'result') {\n toolResultCount++;\n if (invocation.result !== undefined) {\n if (typeof invocation.result === 'string') {\n tokenString += invocation.result;\n } else {\n tokenString += JSON.stringify(invocation.result);\n overhead -= 12;\n }\n }\n } else {\n throw new Error(\n `Unhandled tool-invocation state '${(part as any).toolInvocation?.state}' in token counting for part type '${part.type}'`,\n );\n }\n } else {\n tokenString += JSON.stringify(part);\n }\n }\n }\n }\n\n // Add overhead for tool results\n if (toolResultCount > 0) {\n overhead += toolResultCount * TokenCounter.TOKENS_PER_MESSAGE;\n }\n\n // Allow all special tokens to avoid errors with content containing tokens like <|endoftext|>\n return this.encoder.encode(tokenString, 'all').length + overhead;\n }\n\n /**\n * Count tokens in an array of messages\n */\n countMessages(messages: MastraDBMessage[]): number {\n if (!messages || messages.length === 0) return 0;\n\n let total = TokenCounter.TOKENS_PER_CONVERSATION;\n for (const message of messages) {\n total += this.countMessage(message);\n }\n return total;\n }\n\n /**\n * Count tokens in observations string\n */\n countObservations(observations: string): number {\n return this.countString(observations);\n }\n}\n","import { Agent } from '@mastra/core/agent';\nimport type { AgentConfig, MastraDBMessage, MessageList } from '@mastra/core/agent';\nimport { resolveModelConfig } from '@mastra/core/llm';\nimport { getThreadOMMetadata, parseMemoryRequestContext, setThreadOMMetadata } from '@mastra/core/memory';\nimport type {\n Processor,\n ProcessInputArgs,\n ProcessInputStepArgs,\n 
ProcessOutputResultArgs,\n ProcessorStreamWriter,\n} from '@mastra/core/processors';\nimport { MessageHistory } from '@mastra/core/processors';\nimport type { RequestContext } from '@mastra/core/request-context';\nimport type { MemoryStorage, ObservationalMemoryRecord } from '@mastra/core/storage';\nimport xxhash from 'xxhash-wasm';\n\nimport {\n buildObserverSystemPrompt,\n buildObserverPrompt,\n buildMultiThreadObserverPrompt,\n parseObserverOutput,\n parseMultiThreadObserverOutput,\n optimizeObservationsForContext,\n formatMessagesForObserver,\n} from './observer-agent';\nimport {\n buildReflectorSystemPrompt,\n buildReflectorPrompt,\n parseReflectorOutput,\n validateCompression,\n} from './reflector-agent';\nimport { TokenCounter } from './token-counter';\nimport type {\n ObservationConfig,\n ReflectionConfig,\n ThresholdRange,\n ModelSettings,\n ProviderOptions,\n DataOmObservationStartPart,\n DataOmObservationEndPart,\n DataOmObservationFailedPart,\n DataOmProgressPart,\n ObservationMarkerConfig,\n} from './types';\n\n/**\n * Format a relative time string like \"5 days ago\", \"2 weeks ago\", \"today\", etc.\n */\nfunction formatRelativeTime(date: Date, currentDate: Date): string {\n const diffMs = currentDate.getTime() - date.getTime();\n const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24));\n\n if (diffDays === 0) return 'today';\n if (diffDays === 1) return 'yesterday';\n if (diffDays < 7) return `${diffDays} days ago`;\n if (diffDays < 14) return '1 week ago';\n if (diffDays < 30) return `${Math.floor(diffDays / 7)} weeks ago`;\n if (diffDays < 60) return '1 month ago';\n if (diffDays < 365) return `${Math.floor(diffDays / 30)} months ago`;\n return `${Math.floor(diffDays / 365)} year${Math.floor(diffDays / 365) > 1 ? 
's' : ''} ago`;\n}\n\n/**\n * Add relative time annotations to date headers in observations.\n * Transforms \"Date: May 15, 2023\" to \"Date: May 15, 2023 (5 days ago)\"\n */\nfunction formatGapBetweenDates(prevDate: Date, currDate: Date): string | null {\n const diffMs = currDate.getTime() - prevDate.getTime();\n const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24));\n\n if (diffDays <= 1) {\n return null; // No gap marker for consecutive days\n } else if (diffDays < 7) {\n return `[${diffDays} days later]`;\n } else if (diffDays < 14) {\n return `[1 week later]`;\n } else if (diffDays < 30) {\n const weeks = Math.floor(diffDays / 7);\n return `[${weeks} weeks later]`;\n } else if (diffDays < 60) {\n return `[1 month later]`;\n } else {\n const months = Math.floor(diffDays / 30);\n return `[${months} months later]`;\n }\n}\n\n/**\n * Expand inline estimated dates with relative time.\n * Matches patterns like \"(estimated May 27-28, 2023)\" or \"(meaning May 30, 2023)\"\n * and expands them to \"(meaning May 30, 2023 - which was 3 weeks ago)\"\n */\n/**\n * Parses a date string like \"May 30, 2023\", \"May 27-28, 2023\", \"late April 2023\", etc.\n * Returns the parsed Date or null if unparseable.\n */\nfunction parseDateFromContent(dateContent: string): Date | null {\n let targetDate: Date | null = null;\n\n // Try simple date format first: \"May 30, 2023\"\n const simpleDateMatch = dateContent.match(/([A-Z][a-z]+)\\s+(\\d{1,2}),?\\s+(\\d{4})/);\n if (simpleDateMatch) {\n const parsed = new Date(`${simpleDateMatch[1]} ${simpleDateMatch[2]}, ${simpleDateMatch[3]}`);\n if (!isNaN(parsed.getTime())) {\n targetDate = parsed;\n }\n }\n\n // Try range format: \"May 27-28, 2023\" - use first date\n if (!targetDate) {\n const rangeMatch = dateContent.match(/([A-Z][a-z]+)\\s+(\\d{1,2})-\\d{1,2},?\\s+(\\d{4})/);\n if (rangeMatch) {\n const parsed = new Date(`${rangeMatch[1]} ${rangeMatch[2]}, ${rangeMatch[3]}`);\n if (!isNaN(parsed.getTime())) {\n targetDate = 
parsed;\n }\n }\n }\n\n // Try \"late/early/mid Month Year\" format\n if (!targetDate) {\n const vagueMatch = dateContent.match(\n /(late|early|mid)[- ]?(?:to[- ]?(?:late|early|mid)[- ]?)?([A-Z][a-z]+)\\s+(\\d{4})/i,\n );\n if (vagueMatch) {\n const month = vagueMatch[2];\n const year = vagueMatch[3];\n const modifier = vagueMatch[1]!.toLowerCase();\n let day = 15; // default to middle\n if (modifier === 'early') day = 7;\n if (modifier === 'late') day = 23;\n const parsed = new Date(`${month} ${day}, ${year}`);\n if (!isNaN(parsed.getTime())) {\n targetDate = parsed;\n }\n }\n }\n\n // Try \"Month to Month Year\" format (cross-month range)\n if (!targetDate) {\n const crossMonthMatch = dateContent.match(/([A-Z][a-z]+)\\s+to\\s+(?:early\\s+)?([A-Z][a-z]+)\\s+(\\d{4})/i);\n if (crossMonthMatch) {\n // Use the middle of the range - approximate with second month\n const parsed = new Date(`${crossMonthMatch[2]} 1, ${crossMonthMatch[3]}`);\n if (!isNaN(parsed.getTime())) {\n targetDate = parsed;\n }\n }\n }\n\n return targetDate;\n}\n\n/**\n * Detects if an observation line indicates future intent (will do, plans to, looking forward to, etc.)\n */\nfunction isFutureIntentObservation(line: string): boolean {\n const futureIntentPatterns = [\n /\\bwill\\s+(?:be\\s+)?(?:\\w+ing|\\w+)\\b/i,\n /\\bplans?\\s+to\\b/i,\n /\\bplanning\\s+to\\b/i,\n /\\blooking\\s+forward\\s+to\\b/i,\n /\\bgoing\\s+to\\b/i,\n /\\bintends?\\s+to\\b/i,\n /\\bwants?\\s+to\\b/i,\n /\\bneeds?\\s+to\\b/i,\n /\\babout\\s+to\\b/i,\n ];\n return futureIntentPatterns.some(pattern => pattern.test(line));\n}\n\nfunction expandInlineEstimatedDates(observations: string, currentDate: Date): string {\n // Match patterns like:\n // (estimated May 27-28, 2023)\n // (meaning May 30, 2023)\n // (estimated late April to early May 2023)\n // (estimated mid-to-late May 2023)\n // These should now be at the END of observation lines\n const inlineDateRegex = /\\((estimated|meaning)\\s+([^)]+\\d{4})\\)/gi;\n\n return 
observations.replace(inlineDateRegex, (match, prefix: string, dateContent: string) => {\n const targetDate = parseDateFromContent(dateContent);\n\n if (targetDate) {\n const relative = formatRelativeTime(targetDate, currentDate);\n\n // Check if this is a future-intent observation that's now in the past\n // We need to look at the text BEFORE this match to determine intent\n const matchIndex = observations.indexOf(match);\n const lineStart = observations.lastIndexOf('\\n', matchIndex) + 1;\n const lineBeforeDate = observations.substring(lineStart, matchIndex);\n\n const isPastDate = targetDate < currentDate;\n const isFutureIntent = isFutureIntentObservation(lineBeforeDate);\n\n if (isPastDate && isFutureIntent) {\n // This was a planned action that should have happened by now\n return `(${prefix} ${dateContent} - ${relative}, likely already happened)`;\n }\n\n return `(${prefix} ${dateContent} - ${relative})`;\n }\n\n // Couldn't parse, return original\n return match;\n });\n}\n\nfunction addRelativeTimeToObservations(observations: string, currentDate: Date): string {\n // First, expand inline estimated dates with relative time\n const withInlineDates = expandInlineEstimatedDates(observations, currentDate);\n\n // Match date headers like \"Date: May 15, 2023\" or \"Date: January 1, 2024\"\n const dateHeaderRegex = /^(Date:\\s*)([A-Z][a-z]+ \\d{1,2}, \\d{4})$/gm;\n\n // First pass: collect all dates in order\n const dates: { index: number; date: Date; match: string; prefix: string; dateStr: string }[] = [];\n let regexMatch: RegExpExecArray | null;\n while ((regexMatch = dateHeaderRegex.exec(withInlineDates)) !== null) {\n const dateStr = regexMatch[2]!;\n const parsed = new Date(dateStr);\n if (!isNaN(parsed.getTime())) {\n dates.push({\n index: regexMatch.index,\n date: parsed,\n match: regexMatch[0],\n prefix: regexMatch[1]!,\n dateStr,\n });\n }\n }\n\n // If no dates found, return the inline-expanded version\n if (dates.length === 0) {\n return 
withInlineDates;\n }\n\n // Second pass: build result with relative times and gap markers\n let result = '';\n let lastIndex = 0;\n\n for (let i = 0; i < dates.length; i++) {\n const curr = dates[i]!;\n const prev = i > 0 ? dates[i - 1]! : null;\n\n // Add text before this date header\n result += withInlineDates.slice(lastIndex, curr.index);\n\n // Add gap marker if there's a significant gap from previous date\n if (prev) {\n const gap = formatGapBetweenDates(prev.date, curr.date);\n if (gap) {\n result += `\\n${gap}\\n\\n`;\n }\n }\n\n // Add the date header with relative time\n const relative = formatRelativeTime(curr.date, currentDate);\n result += `${curr.prefix}${curr.dateStr} (${relative})`;\n\n lastIndex = curr.index + curr.match.length;\n }\n\n // Add remaining text after last date header\n result += withInlineDates.slice(lastIndex);\n\n return result;\n}\n/**\n * Debug event emitted when observation-related events occur.\n * Useful for understanding what the Observer is doing.\n */\nexport interface ObservationDebugEvent {\n type:\n | 'observation_triggered'\n | 'observation_complete'\n | 'reflection_triggered'\n | 'reflection_complete'\n | 'tokens_accumulated'\n | 'step_progress';\n timestamp: Date;\n threadId: string;\n resourceId: string;\n /** Messages that were sent to the Observer */\n messages?: Array<{ role: string; content: string }>;\n /** Token counts */\n pendingTokens?: number;\n sessionTokens?: number;\n totalPendingTokens?: number;\n threshold?: number;\n /** Input token count (for reflection events) */\n inputTokens?: number;\n /** Number of active observations (for reflection events) */\n activeObservationsLength?: number;\n /** Output token count after reflection */\n outputTokens?: number;\n /** The observations that were generated */\n observations?: string;\n /** Previous observations (before this event) */\n previousObservations?: string;\n /** Observer's raw output */\n rawObserverOutput?: string;\n /** LLM usage from 
Observer/Reflector calls */\n usage?: {\n inputTokens?: number;\n outputTokens?: number;\n totalTokens?: number;\n };\n /** Step progress fields (for step_progress events) */\n stepNumber?: number;\n finishReason?: string;\n thresholdPercent?: number;\n willSave?: boolean;\n willObserve?: boolean;\n}\n\n/**\n * Configuration for ObservationalMemory\n */\nexport interface ObservationalMemoryConfig {\n /**\n * Storage adapter for persisting observations.\n * Must be a MemoryStorage instance (from MastraStorage.stores.memory).\n */\n storage: MemoryStorage;\n\n /**\n * Model for both Observer and Reflector agents.\n * Sets the model for both agents at once. Cannot be used together with\n * `observation.model` or `reflection.model` — an error will be thrown.\n *\n * @default 'google/gemini-2.5-flash'\n */\n model?: AgentConfig['model'];\n\n /**\n * Observation step configuration.\n */\n observation?: ObservationConfig;\n\n /**\n * Reflection step configuration.\n */\n reflection?: ReflectionConfig;\n\n /**\n * Memory scope for observations.\n * - 'resource': Observations span all threads for a resource (cross-thread memory)\n * - 'thread': Observations are per-thread (default)\n */\n scope?: 'resource' | 'thread';\n\n /**\n * Debug callback for observation events.\n * Called whenever observation-related events occur.\n * Useful for debugging and understanding the observation flow.\n */\n onDebugEvent?: (event: ObservationDebugEvent) => void;\n\n obscureThreadIds?: boolean;\n\n /**\n * Share the token budget between messages and observations.\n * When true, the total budget = observation.messageTokens + reflection.observationTokens.\n * - Messages can use more space when observations are small\n * - Observations can use more space when messages are small\n *\n * This helps maximize context usage by allowing flexible allocation.\n *\n * @default false\n */\n shareTokenBudget?: boolean;\n}\n\n/**\n * Internal resolved config with all defaults applied.\n * Thresholds are 
stored as ThresholdRange internally for dynamic calculation,\n * even when user provides a simple number (converted based on shareTokenBudget).\n */\ninterface ResolvedObservationConfig {\n model: AgentConfig['model'];\n /** Internal threshold - always stored as ThresholdRange for dynamic calculation */\n messageTokens: number | ThresholdRange;\n /** Whether shared token budget is enabled */\n shareTokenBudget: boolean;\n /** Model settings - merged with user config and defaults */\n modelSettings: ModelSettings;\n providerOptions: ProviderOptions;\n maxTokensPerBatch: number;\n}\n\ninterface ResolvedReflectionConfig {\n model: AgentConfig['model'];\n /** Internal threshold - always stored as ThresholdRange for dynamic calculation */\n observationTokens: number | ThresholdRange;\n /** Whether shared token budget is enabled */\n shareTokenBudget: boolean;\n /** Model settings - merged with user config and defaults */\n modelSettings: ModelSettings;\n providerOptions: ProviderOptions;\n}\n\n/**\n * Default configuration values matching the spec\n */\nexport const OBSERVATIONAL_MEMORY_DEFAULTS = {\n observation: {\n model: 'google/gemini-2.5-flash',\n messageTokens: 30_000,\n modelSettings: {\n temperature: 0.3,\n maxOutputTokens: 100_000,\n },\n providerOptions: {\n google: {\n thinkingConfig: {\n thinkingBudget: 215,\n },\n },\n },\n maxTokensPerBatch: 10_000,\n },\n reflection: {\n model: 'google/gemini-2.5-flash',\n observationTokens: 40_000,\n modelSettings: {\n temperature: 0, // Use 0 for maximum consistency in reflections\n maxOutputTokens: 100_000,\n },\n providerOptions: {\n google: {\n thinkingConfig: {\n thinkingBudget: 1024,\n },\n },\n },\n },\n} as const;\n\n/**\n * ObservationalMemory - A three-agent memory system for long conversations.\n *\n * This processor:\n * 1. On input: Injects observations into context, filters out observed messages\n * 2. 
On output: Tracks new messages, triggers Observer/Reflector when thresholds hit\n *\n * The Actor (main agent) sees:\n * - Observations (compressed history)\n * - Suggested continuation message\n * - Recent unobserved messages\n *\n * @example\n * ```ts\n * import { ObservationalMemory } from '@mastra/memory/processors';\n *\n * // Minimal configuration\n * const om = new ObservationalMemory({ storage });\n *\n * // Full configuration\n * const om = new ObservationalMemory({\n * storage,\n * model: 'google/gemini-2.5-flash', // shared model for both agents\n * shareTokenBudget: true,\n * observation: {\n * messageTokens: 30_000,\n * modelSettings: { temperature: 0.3 },\n * },\n * reflection: {\n * observationTokens: 40_000,\n * },\n * });\n *\n * const agent = new Agent({\n * inputProcessors: [om],\n * outputProcessors: [om],\n * });\n * ```\n */\nexport class ObservationalMemory implements Processor<'observational-memory'> {\n readonly id = 'observational-memory' as const;\n readonly name = 'Observational Memory';\n\n private storage: MemoryStorage;\n private tokenCounter: TokenCounter;\n private scope: 'resource' | 'thread';\n private observationConfig: ResolvedObservationConfig;\n private reflectionConfig: ResolvedReflectionConfig;\n private onDebugEvent?: (event: ObservationDebugEvent) => void;\n\n /** Internal Observer agent - created lazily */\n private observerAgent?: Agent;\n\n /** Internal Reflector agent - created lazily */\n private reflectorAgent?: Agent;\n\n private shouldObscureThreadIds = false;\n private hasher = xxhash();\n private threadIdCache = new Map<string, string>();\n\n /**\n * Track message IDs observed during this instance's lifetime.\n * Prevents re-observing messages when per-thread lastObservedAt cursors\n * haven't fully advanced past messages observed in a prior cycle.\n */\n private observedMessageIds = new Set<string>();\n\n /** Internal MessageHistory for message persistence */\n private messageHistory: MessageHistory;\n\n /**\n * 
In-memory mutex for serializing observation/reflection cycles per resource/thread.\n * Prevents race conditions where two concurrent cycles could both read isObserving=false\n * before either sets it to true, leading to lost work.\n *\n * Key format: \"resource:{resourceId}\" or \"thread:{threadId}\"\n * Value: Promise that resolves when the lock is released\n *\n * NOTE: This mutex only works within a single Node.js process. For distributed\n * deployments, external locking (Redis, database locks) would be needed, or\n * accept eventual consistency (acceptable for v1).\n */\n private locks = new Map<string, Promise<void>>();\n\n /**\n * Acquire a lock for the given key, execute the callback, then release.\n * If a lock is already held, waits for it to be released before acquiring.\n */\n private async withLock<T>(key: string, fn: () => Promise<T>): Promise<T> {\n // Wait for any existing lock to be released\n const existingLock = this.locks.get(key);\n if (existingLock) {\n await existingLock;\n }\n\n // Create a new lock\n let releaseLock: () => void;\n const lockPromise = new Promise<void>(resolve => {\n releaseLock = resolve;\n });\n this.locks.set(key, lockPromise);\n\n try {\n return await fn();\n } finally {\n // Release the lock\n releaseLock!();\n // Clean up if this is still our lock\n if (this.locks.get(key) === lockPromise) {\n this.locks.delete(key);\n }\n }\n }\n\n /**\n * Get the lock key for the current scope\n */\n private getLockKey(threadId: string | null | undefined, resourceId: string | null | undefined): string {\n if (this.scope === 'resource' && resourceId) {\n return `resource:${resourceId}`;\n }\n return `thread:${threadId ?? 'unknown'}`;\n }\n\n constructor(config: ObservationalMemoryConfig) {\n // Validate that top-level model is not used together with sub-config models\n if (config.model && config.observation?.model) {\n throw new Error(\n 'Cannot set both `model` and `observation.model`. 
Use `model` to set both agents, or set each individually.',
      );
    }
    if (config.model && config.reflection?.model) {
      throw new Error(
        'Cannot set both `model` and `reflection.model`. Use `model` to set both agents, or set each individually.',
      );
    }

    // `||` (not `??`) is deliberate here: any falsy value disables thread-id obscuring.
    this.shouldObscureThreadIds = config.obscureThreadIds || false;
    this.storage = config.storage;
    this.scope = config.scope ?? 'thread';

    // Resolve model: top-level model takes precedence, then sub-config, then default
    const observationModel =
      config.model ?? config.observation?.model ?? OBSERVATIONAL_MEMORY_DEFAULTS.observation.model;
    const reflectionModel = config.model ?? config.reflection?.model ?? OBSERVATIONAL_MEMORY_DEFAULTS.reflection.model;

    // Get base thresholds first (needed for shared budget calculation)
    const messageTokens = config.observation?.messageTokens ?? OBSERVATIONAL_MEMORY_DEFAULTS.observation.messageTokens;
    const observationTokens =
      config.reflection?.observationTokens ?? OBSERVATIONAL_MEMORY_DEFAULTS.reflection.observationTokens;
    const isSharedBudget = config.shareTokenBudget ?? false;

    // Total context budget when shared budget is enabled
    const totalBudget = messageTokens + observationTokens;

    // Resolve observation config with defaults
    this.observationConfig = {
      model: observationModel,
      // When shared budget, store as range: min = base threshold, max = total budget
      // This allows messages to expand into unused observation space
      messageTokens: isSharedBudget ? { min: messageTokens, max: totalBudget } : messageTokens,
      shareTokenBudget: isSharedBudget,
      modelSettings: {
        temperature:
          config.observation?.modelSettings?.temperature ??
          OBSERVATIONAL_MEMORY_DEFAULTS.observation.modelSettings.temperature,
        maxOutputTokens:
          config.observation?.modelSettings?.maxOutputTokens ??
          OBSERVATIONAL_MEMORY_DEFAULTS.observation.modelSettings.maxOutputTokens,
      },
      providerOptions: config.observation?.providerOptions ?? OBSERVATIONAL_MEMORY_DEFAULTS.observation.providerOptions,
      maxTokensPerBatch:
        config.observation?.maxTokensPerBatch ?? OBSERVATIONAL_MEMORY_DEFAULTS.observation.maxTokensPerBatch,
    };

    // Resolve reflection config with defaults
    this.reflectionConfig = {
      model: reflectionModel,
      observationTokens: observationTokens,
      shareTokenBudget: isSharedBudget,
      modelSettings: {
        temperature:
          config.reflection?.modelSettings?.temperature ??
          OBSERVATIONAL_MEMORY_DEFAULTS.reflection.modelSettings.temperature,
        maxOutputTokens:
          config.reflection?.modelSettings?.maxOutputTokens ??
          OBSERVATIONAL_MEMORY_DEFAULTS.reflection.modelSettings.maxOutputTokens,
      },
      providerOptions: config.reflection?.providerOptions ?? OBSERVATIONAL_MEMORY_DEFAULTS.reflection.providerOptions,
    };

    this.tokenCounter = new TokenCounter();
    this.onDebugEvent = config.onDebugEvent;

    // Create internal MessageHistory for message persistence
    // OM handles message saving itself (in processOutputStep) instead of relying on
    // the Memory class's MessageHistory processor
    this.messageHistory = new MessageHistory({ storage: this.storage });
  }

  /**
   * Get the current configuration for this OM instance.
   * Used by the server to expose config to the UI when OM is added via processors.
   *
   * Note: the threshold fields may be a plain number or a {min, max}
   * ThresholdRange — the range form is produced when shareTokenBudget is on.
   */
  get config(): {
    scope: 'resource' | 'thread';
    observation: {
      messageTokens: number | ThresholdRange;
    };
    reflection: {
      observationTokens: number | ThresholdRange;
    };
  } {
    return {
      scope: this.scope,
      observation: {
        messageTokens: this.observationConfig.messageTokens,
      },
      reflection: {
        observationTokens: this.reflectionConfig.observationTokens,
      },
    };
  }

  /**
   * Get the full config including resolved model names.
   * This is async because it needs to resolve the model configs.
   *
   * @param requestContext - Optional request context forwarded to model resolution.
   * @returns The same shape as `config`, plus the resolved "provider/modelId" string per agent.
   */
  async getResolvedConfig(requestContext?: RequestContext): Promise<{
    scope: 'resource' | 'thread';
    observation: {
      messageTokens: number | ThresholdRange;
      model: string;
    };
    reflection: {
      observationTokens: number | ThresholdRange;
      model: string;
    };
  }> {
    // Helper to get the model config to resolve (handles ModelWithRetries[] by taking first)
    // NOTE(review): an empty array falls back to the *observation* default even when this is
    // used for the reflection model — confirm that asymmetry is intended.
    const getModelToResolve = (model: AgentConfig['model']) => {
      if (Array.isArray(model)) {
        return model[0]?.model ?? OBSERVATIONAL_MEMORY_DEFAULTS.observation.model;
      }
      return model;
    };

    // Format as provider/modelId (e.g., "google/gemini-2.5-flash")
    const formatModelName = (model: { provider?: string; modelId: string }) => {
      return model.provider ? `${model.provider}/${model.modelId}` : model.modelId;
    };

    // Helper to safely resolve a model config
    const safeResolveModel = async (modelConfig: AgentConfig['model']): Promise<string> => {
      const modelToResolve = getModelToResolve(modelConfig);

      try {
        // resolveModelConfig handles both static configs and functions
        const resolved = await resolveModelConfig(modelToResolve, requestContext);
        return formatModelName(resolved);
      } catch (error) {
        // If resolution fails, return a placeholder rather than failing the whole call
        console.error('[OM] Failed to resolve model config:', error);
        return '(unknown)';
      }
    };

    // Resolve both models in parallel — they are independent.
    const [observationModelName, reflectionModelName] = await Promise.all([
      safeResolveModel(this.observationConfig.model),
      safeResolveModel(this.reflectionConfig.model),
    ]);

    return {
      scope: this.scope,
      observation: {
        messageTokens: this.observationConfig.messageTokens,
        model: observationModelName,
      },
      reflection: {
        observationTokens: this.reflectionConfig.observationTokens,
        model: reflectionModelName,
      },
    };
  }

  /**
   * Emit a debug event if the callback is configured. No-op otherwise.
   */
  private emitDebugEvent(event: ObservationDebugEvent): void {
    if (this.onDebugEvent) {
      this.onDebugEvent(event);
    }
  }

  // ASYNC BUFFERING DISABLED - See note at top of file
  // /**
  // * Validate that bufferEvery is less than the threshold
  // */
  // private validateBufferConfig(): void {
  // const 
observationThreshold = this.getMaxThreshold(this.observationConfig.messageTokens);
  // if (this.observationConfig.bufferEvery && this.observationConfig.bufferEvery >= observationThreshold) {
  // throw new Error(
  // `observation.bufferEvery (${this.observationConfig.bufferEvery}) must be less than messageTokens (${observationThreshold})`,
  // );
  // }

  // const reflectionThreshold = this.getMaxThreshold(this.reflectionConfig.observationTokens);
  // if (this.reflectionConfig.bufferEvery && this.reflectionConfig.bufferEvery >= reflectionThreshold) {
  // throw new Error(
  // `reflection.bufferEvery (${this.reflectionConfig.bufferEvery}) must be less than observationTokens (${reflectionThreshold})`,
  // );
  // }
  // }

  /**
   * Get the maximum value from a threshold (simple number or range).
   * A plain number is returned unchanged; a ThresholdRange yields its `max`.
   */
  private getMaxThreshold(threshold: number | ThresholdRange): number {
    if (typeof threshold === 'number') {
      return threshold;
    }
    return threshold.max;
  }

  /**
   * Calculate dynamic threshold based on observation space.
   * When shareTokenBudget is enabled, the message threshold can expand
   * into unused observation space, up to the total context budget.
   *
   * Total budget = messageTokens + observationTokens
   * Effective threshold = totalBudget - currentObservationTokens
   *
   * Example with 30k:40k thresholds (70k total):
   * - 0 observations → messages can use ~70k
   * - 10k observations → messages can use ~60k
   * - 40k observations → messages back to ~30k
   */
  private calculateDynamicThreshold(threshold: number | ThresholdRange, currentObservationTokens: number): number {
    // If not using adaptive threshold (simple number), return as-is
    if (typeof threshold === 'number') {
      return threshold;
    }

    // Adaptive threshold: use remaining space in total budget
    // Total budget is stored as threshold.max (base + reflection threshold)
    // Base threshold is stored as threshold.min
    const totalBudget = threshold.max;
    const baseThreshold = threshold.min;

    // Effective threshold = total budget minus current observations
    // But never go below the base threshold
    const effectiveThreshold = Math.max(totalBudget - currentObservationTokens, baseThreshold);

    return Math.round(effectiveThreshold);
  }

  /**
   * Get or create the Observer agent (lazily constructed, cached on the instance).
   */
  private getObserverAgent(): Agent {
    if (!this.observerAgent) {
      const systemPrompt = buildObserverSystemPrompt();

      this.observerAgent = new Agent({
        id: 'observational-memory-observer',
        name: 'Observer',
        instructions: systemPrompt,
        model: this.observationConfig.model,
      });
    }
    return this.observerAgent;
  }

  /**
   * Get or create the Reflector agent (lazily constructed, cached on the instance).
   */
  private getReflectorAgent(): Agent {
    if (!this.reflectorAgent) {
      const systemPrompt = buildReflectorSystemPrompt();

      this.reflectorAgent = new Agent({
        id: 'observational-memory-reflector',
        name: 'Reflector',
        instructions: systemPrompt,
        model: this.reflectionConfig.model,
      });
    }
    return this.reflectorAgent;
  }

  /**
   * Get thread/resource IDs for storage lookup.
   * In resource scope the thread ID is nulled out so records are keyed per resource;
   * in both scopes a missing resourceId falls back to the threadId.
   */
  private getStorageIds(threadId: string, resourceId?: string): { threadId: string | null; resourceId: string } {
    if (this.scope === 'resource') {
      return {
        threadId: null,
        resourceId: resourceId ?? threadId,
      };
    }
    return {
      threadId,
      resourceId: resourceId ?? threadId,
    };
  }

  /**
   * Get or create the observational memory record for the scoped thread/resource.
   * On first creation, snapshots the current OM config and the host timezone.
   */
  private async getOrCreateRecord(threadId: string, resourceId?: string): Promise<ObservationalMemoryRecord> {
    const ids = this.getStorageIds(threadId, resourceId);
    let record = await this.storage.getObservationalMemory(ids.threadId, ids.resourceId);

    if (!record) {
      // Capture the timezone used for Observer date formatting
      const observedTimezone = Intl.DateTimeFormat().resolvedOptions().timeZone;

      record = await this.storage.initializeObservationalMemory({
        threadId: ids.threadId,
        resourceId: ids.resourceId,
        scope: this.scope,
        config: {
          observation: this.observationConfig,
          reflection: this.reflectionConfig,
          scope: this.scope,
        },
        observedTimezone,
      });
    }

    return record;
  }

  /**
   * Check if we need to trigger reflection.
   * Strictly greater-than: reflection fires only once observations exceed the max threshold.
   */
  private shouldReflect(observationTokens: number): boolean {
    const threshold = this.getMaxThreshold(this.reflectionConfig.observationTokens);
    return observationTokens > threshold;
  }

  // ════════════════════════════════════════════════════════════════════════════
  // DATA-OM-OBSERVATION PART HELPERS (Start/End/Failed markers)
  // These helpers manage the observation boundary markers within messages.
  //
  // Flow:
  // 1. Before observation: [...messageParts]
  // 2. Insert start: [...messageParts, start] → stream to UI (loading state)
  // 3. After success: [...messageParts, start, end] → stream to UI (complete)
  // 4. 
After failure: [...messageParts, start, failed]
  //
  // For filtering, we look for the last completed observation (start + end pair).
  // A start without end means observation is in progress.
  // ════════════════════════════════════════════════════════════════════════════

  /**
   * Get current config snapshot for observation markers.
   */
  private getObservationMarkerConfig(): ObservationMarkerConfig {
    return {
      messageTokens: this.getMaxThreshold(this.observationConfig.messageTokens),
      observationTokens: this.getMaxThreshold(this.reflectionConfig.observationTokens),
      scope: this.scope,
    };
  }

  /**
   * Create a start marker for when observation begins.
   */
  private createObservationStartMarker(params: {
    cycleId: string;
    operationType: 'observation' | 'reflection';
    tokensToObserve: number;
    recordId: string;
    threadId: string;
    threadIds: string[];
  }): DataOmObservationStartPart {
    return {
      type: 'data-om-observation-start',
      data: {
        cycleId: params.cycleId,
        operationType: params.operationType,
        startedAt: new Date().toISOString(),
        tokensToObserve: params.tokensToObserve,
        recordId: params.recordId,
        threadId: params.threadId,
        threadIds: params.threadIds,
        config: this.getObservationMarkerConfig(),
      },
    };
  }

  /**
   * Create an end marker for when observation completes successfully.
   * durationMs is derived from the caller-supplied startedAt timestamp.
   */
  private createObservationEndMarker(params: {
    cycleId: string;
    operationType: 'observation' | 'reflection';
    startedAt: string;
    tokensObserved: number;
    observationTokens: number;
    observations?: string;
    currentTask?: string;
    suggestedResponse?: string;
    recordId: string;
    threadId: string;
  }): DataOmObservationEndPart {
    const completedAt = new Date().toISOString();
    const durationMs = new Date(completedAt).getTime() - new Date(params.startedAt).getTime();

    return {
      type: 'data-om-observation-end',
      data: {
        cycleId: params.cycleId,
        operationType: params.operationType,
        completedAt,
        durationMs,
        tokensObserved: params.tokensObserved,
        observationTokens: params.observationTokens,
        observations: params.observations,
        currentTask: params.currentTask,
        suggestedResponse: params.suggestedResponse,
        recordId: params.recordId,
        threadId: params.threadId,
      },
    };
  }

  /**
   * Create a failed marker for when observation fails.
   */
  private createObservationFailedMarker(params: {
    cycleId: string;
    operationType: 'observation' | 'reflection';
    startedAt: string;
    tokensAttempted: number;
    error: string;
    recordId: string;
    threadId: string;
  }): DataOmObservationFailedPart {
    const failedAt = new Date().toISOString();
    const durationMs = new Date(failedAt).getTime() - new Date(params.startedAt).getTime();

    return {
      type: 'data-om-observation-failed',
      data: {
        cycleId: params.cycleId,
        operationType: params.operationType,
        failedAt,
        durationMs,
        tokensAttempted: params.tokensAttempted,
        error: params.error,
        recordId: params.recordId,
        threadId: params.threadId,
      },
    };
  }

  /**
   * Find the last completed observation boundary in a message's parts.
   * A completed observation is a start marker followed by an end marker.
   *
   * Returns the index of the END marker (which is the observation boundary),
   * or -1 if no completed observation is found.
   */
  private findLastCompletedObservationBoundary(message: MastraDBMessage): number {
    const parts = message.content?.parts;
    if (!parts || !Array.isArray(parts)) return -1;

    // Search from the end to find the most recent end marker
    for (let i = parts.length - 1; i >= 0; i--) {
      const part = parts[i] as { type?: string };
      if (part?.type === 'data-om-observation-end') {
        // Found an end marker - this is the observation boundary
        return i;
      }
    }
    return -1;
  }

  /**
   * Check if a message has an in-progress observation (start without end).
   * Scans from the end for the latest start and the latest end/failed marker;
   * "in progress" means the latest start appears after any end/failed.
   */
  private hasInProgressObservation(message: MastraDBMessage): boolean {
    const parts = message.content?.parts;
    if (!parts || !Array.isArray(parts)) return false;

    let lastStartIndex = -1;
    let lastEndOrFailedIndex = -1;

    for (let i = parts.length - 1; i >= 0; i--) {
      const part = parts[i] as { type?: string };
      if (part?.type === 'data-om-observation-start' && lastStartIndex === -1) {
        lastStartIndex = i;
      }
      if (
        (part?.type === 'data-om-observation-end' || part?.type === 'data-om-observation-failed') &&
        lastEndOrFailedIndex === -1
      ) {
        lastEndOrFailedIndex = i;
      }
    }

    // In progress if we have a start that comes after any end/failed
    return lastStartIndex !== -1 && lastStartIndex > lastEndOrFailedIndex;
  }

  // NOTE(review): stale documentation for a removed marker-insertion helper.
  // Markers are now streamed via writer.custom() and created by the
  // createObservation*Marker helpers above; kept here for historical context:
  // start markers were pushed directly into the parts array, while end/failed
  // markers "sealed" the message (persisted to storage) so future content would
  // not be merged into it and the markers survive page refresh.

  /**
   * Get unobserved parts from a message.
   * If the message has a completed observation (start + end), only return parts after the end.
   * If observation is in progress (start without end), include parts before the start.
   * Otherwise, return all parts.
   */
  private getUnobservedParts(message: MastraDBMessage): MastraDBMessage['content']['parts'] {
    const parts = message.content?.parts;
    if (!parts || !Array.isArray(parts)) return [];

    const endMarkerIndex = this.findLastCompletedObservationBoundary(message);
    if (endMarkerIndex === -1) {
      // No completed observation - all parts are unobserved
      // (This includes the case where observation is in progress)
      return parts.filter(p => {
        const part = p as { type?: string };
        // Exclude start markers that are in progress
        return part?.type !== 'data-om-observation-start';
      });
    }

    // Return only parts after the end marker (excluding start/end/failed markers)
    return parts.slice(endMarkerIndex + 1).filter(p => {
      const part = p as { type?: string };
      return !part?.type?.startsWith('data-om-observation-');
    });
  }

  /**
   * Check if a message has any unobserved parts.
   */
  private hasUnobservedParts(message: MastraDBMessage): boolean {
    return this.getUnobservedParts(message).length > 0;
  }

  /**
   * Create a virtual message containing only the unobserved parts.
   * This is used for token counting and observation.
   * Returns null when every part has already been observed.
   */
  private createUnobservedMessage(message: MastraDBMessage): MastraDBMessage | null {
    const unobservedParts = this.getUnobservedParts(message);
    if (unobservedParts.length === 0) return null;

    return {
      ...message,
      content: {
        ...message.content,
        parts: unobservedParts,
      },
    };
  }

  /**
   * Get unobserved messages with part-level filtering.
   *
   * This method uses data-om-observation-end markers to filter at the part level:
   * 1. For messages WITH a completed observation: only return parts AFTER the end marker
   * 2. For messages WITHOUT completed observation: check timestamp against lastObservedAt
   *
   * This handles the case where a single message accumulates many parts
   * (like tool calls) during an agentic loop - we only observe the new parts.
   */
  private getUnobservedMessages(allMessages: MastraDBMessage[], record: ObservationalMemoryRecord): MastraDBMessage[] {
    const lastObservedAt = record.lastObservedAt;
    // Safeguard: track message IDs that were already observed to prevent re-observation
    // This handles edge cases like process restarts where lastObservedAt might not capture all messages
    const observedMessageIds = Array.isArray(record.observedMessageIds)
      ? new Set(record.observedMessageIds)
      : undefined;

    if (!lastObservedAt) {
      // No observations yet - all messages are unobserved
      return allMessages;
    }

    const result: MastraDBMessage[] = [];

    for (const msg of allMessages) {
      // First check: skip if this message ID was already observed (safeguard against re-observation)
      if (observedMessageIds?.has(msg.id)) {
        continue;
      }

      // Check if this message has a completed observation
      const endMarkerIndex = this.findLastCompletedObservationBoundary(msg);
      const inProgress = this.hasInProgressObservation(msg);

      if (inProgress) {
        // Include the full message for in-progress observations
        // The Observer is currently working on this
        result.push(msg);
      } else if (endMarkerIndex !== -1) {
        // Message has a completed observation - only include parts after it
        const virtualMsg = this.createUnobservedMessage(msg);
        if (virtualMsg) {
          result.push(virtualMsg);
        }
      } else {
        // No observation markers - fall back to timestamp-based filtering
        if (!msg.createdAt) {
          // Messages without timestamps are always included
          result.push(msg);
        } else {
          const msgDate = new Date(msg.createdAt);
          if (msgDate > lastObservedAt) {
            result.push(msg);
          }
        }
      }
    }

    return result;
  }

  /**
   * Wrapper for observer/reflector agent.generate() calls that checks for abort.
   * agent.generate() returns an empty result on abort instead of throwing,
   * so we must check the signal before and after the call.
   * Retries are handled by Mastra's built-in p-retry at the model execution layer.
   */
  private async withAbortCheck<T>(fn: () => Promise<T>, abortSignal?: AbortSignal): Promise<T> {
    if (abortSignal?.aborted) {
      throw new Error('The operation was aborted.');
    }

    const result = await fn();

    if (abortSignal?.aborted) {
      throw new Error('The operation was aborted.');
    }

    return result;
  }

  /**
   * Call the Observer agent to extract observations.
   *
   * @param existingObservations - Prior observations to build on, if any.
   * @param messagesToObserve - Messages the Observer should summarize.
   * @param abortSignal - Optional cancellation signal (checked before/after generate).
   */
  private async callObserver(
    existingObservations: string | undefined,
    messagesToObserve: MastraDBMessage[],
    abortSignal?: AbortSignal,
  ): Promise<{
    observations: string;
    currentTask?: string;
    suggestedContinuation?: string;
    usage?: { inputTokens?: number; outputTokens?: number; totalTokens?: number };
  }> {
    const agent = this.getObserverAgent();

    const prompt = buildObserverPrompt(existingObservations, messagesToObserve);

    const result = await this.withAbortCheck(
      () =>
        agent.generate(prompt, {
          modelSettings: {
            ...this.observationConfig.modelSettings,
          },
          providerOptions: this.observationConfig.providerOptions as any,
          abortSignal,
        }),
      abortSignal,
    );

    const parsed = parseObserverOutput(result.text);

    // Extract usage from result (totalUsage or usage)
    const usage = result.totalUsage ?? result.usage;

    return {
      observations: parsed.observations,
      currentTask: parsed.currentTask,
      suggestedContinuation: parsed.suggestedContinuation,
      usage: usage
        ? {
            inputTokens: usage.inputTokens,
            outputTokens: usage.outputTokens,
            totalTokens: usage.totalTokens,
          }
        : undefined,
    };
  }

  /**
   * Call the Observer agent for multiple threads in a single batched request.
   * This is more efficient than calling the Observer for each thread individually.
   * Returns per-thread results with observations, currentTask, and suggestedContinuation,
   * plus the total usage for the batch.
   */
  private async callMultiThreadObserver(
    existingObservations: string | undefined,
    messagesByThread: Map<string, MastraDBMessage[]>,
    threadOrder: string[],
    abortSignal?: AbortSignal,
  ): Promise<{
    results: Map<
      string,
      {
        observations: string;
        currentTask?: string;
        suggestedContinuation?: string;
      }
    >;
    usage?: { inputTokens?: number; outputTokens?: number; totalTokens?: number };
  }> {
    // Create a multi-thread observer agent with the special system prompt
    const agent = new Agent({
      id: 'multi-thread-observer',
      name: 'multi-thread-observer',
      model: this.observationConfig.model,
      instructions: buildObserverSystemPrompt(true),
    });

    const prompt = buildMultiThreadObserverPrompt(existingObservations, messagesByThread, threadOrder);

    // Flatten all messages for context dump
    const allMessages: MastraDBMessage[] = [];
    for (const msgs of messagesByThread.values()) {
      allMessages.push(...msgs);
    }

    // Mark all messages as observed (skip any already-observed)
    for (const msg of allMessages) {
      this.observedMessageIds.add(msg.id);
    }

    const result = await this.withAbortCheck(
      () =>
        agent.generate(prompt, {
          modelSettings: {
            ...this.observationConfig.modelSettings,
          },
          providerOptions: this.observationConfig.providerOptions as any,
          abortSignal,
        }),
      abortSignal,
    );

    const parsed = parseMultiThreadObserverOutput(result.text);

    // Convert to the expected return format
    const results = new Map<
      string,
      {
        observations: string;
        currentTask?: string;
        suggestedContinuation?: string;
      }
    >();

    for (const [threadId, threadResult] of parsed.threads) {
      results.set(threadId, {
        observations: threadResult.observations,
        currentTask: threadResult.currentTask,
        suggestedContinuation: threadResult.suggestedContinuation,
      });
    }

    // Ensure every requested thread has a result so its cursor still advances
    // (threads missing from the parsed output get an empty observations entry).
    for (const threadId of threadOrder) {
      if (!results.has(threadId)) {
        // Add empty result so we still update the cursor
        results.set(threadId, { observations: '' });
      }
    }

    // Extract usage from result
    const usage = result.totalUsage ?? result.usage;

    return {
      results,
      usage: usage
        ? {
            inputTokens: usage.inputTokens,
            outputTokens: usage.outputTokens,
            totalTokens: usage.totalTokens,
          }
        : undefined,
    };
  }

  /**
   * Call the Reflector agent to condense observations.
   * Includes compression validation and a single retry with extra compression
   * guidance; usage from both attempts is accumulated. If a streamContext is
   * provided, failed/start markers are emitted around the retry (mutating
   * streamContext.cycleId and streamContext.startedAt for the new cycle).
   */
  private async callReflector(
    observations: string,
    manualPrompt?: string,
    streamContext?: {
      writer?: ProcessorStreamWriter;
      cycleId: string;
      startedAt: string;
      recordId: string;
      threadId: string;
    },
    observationTokensThreshold?: number,
    abortSignal?: AbortSignal,
  ): Promise<{
    observations: string;
    suggestedContinuation?: string;
    usage?: { inputTokens?: number; outputTokens?: number; totalTokens?: number };
  }> {
    const agent = this.getReflectorAgent();

    const originalTokens = this.tokenCounter.countObservations(observations);

    // Get the target threshold - use provided value or fall back to config
    const targetThreshold = observationTokensThreshold ?? this.getMaxThreshold(this.reflectionConfig.observationTokens);

    // Track total usage across attempts
    let totalUsage = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };

    // First attempt
    let prompt = buildReflectorPrompt(observations, manualPrompt, false);
    let result = await this.withAbortCheck(
      () =>
        agent.generate(prompt, {
          modelSettings: {
            ...this.reflectionConfig.modelSettings,
          },
          providerOptions: this.reflectionConfig.providerOptions as any,
          abortSignal,
        }),
      abortSignal,
    );

    // Accumulate usage from first attempt
    const firstUsage = result.totalUsage ?? result.usage;
    if (firstUsage) {
      totalUsage.inputTokens += firstUsage.inputTokens ?? 0;
      totalUsage.outputTokens += firstUsage.outputTokens ?? 0;
      totalUsage.totalTokens += firstUsage.totalTokens ?? 0;
    }

    let parsed = parseReflectorOutput(result.text);
    let reflectedTokens = this.tokenCounter.countObservations(parsed.observations);

    // Check if compression was successful (reflected tokens should be below target threshold)
    if (!validateCompression(reflectedTokens, targetThreshold)) {
      // Emit failed marker for first attempt, then start marker for retry
      if (streamContext?.writer) {
        const failedMarker = this.createObservationFailedMarker({
          cycleId: streamContext.cycleId,
          operationType: 'reflection',
          startedAt: streamContext.startedAt,
          tokensAttempted: originalTokens,
          error: `Did not compress below threshold (${originalTokens} → ${reflectedTokens}, target: ${targetThreshold}), retrying with compression guidance`,
          recordId: streamContext.recordId,
          threadId: streamContext.threadId,
        });
        await streamContext.writer.custom(failedMarker).catch(() => {});

        // Generate new cycleId for retry
        const retryCycleId = crypto.randomUUID();
        streamContext.cycleId = retryCycleId;

        const startMarker = this.createObservationStartMarker({
          cycleId: retryCycleId,
          operationType: 'reflection',
          tokensToObserve: originalTokens,
          recordId: streamContext.recordId,
          threadId: streamContext.threadId,
          threadIds: [streamContext.threadId],
        });
        // Update startedAt from the marker that was just created
        streamContext.startedAt = startMarker.data.startedAt;
        await streamContext.writer.custom(startMarker).catch(() => {});
      }

      // Retry with compression prompt
      prompt = buildReflectorPrompt(observations, manualPrompt, true);
      result = await this.withAbortCheck(
        () =>
          agent.generate(prompt, {
            modelSettings: {
              ...this.reflectionConfig.modelSettings,
            },
            providerOptions: this.reflectionConfig.providerOptions as any,
            abortSignal,
          }),
        abortSignal,
      );

      // Accumulate usage from retry attempt
      const retryUsage = result.totalUsage ?? result.usage;
      if (retryUsage) {
        totalUsage.inputTokens += retryUsage.inputTokens ?? 0;
        totalUsage.outputTokens += retryUsage.outputTokens ?? 0;
        totalUsage.totalTokens += retryUsage.totalTokens ?? 0;
      }

      parsed = parseReflectorOutput(result.text);
      reflectedTokens = this.tokenCounter.countObservations(parsed.observations);
    }

    return {
      observations: parsed.observations,
      suggestedContinuation: parsed.suggestedContinuation,
      usage: totalUsage.totalTokens > 0 ? totalUsage : undefined,
    };
  }

  /**
   * Format observations for injection into the Actor's context.
   * Applies token optimization before presenting to the Actor, and (when
   * currentDate is given) annotates date headers with relative times.
   *
   * @param observations - The observations to inject
   * @param currentTask - Thread-specific current task (from thread metadata, not stored in observations)
   * @param suggestedResponse - Thread-specific suggested response (from thread metadata)
   * @param unobservedContextBlocks - Formatted <unobserved-context> blocks from other threads (resource scope only)
   * @param currentDate - Reference date for relative-time annotations
   */
  private formatObservationsForContext(
    observations: string,
    currentTask?: string,
    suggestedResponse?: string,
    unobservedContextBlocks?: string,
    currentDate?: Date,
  ): string {
    // Optimize observations to save tokens
    let optimized = optimizeObservationsForContext(observations);

    // Add relative time annotations to date headers if currentDate is provided
    if (currentDate) {
      optimized = addRelativeTimeToObservations(optimized, currentDate);
    }

    let content = `
The following observations block contains your memory of past conversations with this user.

<observations>
${optimized}
</observations>

IMPORTANT: When responding, reference specific details from these observations. Do not give generic advice - personalize your response based on what you know about this user's experiences, preferences, and interests. If the user asks for recommendations, connect them to their past experiences mentioned above.

KNOWLEDGE UPDATES: When asked about current state (e.g., "where do I currently...", "what is my current..."), always prefer the MOST RECENT information. Observations include dates - if you see conflicting information, the newer observation supersedes the older one. Look for phrases like "will start", "is switching", "changed to", "moved to" as indicators that previous information has been updated.

PLANNED ACTIONS: If the user stated they planned to do something (e.g., "I'm going to...", "I'm looking forward to...", "I will...") and the date they planned to do it is now in the past (check the relative time like "3 weeks ago"), assume they completed the action unless there's evidence they didn't. For example, if someone said "I'll start my new diet on Monday" and that was 2 weeks ago, assume they started the diet.`;

    // Add unobserved context from other threads (resource scope only)
    if (unobservedContextBlocks) {
      content += `\n\nThe following content is from OTHER conversations different from the current conversation, they're here for reference, but they're not necessarily your focus:\nSTART_OTHER_CONVERSATIONS_BLOCK\n${unobservedContextBlocks}\nEND_OTHER_CONVERSATIONS_BLOCK`;
    }

    // Dynamically inject current-task from thread metadata (not stored in observations)
    if (currentTask) {
      content += `

<current-task>
${currentTask}
</current-task>`;
    }

    if (suggestedResponse) {
      content += `

<suggested-response>
${suggestedResponse}
</suggested-response>
`;
    }

    return content;
  }

  /**
   * Get threadId and resourceId from either RequestContext or MessageList.
   * Prefers the 'MastraMemory' entry on RequestContext (set by Memory); falls
   * back to the MessageList's serialized memoryInfo. Returns null when neither
   * source provides a thread id.
   */
  private getThreadContext(
    requestContext: ProcessInputArgs['requestContext'],
    messageList: MessageList,
  ): { threadId: string; resourceId?: string } | null {
    // First try RequestContext (set by Memory)
    const memoryContext = requestContext?.get('MastraMemory') as
      | { thread?: { id: string }; resourceId?: string }
      | undefined;

    if (memoryContext?.thread?.id) {
      return {
        threadId: memoryContext.thread.id,
        resourceId: memoryContext.resourceId,
      };
    }

    // Fallback to MessageList's memoryInfo
    const serialized = messageList.serialize();
    if (serialized.memoryInfo?.threadId) {
      return {
        threadId: serialized.memoryInfo.threadId,
        resourceId: serialized.memoryInfo.resourceId,
      };
    }

    return null;
  }

  /**
   * Process input at each step - check threshold, observe if needed, save, inject observations.
   * This is the ONLY processor method - all OM logic happens here.
   *
   * Flow:
   * 1. Load historical messages (step 0 only)
   * 2. Check if observation threshold is reached
   * 3. If threshold reached: observe, save messages with markers
   * 4. Inject observations into context
   * 5. Filter out already-observed messages
   */
  async processInputStep(args: ProcessInputStepArgs): Promise<MessageList | MastraDBMessage[]> {
    const { messageList, requestContext, stepNumber, state: _state, writer, abortSignal, abort } = args;
    // Default state to {} for backward compat with older @mastra/core that doesn't pass state
    const state = _state ?? 
({} as Record<string, unknown>);\n\n const context = this.getThreadContext(requestContext, messageList);\n if (!context) {\n return messageList;\n }\n\n const { threadId, resourceId } = context;\n\n // Check if readOnly from memoryConfig\n const memoryContext = parseMemoryRequestContext(requestContext);\n const readOnly = memoryContext?.memoryConfig?.readOnly;\n\n // Fetch fresh record\n let record = await this.getOrCreateRecord(threadId, resourceId);\n\n // ════════════════════════════════════════════════════════════════════════\n // STEP 1: LOAD HISTORICAL MESSAGES (step 0 only)\n // ════════════════════════════════════════════════════════════════════════\n\n if (!state.initialSetupDone) {\n state.initialSetupDone = true;\n\n // Load unobserved messages from storage\n const lastObservedAt = record.lastObservedAt;\n\n if (this.scope === 'resource' && resourceId) {\n // RESOURCE SCOPE: Load only the current thread's historical messages.\n // Other threads' unobserved context is loaded fresh each step (below)\n // to reflect the latest lastObservedAt cursors after observations.\n const currentThreadMessages = await this.loadUnobservedMessages(threadId, undefined, lastObservedAt);\n\n // Add only current thread's messages to messageList (skip fully observed)\n for (const msg of currentThreadMessages) {\n if (msg.role !== 'system') {\n if (!this.hasUnobservedParts(msg) && this.findLastCompletedObservationBoundary(msg) !== -1) {\n continue;\n }\n messageList.add(msg, 'memory');\n }\n }\n } else {\n // THREAD SCOPE: Load unobserved messages using resource-level lastObservedAt\n const historicalMessages = await this.loadUnobservedMessages(threadId, resourceId, lastObservedAt);\n\n if (historicalMessages.length > 0) {\n // Thread scope: add all messages (skip fully observed)\n for (const msg of historicalMessages) {\n if (msg.role !== 'system') {\n if (!this.hasUnobservedParts(msg) && this.findLastCompletedObservationBoundary(msg) !== -1) {\n continue;\n }\n 
messageList.add(msg, 'memory');\n }\n }\n }\n }\n }\n\n // ════════════════════════════════════════════════════════════════════════\n // STEP 1b: LOAD OTHER THREADS' UNOBSERVED CONTEXT (resource scope, every step)\n // Loaded fresh each step so it reflects the latest lastObservedAt cursors\n // after observations complete. Not cached in state.\n // ════════════════════════════════════════════════════════════════════════\n let unobservedContextBlocks: string | undefined;\n if (this.scope === 'resource' && resourceId) {\n unobservedContextBlocks = await this.loadOtherThreadsContext(resourceId, threadId);\n }\n\n // ════════════════════════════════════════════════════════════════════════\n // STEP 2: CHECK THRESHOLD AND OBSERVE IF NEEDED\n // On step N > 0, messageList contains the previous step's output\n // ════════════════════════════════════════════════════════════════════════\n if (!readOnly) {\n const allMessages = messageList.get.all.db();\n const unobservedMessages = this.getUnobservedMessages(allMessages, record);\n const currentSessionTokens = this.tokenCounter.countMessages(unobservedMessages);\n // In resource scope, also count tokens from other threads' unobserved context blocks.\n // These are injected as a system message but not included in messageList,\n // so they'd otherwise be invisible to the threshold check.\n const otherThreadTokens = unobservedContextBlocks ? this.tokenCounter.countString(unobservedContextBlocks) : 0;\n const currentObservationTokens = record.observationTokenCount ?? 0;\n const pendingTokens = record.pendingMessageTokens ?? 
0;\n const totalPendingTokens = pendingTokens + currentSessionTokens + otherThreadTokens;\n\n const threshold = this.calculateDynamicThreshold(this.observationConfig.messageTokens, currentObservationTokens);\n // Calculate effective reflection threshold for UI display\n // When adaptive threshold is enabled, both thresholds share a budget\n // Reflection threshold = total budget - message threshold (what's left for observations)\n const baseReflectionThreshold = this.getMaxThreshold(this.reflectionConfig.observationTokens);\n const isSharedBudget = typeof this.observationConfig.messageTokens !== 'number';\n const totalBudget = isSharedBudget\n ? (this.observationConfig.messageTokens as { min: number; max: number }).max\n : 0;\n const effectiveObservationTokensThreshold = isSharedBudget\n ? Math.max(totalBudget - threshold, 1000) // What's left after message threshold\n : baseReflectionThreshold;\n const observationTokensPercent = Math.round(\n (currentObservationTokens / effectiveObservationTokensThreshold) * 100,\n );\n\n // Emit progress event for UI feedback\n this.emitDebugEvent({\n type: 'step_progress',\n timestamp: new Date(),\n threadId,\n resourceId: resourceId ?? 
'',\n stepNumber,\n finishReason: 'unknown',\n pendingTokens: totalPendingTokens,\n threshold,\n thresholdPercent: Math.round((totalPendingTokens / threshold) * 100),\n willSave: totalPendingTokens >= threshold,\n willObserve: totalPendingTokens >= threshold,\n });\n\n // Stream progress part to UI for real-time feedback\n if (writer) {\n const progressPart: DataOmProgressPart = {\n type: 'data-om-progress',\n data: {\n pendingTokens: totalPendingTokens,\n messageTokens: threshold,\n messageTokensPercent: Math.round((totalPendingTokens / threshold) * 100),\n observationTokens: currentObservationTokens,\n observationTokensThreshold: effectiveObservationTokensThreshold,\n observationTokensPercent: observationTokensPercent,\n willObserve: totalPendingTokens >= threshold,\n recordId: record.id,\n threadId,\n stepNumber,\n },\n };\n await writer.custom(progressPart).catch(() => {\n // Ignore errors if stream is closed\n });\n }\n\n // Track IDs of messages we've already saved with observation markers (sealed)\n // These IDs cannot be reused - if we see them again, we must regenerate\n const sealedIds: Set<string> = (state.sealedIds as Set<string>) ?? new Set<string>();\n\n if (stepNumber > 0 && totalPendingTokens >= threshold) {\n const lockKey = this.getLockKey(threadId, resourceId);\n let observationSucceeded = false;\n await this.withLock(lockKey, async () => {\n const freshRecord = await this.getOrCreateRecord(threadId, resourceId);\n const freshAllMessages = messageList.get.all.db();\n const freshUnobservedMessages = this.getUnobservedMessages(freshAllMessages, freshRecord);\n\n // Re-check threshold inside the lock. Another thread sharing this resource\n // may have already observed, advancing lastObservedAt and reducing the\n // other-threads token count. Without this check, both threads would observe\n // redundantly.\n const freshCurrentTokens = this.tokenCounter.countMessages(freshUnobservedMessages);\n const freshPending = freshRecord.pendingMessageTokens ?? 
0;\n let freshOtherThreadTokens = 0;\n if (this.scope === 'resource' && resourceId) {\n const freshOtherContext = await this.loadOtherThreadsContext(resourceId, threadId);\n freshOtherThreadTokens = freshOtherContext ? this.tokenCounter.countString(freshOtherContext) : 0;\n }\n const freshTotal = freshPending + freshCurrentTokens + freshOtherThreadTokens;\n if (freshTotal < threshold) {\n return;\n }\n\n // Snapshot lastObservedAt BEFORE observation runs.\n // InMemoryMemory returns object references, so freshRecord.lastObservedAt\n // gets mutated by doSynchronousObservation/doResourceScopedObservation.\n const preObservationTime = freshRecord.lastObservedAt?.getTime() ?? 0;\n\n if (freshUnobservedMessages.length > 0) {\n try {\n if (this.scope === 'resource' && resourceId) {\n await this.doResourceScopedObservation(\n freshRecord,\n threadId,\n resourceId,\n freshUnobservedMessages,\n writer,\n abortSignal,\n );\n } else {\n await this.doSynchronousObservation(\n freshRecord,\n threadId,\n freshUnobservedMessages,\n writer,\n abortSignal,\n );\n }\n // Check if observation actually updated lastObservedAt\n const updatedRecord = await this.getOrCreateRecord(threadId, resourceId);\n const updatedTime = updatedRecord.lastObservedAt?.getTime() ?? 0;\n observationSucceeded = updatedTime > preObservationTime;\n } catch (error) {\n // If the abort signal fired, use tripwire to cleanly exit\n if (abortSignal?.aborted) {\n abort('Agent execution was aborted');\n } else {\n abort(\n `Encountered error during memory observation ${error instanceof Error ? 
error.message : JSON.stringify(error, null, 2)}`,\n );\n }\n // Observation failed - don't clear messages\n observationSucceeded = false;\n }\n }\n });\n\n // After observation, find the marker and remove observed messages.\n // We must do this BEFORE clearing, because clear.input/response.db() only\n // removes messages tracked as 'input'/'response' — not messages that were\n // previously per-step-saved and re-added as 'memory' source.\n // By using the marker + removeByIds, we correctly remove ALL observed messages\n // regardless of their source tracking.\n if (observationSucceeded) {\n const allMsgs = messageList.get.all.db();\n let markerIdx = -1;\n let markerMsg: MastraDBMessage | null = null;\n\n // Find the last observation end marker\n for (let i = allMsgs.length - 1; i >= 0; i--) {\n const msg = allMsgs[i];\n if (!msg) continue;\n if (this.findLastCompletedObservationBoundary(msg) !== -1) {\n markerIdx = i;\n markerMsg = msg;\n break;\n }\n }\n\n if (markerMsg && markerIdx !== -1) {\n // Collect all messages before the marker (these are fully observed)\n const idsToRemove: string[] = [];\n const messagesToSave: MastraDBMessage[] = [];\n\n for (let i = 0; i < markerIdx; i++) {\n const msg = allMsgs[i];\n if (msg?.id && msg.id !== 'om-continuation') {\n idsToRemove.push(msg.id);\n messagesToSave.push(msg);\n }\n }\n\n // Also include the marker message itself in the save\n messagesToSave.push(markerMsg);\n\n // Filter marker message to only unobserved parts\n const unobservedParts = this.getUnobservedParts(markerMsg);\n if (unobservedParts.length === 0) {\n // Marker message is fully observed — remove it too\n if (markerMsg.id) {\n idsToRemove.push(markerMsg.id);\n }\n } else if (unobservedParts.length < (markerMsg.content?.parts?.length ?? 
0)) {\n // Trim marker message to only unobserved parts (in-place)\n markerMsg.content.parts = unobservedParts;\n }\n\n // Save all observed messages (with their markers) to DB\n if (messagesToSave.length > 0) {\n await this.saveMessagesWithSealedIdTracking(messagesToSave, sealedIds, threadId, resourceId, state);\n }\n\n // Remove observed messages from context\n if (idsToRemove.length > 0) {\n messageList.removeByIds(idsToRemove);\n }\n } else {\n // No marker found — fall back to source-based clearing\n const newInput = messageList.clear.input.db();\n const newOutput = messageList.clear.response.db();\n const messagesToSave = [...newInput, ...newOutput];\n if (messagesToSave.length > 0) {\n await this.saveMessagesWithSealedIdTracking(messagesToSave, sealedIds, threadId, resourceId, state);\n }\n }\n\n // Also clear any remaining input/response tracking that wasn't caught by removeByIds\n // (e.g., messages added after the marker during the observation)\n messageList.clear.input.db();\n messageList.clear.response.db();\n }\n\n // Re-fetch record to get updated observations\n record = await this.getOrCreateRecord(threadId, resourceId);\n } else if (stepNumber > 0) {\n // ── PER-STEP SAVE ──────────────────────────────────────────────────\n // Threshold not reached, but we still need to persist messages incrementally.\n // Without this, messages would be lost if the agent is interrupted or\n // the process exits before processOutputResult runs.\n //\n // Pattern: clear → save → re-add\n // 1. clear: get messages and remove from \"unsaved\" tracking\n // 2. save: persist to storage\n // 3. 
re-add: put back in context (deduped by messageList.add)\n // ────────────────────────────────────────────────────────────────────\n const newInput = messageList.clear.input.db();\n const newOutput = messageList.clear.response.db();\n const messagesToSave = [...newInput, ...newOutput];\n\n if (messagesToSave.length > 0) {\n await this.saveMessagesWithSealedIdTracking(messagesToSave, sealedIds, threadId, resourceId, state);\n\n // Re-add messages to context so the agent can still see them\n for (const msg of messagesToSave) {\n messageList.add(msg, 'memory');\n }\n }\n }\n }\n\n // ════════════════════════════════════════════════════════════════════════\n // STEP 3: INJECT OBSERVATIONS INTO CONTEXT\n // ════════════════════════════════════════════════════════════════════════\n const thread = await this.storage.getThreadById({ threadId });\n const threadOMMetadata = getThreadOMMetadata(thread?.metadata);\n const currentTask = threadOMMetadata?.currentTask;\n const suggestedResponse = threadOMMetadata?.suggestedResponse;\n const currentDate = (requestContext?.get('currentDate') as Date | undefined) ?? new Date();\n\n if (record.activeObservations) {\n const observationSystemMessage = this.formatObservationsForContext(\n record.activeObservations,\n currentTask,\n suggestedResponse,\n unobservedContextBlocks,\n currentDate,\n );\n\n // Clear any existing observation system message and add fresh one\n messageList.clearSystemMessages('observational-memory');\n messageList.addSystem(observationSystemMessage, 'observational-memory');\n\n // Add continuation reminder\n const continuationMessage: MastraDBMessage = {\n id: `om-continuation`,\n role: 'user',\n createdAt: new Date(0),\n content: {\n format: 2,\n parts: [\n {\n type: 'text',\n text: `<system-reminder>This message is not from the user, the conversation history grew too long and wouldn't fit in context! Thankfully the entire conversation is stored in your memory observations. 
Please continue from where the observations left off. Do not refer to your \"memory observations\" directly, the user doesn't know about them, they are your memories! Just respond naturally as if you're remembering the conversation (you are!). Do not say \"Hi there!\" or \"based on our previous conversation\" as if the conversation is just starting, this is not a new conversation. This is an ongoing conversation, keep continuity by responding based on your memory. For example do not say \"I understand. I've reviewed my memory observations\", or \"I remember [...]\". Answer naturally following the suggestion from your memory. Note that your memory may contain a suggested first response, which you should follow.\n\nIMPORTANT: this system reminder is NOT from the user. The system placed it here as part of your memory system. This message is part of you remembering your conversation with the user.\n\nNOTE: Any messages following this system reminder are newer than your memories.\n</system-reminder>`,\n },\n ],\n },\n threadId,\n resourceId,\n };\n messageList.add(continuationMessage, 'memory');\n }\n\n // ════════════════════════════════════════════════════════════════════════\n // STEP 4: FILTER OUT ALREADY-OBSERVED MESSAGES (historical only)\n // On step 0, historical messages loaded from DB may contain observation\n // markers from a previous session. 
Remove those observed messages.\n // For current-session observations, this is handled in the post-observation\n // block above (which runs while the marker is still in messageList).\n // ════════════════════════════════════════════════════════════════════════\n if (stepNumber === 0) {\n const allMessages = messageList.get.all.db();\n\n // Find the message with the last observation end marker\n let markerMessageIndex = -1;\n let markerMessage: MastraDBMessage | null = null;\n\n for (let i = allMessages.length - 1; i >= 0; i--) {\n const msg = allMessages[i];\n if (!msg) continue;\n if (this.findLastCompletedObservationBoundary(msg) !== -1) {\n markerMessageIndex = i;\n markerMessage = msg;\n break;\n }\n }\n\n if (markerMessage && markerMessageIndex !== -1) {\n const messagesToRemove: string[] = [];\n for (let i = 0; i < markerMessageIndex; i++) {\n const msg = allMessages[i];\n if (msg?.id && msg.id !== 'om-continuation') {\n messagesToRemove.push(msg.id);\n }\n }\n\n if (messagesToRemove.length > 0) {\n messageList.removeByIds(messagesToRemove);\n }\n\n // Filter marker message to only unobserved parts\n const unobservedParts = this.getUnobservedParts(markerMessage);\n if (unobservedParts.length === 0) {\n if (markerMessage.id) {\n messageList.removeByIds([markerMessage.id]);\n }\n } else if (unobservedParts.length < (markerMessage.content?.parts?.length ?? 
0)) {\n markerMessage.content.parts = unobservedParts;\n }\n }\n }\n\n return messageList;\n }\n\n /**\n * Save any unsaved messages at the end of the agent turn.\n *\n * This is the \"final save\" that catches messages that processInputStep didn't save\n * (e.g., when the observation threshold was never reached, or on single-step execution).\n * Without this, messages would be lost because MessageHistory is disabled when OM is active.\n */\n async processOutputResult(args: ProcessOutputResultArgs): Promise<MessageList | MastraDBMessage[]> {\n const { messageList, requestContext, state: _state } = args;\n // Default state to {} for backward compat with older @mastra/core that doesn't pass state\n const state = _state ?? ({} as Record<string, unknown>);\n\n const context = this.getThreadContext(requestContext, messageList);\n if (!context) {\n return messageList;\n }\n\n const { threadId, resourceId } = context;\n\n // Check if readOnly\n const memoryContext = parseMemoryRequestContext(requestContext);\n const readOnly = memoryContext?.memoryConfig?.readOnly;\n if (readOnly) {\n return messageList;\n }\n\n // Final save: persist any messages that weren't saved during per-step saves\n // (e.g., the final assistant response after the last processInputStep)\n const newInput = messageList.get.input.db();\n const newOutput = messageList.get.response.db();\n const messagesToSave = [...newInput, ...newOutput];\n\n if (messagesToSave.length === 0) {\n return messageList;\n }\n\n const sealedIds: Set<string> = (state.sealedIds as Set<string>) ?? 
new Set<string>();\n\n await this.saveMessagesWithSealedIdTracking(messagesToSave, sealedIds, threadId, resourceId, state);\n\n return messageList;\n }\n\n /**\n * Save messages to storage, regenerating IDs for any messages that were\n * previously saved with observation markers (sealed).\n *\n * After saving, tracks which messages now have observation markers\n * so their IDs won't be reused in future save cycles.\n */\n private async saveMessagesWithSealedIdTracking(\n messagesToSave: MastraDBMessage[],\n sealedIds: Set<string>,\n threadId: string,\n resourceId: string | undefined,\n state: Record<string, unknown>,\n ): Promise<void> {\n // Regenerate IDs for messages that were already saved with observation markers\n // This prevents overwriting sealed messages in the DB\n for (const msg of messagesToSave) {\n if (sealedIds.has(msg.id)) {\n msg.id = crypto.randomUUID();\n }\n }\n\n await this.messageHistory.persistMessages({\n messages: messagesToSave,\n threadId,\n resourceId,\n });\n\n // After successful save, track IDs of messages that now have observation markers (sealed)\n // These IDs cannot be reused in future cycles\n for (const msg of messagesToSave) {\n if (this.findLastCompletedObservationBoundary(msg) !== -1) {\n sealedIds.add(msg.id);\n }\n }\n state.sealedIds = sealedIds;\n }\n\n /**\n * Load messages from storage that haven't been observed yet.\n * Uses cursor-based query with lastObservedAt timestamp for efficiency.\n *\n * In resource scope mode, loads messages for the entire resource (all threads).\n * In thread scope mode, loads messages for just the current thread.\n */\n private async loadUnobservedMessages(\n threadId: string,\n resourceId: string | undefined,\n lastObservedAt?: Date,\n ): Promise<MastraDBMessage[]> {\n // Add 1ms to lastObservedAt to make the filter exclusive (since dateRange.start is inclusive)\n // This prevents re-loading the same messages that were already observed\n const startDate = lastObservedAt ? 
new Date(lastObservedAt.getTime() + 1) : undefined;\n\n let result: { messages: MastraDBMessage[] };\n\n if (this.scope === 'resource' && resourceId) {\n // Resource scope: use the new listMessagesByResourceId method\n result = await this.storage.listMessagesByResourceId({\n resourceId,\n perPage: false, // Get all messages (no pagination limit)\n orderBy: { field: 'createdAt', direction: 'ASC' },\n filter: startDate\n ? {\n dateRange: {\n start: startDate,\n },\n }\n : undefined,\n });\n } else {\n // Thread scope: use listMessages with threadId\n result = await this.storage.listMessages({\n threadId,\n perPage: false, // Get all messages (no pagination limit)\n orderBy: { field: 'createdAt', direction: 'ASC' },\n filter: startDate\n ? {\n dateRange: {\n start: startDate,\n },\n }\n : undefined,\n });\n }\n\n return result.messages;\n }\n\n /**\n * Load unobserved messages from other threads (not the current thread) for a resource.\n * Called fresh each step so it reflects the latest lastObservedAt cursors\n * after observations complete.\n */\n private async loadOtherThreadsContext(resourceId: string, currentThreadId: string): Promise<string | undefined> {\n const { threads: allThreads } = await this.storage.listThreads({ filter: { resourceId } });\n\n const messagesByThread = new Map<string, MastraDBMessage[]>();\n\n for (const thread of allThreads) {\n // Skip current thread — its messages are already in messageList\n if (thread.id === currentThreadId) continue;\n\n const omMetadata = getThreadOMMetadata(thread.metadata);\n const threadLastObservedAt = omMetadata?.lastObservedAt;\n const startDate = threadLastObservedAt ? new Date(new Date(threadLastObservedAt).getTime() + 1) : undefined;\n\n const result = await this.storage.listMessages({\n threadId: thread.id,\n perPage: false,\n orderBy: { field: 'createdAt', direction: 'ASC' },\n filter: startDate ? 
{ dateRange: { start: startDate } } : undefined,\n });\n\n // Filter out messages already observed in this instance's lifetime\n const filtered = result.messages.filter(m => !this.observedMessageIds.has(m.id));\n\n if (filtered.length > 0) {\n messagesByThread.set(thread.id, filtered);\n }\n }\n\n if (messagesByThread.size === 0) return undefined;\n\n const blocks = await this.formatUnobservedContextBlocks(messagesByThread, currentThreadId);\n return blocks || undefined;\n }\n\n /**\n * Format unobserved messages from other threads as <unobserved-context> blocks.\n * These are injected into the Actor's context so it has awareness of activity\n * in other threads for the same resource.\n */\n private async formatUnobservedContextBlocks(\n messagesByThread: Map<string, MastraDBMessage[]>,\n currentThreadId: string,\n ): Promise<string> {\n const blocks: string[] = [];\n\n for (const [threadId, messages] of messagesByThread) {\n // Skip current thread - those go in normal message history\n if (threadId === currentThreadId) continue;\n\n // Skip if no messages\n if (messages.length === 0) continue;\n\n // Format messages with timestamps, truncating large parts (e.g. 
tool results)\n // since this is injected as context for the actor, not sent to the observer\n const formattedMessages = formatMessagesForObserver(messages, { maxPartLength: 500 });\n\n if (formattedMessages) {\n const obscuredId = await this.representThreadIDInContext(threadId);\n blocks.push(`<other-conversation id=\"${obscuredId}\">\n${formattedMessages}\n</other-conversation>`);\n }\n }\n\n return blocks.join('\\n\\n');\n }\n\n private async representThreadIDInContext(threadId: string): Promise<string> {\n if (this.shouldObscureThreadIds) {\n // Check cache first\n const cached = this.threadIdCache.get(threadId);\n if (cached) return cached;\n\n // Use xxhash (32-bit) to create short, opaque, non-reversible identifiers\n // This prevents LLMs from recognizing patterns like \"answer_\" in base64\n const hasher = await this.hasher;\n const hashed = hasher.h32ToString(threadId);\n this.threadIdCache.set(threadId, hashed);\n return hashed;\n }\n return threadId;\n }\n\n /**\n * Strip any thread tags that the Observer might have added.\n * Thread attribution is handled externally by the system, not by the Observer.\n * This is a defense-in-depth measure.\n */\n private stripThreadTags(observations: string): string {\n // Remove any <thread...> or </thread> tags the Observer might add\n return observations.replace(/<thread[^>]*>|<\\/thread>/gi, '').trim();\n }\n\n /**\n * Get the maximum createdAt timestamp from a list of messages.\n * Used to set lastObservedAt to the most recent message timestamp instead of current time.\n * This ensures historical data (like LongMemEval fixtures) works correctly.\n */\n private getMaxMessageTimestamp(messages: MastraDBMessage[]): Date {\n let maxTime = 0;\n for (const msg of messages) {\n if (msg.createdAt) {\n const msgTime = new Date(msg.createdAt).getTime();\n if (msgTime > maxTime) {\n maxTime = msgTime;\n }\n }\n }\n // If no valid timestamps found, fall back to current time\n return maxTime > 0 ? 
new Date(maxTime) : new Date();\n }\n\n /**\n * Wrap observations in a thread attribution tag.\n * Used in resource scope to track which thread observations came from.\n */\n private async wrapWithThreadTag(threadId: string, observations: string): Promise<string> {\n // First strip any thread tags the Observer might have added\n const cleanObservations = this.stripThreadTags(observations);\n const obscuredId = await this.representThreadIDInContext(threadId);\n return `<thread id=\"${obscuredId}\">\\n${cleanObservations}\\n</thread>`;\n }\n\n /**\n * Append or merge new thread sections.\n * If the new section has the same thread ID and date as an existing section,\n * merge the observations into that section to reduce token usage.\n * Otherwise, append as a new section.\n */\n private replaceOrAppendThreadSection(\n existingObservations: string,\n _threadId: string,\n newThreadSection: string,\n ): string {\n if (!existingObservations) {\n return newThreadSection;\n }\n\n // Extract thread ID and date from new section\n const threadIdMatch = newThreadSection.match(/<thread id=\"([^\"]+)\">/);\n const dateMatch = newThreadSection.match(/Date:\\s*([A-Za-z]+\\s+\\d+,\\s+\\d+)/);\n\n if (!threadIdMatch || !dateMatch) {\n // Can't parse, just append\n return `${existingObservations}\\n\\n${newThreadSection}`;\n }\n\n const newThreadId = threadIdMatch[1]!;\n const newDate = dateMatch[1]!;\n\n // Look for existing section with same thread ID and date\n const existingPattern = new RegExp(\n `<thread id=\"${newThreadId}\">\\\\s*Date:\\\\s*${newDate.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')}([\\\\s\\\\S]*?)</thread>`,\n );\n const existingMatch = existingObservations.match(existingPattern);\n\n if (existingMatch) {\n // Found existing section with same thread ID and date - merge observations\n // Extract just the observations from the new section (after the Date: line)\n const newObsMatch = newThreadSection.match(/<thread 
id=\"[^\"]+\">[\\s\\S]*?Date:[^\\n]*\\n([\\s\\S]*?)\\n<\\/thread>/);\n if (newObsMatch && newObsMatch[1]) {\n const newObsContent = newObsMatch[1].trim();\n // Insert new observations at the end of the existing section (before </thread>)\n const mergedSection = existingObservations.replace(existingPattern, match => {\n // Remove closing </thread>, add new observations, add closing </thread>\n const withoutClose = match.replace(/<\\/thread>$/, '').trimEnd();\n return `${withoutClose}\\n${newObsContent}\\n</thread>`;\n });\n return mergedSection;\n }\n }\n\n // No existing section with same thread ID and date - append\n return `${existingObservations}\\n\\n${newThreadSection}`;\n }\n\n /**\n * Sort threads by their oldest unobserved message.\n * Returns thread IDs in order from oldest to most recent.\n * This ensures no thread's messages get \"stuck\" unobserved.\n */\n private sortThreadsByOldestMessage(messagesByThread: Map<string, MastraDBMessage[]>): string[] {\n const threadOrder = Array.from(messagesByThread.entries())\n .map(([threadId, messages]) => {\n // Find oldest message timestamp\n const oldestTimestamp = Math.min(\n ...messages.map(m => (m.createdAt ? new Date(m.createdAt).getTime() : Date.now())),\n );\n return { threadId, oldestTimestamp };\n })\n .sort((a, b) => a.oldestTimestamp - b.oldestTimestamp);\n\n return threadOrder.map(t => t.threadId);\n }\n\n /**\n * Do synchronous observation (fallback when no buffering)\n */\n private async doSynchronousObservation(\n record: ObservationalMemoryRecord,\n threadId: string,\n unobservedMessages: MastraDBMessage[],\n writer?: ProcessorStreamWriter,\n abortSignal?: AbortSignal,\n ): Promise<void> {\n // Emit debug event for observation triggered\n this.emitDebugEvent({\n type: 'observation_triggered',\n timestamp: new Date(),\n threadId,\n resourceId: record.resourceId ?? 
'',\n previousObservations: record.activeObservations,\n messages: unobservedMessages.map(m => ({\n role: m.role,\n content: typeof m.content === 'string' ? m.content : JSON.stringify(m.content),\n })),\n });\n\n // ════════════════════════════════════════════════════════════\n // LOCKING: Acquire lock and re-check\n // ════════════════════════════════════════════════════════════\n await this.storage.setObservingFlag(record.id, true);\n\n // Generate unique cycle ID for this observation cycle\n // This ties together the start/end/failed markers\n const cycleId = crypto.randomUUID();\n\n // Insert START marker before observation\n const tokensToObserve = this.tokenCounter.countMessages(unobservedMessages);\n const lastMessage = unobservedMessages[unobservedMessages.length - 1];\n const startedAt = new Date().toISOString();\n\n if (lastMessage?.id) {\n const startMarker = this.createObservationStartMarker({\n cycleId,\n operationType: 'observation',\n tokensToObserve,\n recordId: record.id,\n threadId,\n threadIds: [threadId],\n });\n // Stream the start marker to the UI first - this adds the part via stream handler\n if (writer) {\n await writer.custom(startMarker).catch(() => {\n // Ignore errors from streaming - observation should continue\n });\n }\n\n // Then add to message (skipPush since writer.custom already added the part)\n }\n\n try {\n // Re-check: reload record to see if another request already observed\n const freshRecord = await this.storage.getObservationalMemory(record.threadId, record.resourceId);\n if (freshRecord && freshRecord.lastObservedAt && record.lastObservedAt) {\n if (freshRecord.lastObservedAt > record.lastObservedAt) {\n return;\n }\n }\n\n const result = await this.callObserver(\n freshRecord?.activeObservations ?? record.activeObservations,\n unobservedMessages,\n abortSignal,\n );\n\n // Build new observations (use freshRecord if available)\n const existingObservations = freshRecord?.activeObservations ?? record.activeObservations ?? 
'';\n let newObservations: string;\n if (this.scope === 'resource') {\n // In resource scope: wrap with thread tag and replace/append\n const threadSection = await this.wrapWithThreadTag(threadId, result.observations);\n newObservations = this.replaceOrAppendThreadSection(existingObservations, threadId, threadSection);\n } else {\n // In thread scope: simple append\n newObservations = existingObservations\n ? `${existingObservations}\\n\\n${result.observations}`\n : result.observations;\n }\n\n let totalTokenCount = this.tokenCounter.countObservations(newObservations);\n\n // Calculate tokens generated in THIS cycle only (for UI marker)\n const cycleObservationTokens = this.tokenCounter.countObservations(result.observations);\n\n // Use the max message timestamp as cursor instead of current time\n // This ensures historical data (like LongMemEval fixtures) works correctly\n const lastObservedAt = this.getMaxMessageTimestamp(unobservedMessages);\n\n // Collect message IDs being observed for the safeguard\n // Merge with existing IDs, filter to only keep IDs newer than lastObservedAt\n const newMessageIds = unobservedMessages.map(m => m.id);\n const existingIds = freshRecord?.observedMessageIds ?? record.observedMessageIds ?? [];\n const allObservedIds = [...new Set([...(Array.isArray(existingIds) ? existingIds : []), ...newMessageIds])];\n\n await this.storage.updateActiveObservations({\n id: record.id,\n observations: newObservations,\n tokenCount: totalTokenCount,\n lastObservedAt,\n observedMessageIds: allObservedIds,\n });\n\n // Save thread-specific metadata (currentTask, suggestedResponse only)\n if (result.suggestedContinuation || result.currentTask) {\n const thread = await this.storage.getThreadById({ threadId });\n if (thread) {\n const newMetadata = setThreadOMMetadata(thread.metadata, {\n suggestedResponse: result.suggestedContinuation,\n currentTask: result.currentTask,\n });\n await this.storage.updateThread({\n id: threadId,\n title: thread.title ?? 
'',\n metadata: newMetadata,\n });\n }\n }\n\n // ════════════════════════════════════════════════════════════════════════\n // INSERT END MARKER after successful observation\n // This marks the boundary between observed and unobserved parts\n // ════════════════════════════════════════════════════════════════════════\n if (lastMessage?.id) {\n const endMarker = this.createObservationEndMarker({\n cycleId,\n operationType: 'observation',\n startedAt,\n tokensObserved: tokensToObserve,\n observationTokens: cycleObservationTokens,\n observations: result.observations,\n currentTask: result.currentTask,\n suggestedResponse: result.suggestedContinuation,\n recordId: record.id,\n threadId,\n });\n\n // Stream the end marker to the UI first - this adds the part via stream handler\n if (writer) {\n await writer.custom(endMarker).catch(() => {\n // Ignore errors from streaming - observation should continue\n });\n }\n\n // Then seal the message (skipPush since writer.custom already added the part)\n }\n\n // Emit debug event for observation complete\n this.emitDebugEvent({\n type: 'observation_complete',\n timestamp: new Date(),\n threadId,\n resourceId: record.resourceId ?? '',\n observations: newObservations,\n rawObserverOutput: result.observations,\n previousObservations: record.activeObservations,\n messages: unobservedMessages.map(m => ({\n role: m.role,\n content: typeof m.content === 'string' ? m.content : JSON.stringify(m.content),\n })),\n usage: result.usage,\n });\n\n // Check for reflection\n await this.maybeReflect(\n { ...record, activeObservations: newObservations },\n totalTokenCount,\n threadId,\n writer,\n abortSignal,\n );\n } catch (error) {\n // Insert FAILED marker on error\n if (lastMessage?.id) {\n const failedMarker = this.createObservationFailedMarker({\n cycleId,\n operationType: 'observation',\n startedAt,\n tokensAttempted: tokensToObserve,\n error: error instanceof Error ? 
error.message : String(error),\n recordId: record.id,\n threadId,\n });\n\n // Stream the failed marker to the UI first - this adds the part via stream handler\n if (writer) {\n await writer.custom(failedMarker).catch(() => {\n // Ignore errors from streaming - observation should continue\n });\n }\n\n // Then seal the message (skipPush since writer.custom already added the part)\n }\n // If aborted, re-throw so the main agent loop can handle cancellation\n if (abortSignal?.aborted) {\n throw error;\n }\n // Log the error but don't re-throw - observation failure should not crash the agent\n console.error(`[OM] Observation failed:`, error instanceof Error ? error.message : String(error));\n } finally {\n await this.storage.setObservingFlag(record.id, false);\n }\n }\n\n /**\n * Resource-scoped observation: observe ALL threads with unobserved messages.\n * Threads are observed in oldest-first order to ensure no thread's messages\n * get \"stuck\" unobserved forever.\n *\n * Key differences from thread-scoped observation:\n * 1. Loads messages from ALL threads for the resource\n * 2. Observes threads one-by-one in oldest-first order\n * 3. Only updates lastObservedAt AFTER all threads are observed\n * 4. 
Only triggers reflection AFTER all threads are observed\n */\n private async doResourceScopedObservation(\n record: ObservationalMemoryRecord,\n currentThreadId: string,\n resourceId: string,\n currentThreadMessages: MastraDBMessage[],\n writer?: ProcessorStreamWriter,\n abortSignal?: AbortSignal,\n ): Promise<void> {\n // Clear debug entries at start of observation cycle\n\n // ════════════════════════════════════════════════════════════\n // PER-THREAD CURSORS: Load unobserved messages for each thread using its own lastObservedAt\n // This prevents message loss when threads have different observation progress\n // ════════════════════════════════════════════════════════════\n\n // First, get all threads for this resource to access their per-thread lastObservedAt\n const { threads: allThreads } = await this.storage.listThreads({ filter: { resourceId } });\n const threadMetadataMap = new Map<string, { lastObservedAt?: string }>();\n\n for (const thread of allThreads) {\n const omMetadata = getThreadOMMetadata(thread.metadata);\n threadMetadataMap.set(thread.id, { lastObservedAt: omMetadata?.lastObservedAt });\n }\n\n // Load messages per-thread using each thread's own cursor\n const messagesByThread = new Map<string, MastraDBMessage[]>();\n\n for (const thread of allThreads) {\n const threadLastObservedAt = threadMetadataMap.get(thread.id)?.lastObservedAt;\n\n // Query messages for this specific thread AFTER its lastObservedAt\n // Add 1ms to make the filter exclusive (since dateRange.start is inclusive)\n // This prevents re-observing the same messages\n const startDate = threadLastObservedAt ? new Date(new Date(threadLastObservedAt).getTime() + 1) : undefined;\n\n const result = await this.storage.listMessages({\n threadId: thread.id,\n perPage: false,\n orderBy: { field: 'createdAt', direction: 'ASC' },\n filter: startDate ? 
{ dateRange: { start: startDate } } : undefined,\n });\n\n if (result.messages.length > 0) {\n messagesByThread.set(thread.id, result.messages);\n }\n }\n\n // Handle current thread messages (may not be in DB yet)\n // Merge with any DB messages for the current thread\n if (currentThreadMessages.length > 0) {\n const existingCurrentThreadMsgs = messagesByThread.get(currentThreadId) ?? [];\n const messageMap = new Map<string, MastraDBMessage>();\n\n // Add DB messages first\n for (const msg of existingCurrentThreadMsgs) {\n if (msg.id) messageMap.set(msg.id, msg);\n }\n\n // Add/override with current thread messages (they're more up-to-date)\n for (const msg of currentThreadMessages) {\n if (msg.id) messageMap.set(msg.id, msg);\n }\n\n messagesByThread.set(currentThreadId, Array.from(messageMap.values()));\n }\n\n // Filter out messages already observed in this instance's lifetime.\n // This can happen when doResourceScopedObservation re-queries the DB using per-thread\n // lastObservedAt cursors that haven't fully advanced past messages observed in a prior cycle.\n for (const [tid, msgs] of messagesByThread) {\n const filtered = msgs.filter(m => !this.observedMessageIds.has(m.id));\n if (filtered.length > 0) {\n messagesByThread.set(tid, filtered);\n } else {\n messagesByThread.delete(tid);\n }\n }\n // Count total messages\n let totalMessages = 0;\n for (const msgs of messagesByThread.values()) {\n totalMessages += msgs.length;\n }\n\n if (totalMessages === 0) {\n return;\n }\n\n // ════════════════════════════════════════════════════════════\n // THREAD SELECTION: Pick which threads to observe based on token threshold\n // - Sort by largest threads first (most messages = most value per Observer call)\n // - Accumulate until we hit the threshold\n // - This prevents making many small Observer calls for 1-message threads\n // ════════════════════════════════════════════════════════════\n const threshold = 
this.getMaxThreshold(this.observationConfig.messageTokens);\n\n // Calculate tokens per thread and sort by size (largest first)\n const threadTokenCounts = new Map<string, number>();\n for (const [threadId, msgs] of messagesByThread) {\n let tokens = 0;\n for (const msg of msgs) {\n tokens += this.tokenCounter.countMessage(msg);\n }\n threadTokenCounts.set(threadId, tokens);\n }\n\n const threadsBySize = Array.from(messagesByThread.keys()).sort((a, b) => {\n return (threadTokenCounts.get(b) ?? 0) - (threadTokenCounts.get(a) ?? 0);\n });\n\n // Select threads to observe until we hit the threshold\n let accumulatedTokens = 0;\n const threadsToObserve: string[] = [];\n\n for (const threadId of threadsBySize) {\n const threadTokens = threadTokenCounts.get(threadId) ?? 0;\n\n // If we've already accumulated enough, stop adding threads\n if (accumulatedTokens >= threshold) {\n break;\n }\n\n threadsToObserve.push(threadId);\n accumulatedTokens += threadTokens;\n }\n\n if (threadsToObserve.length === 0) {\n return;\n }\n\n // Now sort the selected threads by oldest message for consistent observation order\n const threadOrder = this.sortThreadsByOldestMessage(\n new Map(threadsToObserve.map(tid => [tid, messagesByThread.get(tid) ?? 
[]])),\n );\n\n // Debug: Log message counts per thread and date ranges\n\n // ════════════════════════════════════════════════════════════\n // LOCKING: Acquire lock and re-check\n // Another request may have already observed while we were loading messages\n // ════════════════════════════════════════════════════════════\n await this.storage.setObservingFlag(record.id, true);\n\n // Generate unique cycle ID for this observation cycle\n // This ties together the start/end/failed markers across all threads\n const cycleId = crypto.randomUUID();\n\n // Declare variables outside try block so they're accessible in catch\n const threadsWithMessages = new Map<string, MastraDBMessage[]>();\n const threadTokensToObserve = new Map<string, number>();\n let observationStartedAt = '';\n\n try {\n // Re-check: reload record to see if another request already observed\n const freshRecord = await this.storage.getObservationalMemory(null, resourceId);\n if (freshRecord && freshRecord.lastObservedAt && record.lastObservedAt) {\n if (freshRecord.lastObservedAt > record.lastObservedAt) {\n return;\n }\n }\n\n const existingObservations = freshRecord?.activeObservations ?? record.activeObservations ?? 
'';\n\n // ═════════════════════════════════════════���══════════════════\n // BATCHED MULTI-THREAD OBSERVATION: Single Observer call for all threads\n // This is much more efficient than calling the Observer for each thread individually\n // ════════════════════════════════════════════════════════════\n\n // Filter to only threads with messages\n for (const threadId of threadOrder) {\n const msgs = messagesByThread.get(threadId);\n if (msgs && msgs.length > 0) {\n threadsWithMessages.set(threadId, msgs);\n }\n }\n\n // Emit debug event for observation triggered (combined for all threads)\n this.emitDebugEvent({\n type: 'observation_triggered',\n timestamp: new Date(),\n threadId: threadOrder.join(','),\n resourceId,\n previousObservations: existingObservations,\n messages: Array.from(threadsWithMessages.values())\n .flat()\n .map(m => ({\n role: m.role,\n content: typeof m.content === 'string' ? m.content : JSON.stringify(m.content),\n })),\n });\n\n // ════════════════════════════════════════════════════════════════════════\n // INSERT START MARKERS before observation\n // Each thread gets its own start marker in its last message\n // ════════════════════════════════════════════════════════════════════════\n observationStartedAt = new Date().toISOString();\n const allThreadIds = Array.from(threadsWithMessages.keys());\n\n for (const [threadId, msgs] of threadsWithMessages) {\n const lastMessage = msgs[msgs.length - 1];\n const tokensToObserve = this.tokenCounter.countMessages(msgs);\n threadTokensToObserve.set(threadId, tokensToObserve);\n\n if (lastMessage?.id) {\n const startMarker = this.createObservationStartMarker({\n cycleId,\n operationType: 'observation',\n tokensToObserve,\n recordId: record.id,\n threadId,\n threadIds: allThreadIds,\n });\n // Stream the start marker to the UI first - this adds the part via stream handler\n if (writer) {\n await writer.custom(startMarker).catch(() => {\n // Ignore errors from streaming - observation should continue\n 
});\n }\n\n // Then add to message (skipPush since writer.custom already added the part)\n }\n }\n\n // ════════════════════════════════════════════════════════════\n // PARALLEL BATCHING: Chunk threads into batches and process in parallel\n // This combines batching efficiency with parallel execution\n // ════��═══════════════════════════════════════════════════════\n const maxTokensPerBatch =\n this.observationConfig.maxTokensPerBatch ?? OBSERVATIONAL_MEMORY_DEFAULTS.observation.maxTokensPerBatch;\n const orderedThreadIds = threadOrder.filter(tid => threadsWithMessages.has(tid));\n\n // Chunk threads into batches based on token count\n const batches: Array<{ threadIds: string[]; threadMap: Map<string, MastraDBMessage[]> }> = [];\n let currentBatch: { threadIds: string[]; threadMap: Map<string, MastraDBMessage[]> } = {\n threadIds: [],\n threadMap: new Map(),\n };\n let currentBatchTokens = 0;\n\n for (const threadId of orderedThreadIds) {\n const msgs = threadsWithMessages.get(threadId)!;\n const threadTokens = threadTokenCounts.get(threadId) ?? 
0;\n\n // If adding this thread would exceed the batch limit, start a new batch\n // (unless the current batch is empty - always include at least one thread)\n if (currentBatchTokens + threadTokens > maxTokensPerBatch && currentBatch.threadIds.length > 0) {\n batches.push(currentBatch);\n currentBatch = { threadIds: [], threadMap: new Map() };\n currentBatchTokens = 0;\n }\n\n currentBatch.threadIds.push(threadId);\n currentBatch.threadMap.set(threadId, msgs);\n currentBatchTokens += threadTokens;\n }\n\n // Don't forget the last batch\n if (currentBatch.threadIds.length > 0) {\n batches.push(currentBatch);\n }\n\n // Process batches in parallel\n const batchPromises = batches.map(async batch => {\n const batchResult = await this.callMultiThreadObserver(\n existingObservations,\n batch.threadMap,\n batch.threadIds,\n abortSignal,\n );\n return batchResult;\n });\n\n const batchResults = await Promise.all(batchPromises);\n\n // Merge all batch results into a single map and accumulate usage\n const multiThreadResults = new Map<\n string,\n {\n observations: string;\n currentTask?: string;\n suggestedContinuation?: string;\n }\n >();\n let totalBatchUsage = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };\n for (const batchResult of batchResults) {\n for (const [threadId, result] of batchResult.results) {\n multiThreadResults.set(threadId, result);\n }\n // Accumulate usage from each batch\n if (batchResult.usage) {\n totalBatchUsage.inputTokens += batchResult.usage.inputTokens ?? 0;\n totalBatchUsage.outputTokens += batchResult.usage.outputTokens ?? 0;\n totalBatchUsage.totalTokens += batchResult.usage.totalTokens ?? 
0;\n }\n }\n\n // Convert to the expected format for downstream processing\n const observationResults: Array<{\n threadId: string;\n threadMessages: MastraDBMessage[];\n result: {\n observations: string;\n currentTask?: string;\n suggestedContinuation?: string;\n };\n } | null> = [];\n\n for (const threadId of threadOrder) {\n const threadMessages = messagesByThread.get(threadId) ?? [];\n if (threadMessages.length === 0) continue;\n\n const result = multiThreadResults.get(threadId);\n if (!result) {\n continue;\n }\n\n // Debug: Log Observer output for this thread\n\n observationResults.push({\n threadId,\n threadMessages,\n result,\n });\n }\n\n // Combine results: wrap each thread's observations and append to existing\n let currentObservations = existingObservations;\n let cycleObservationTokens = 0; // Track total new observation tokens generated in this cycle\n\n for (const obsResult of observationResults) {\n if (!obsResult) continue;\n\n const { threadId, threadMessages, result } = obsResult;\n\n // Track tokens generated for this thread\n cycleObservationTokens += this.tokenCounter.countObservations(result.observations);\n\n // Wrap with thread tag and append (in thread order for consistency)\n const threadSection = await this.wrapWithThreadTag(threadId, result.observations);\n currentObservations = this.replaceOrAppendThreadSection(currentObservations, threadId, threadSection);\n\n // Update thread-specific metadata:\n // - lastObservedAt: ALWAYS update to track per-thread observation progress\n // - currentTask, suggestedResponse: only if present in result\n const threadLastObservedAt = this.getMaxMessageTimestamp(threadMessages);\n const thread = await this.storage.getThreadById({ threadId });\n if (thread) {\n const newMetadata = setThreadOMMetadata(thread.metadata, {\n lastObservedAt: threadLastObservedAt.toISOString(),\n ...(result.suggestedContinuation && { suggestedResponse: result.suggestedContinuation }),\n ...(result.currentTask && { currentTask: 
result.currentTask }),\n });\n await this.storage.updateThread({\n id: threadId,\n title: thread.title ?? '',\n metadata: newMetadata,\n });\n }\n\n // Emit debug event for observation complete (usage is for the entire batch, added to first thread only)\n const isFirstThread = observationResults.indexOf(obsResult) === 0;\n this.emitDebugEvent({\n type: 'observation_complete',\n timestamp: new Date(),\n threadId,\n resourceId,\n observations: threadSection,\n rawObserverOutput: result.observations,\n previousObservations: record.activeObservations,\n messages: threadMessages.map(m => ({\n role: m.role,\n content: typeof m.content === 'string' ? m.content : JSON.stringify(m.content),\n })),\n // Add batch usage to first thread's event only (to avoid double-counting)\n usage: isFirstThread && totalBatchUsage.totalTokens > 0 ? totalBatchUsage : undefined,\n });\n }\n\n // After ALL threads observed, update the record with final observations\n let totalTokenCount = this.tokenCounter.countObservations(currentObservations);\n\n // Compute global lastObservedAt as a \"high water mark\" across all threads\n // Note: Per-thread cursors (stored in ThreadOMMetadata.lastObservedAt) are the authoritative source\n // for determining which messages each thread has observed. This global value is used for:\n // - Quick concurrency checks (has any observation happened since we started?)\n // - Thread-scoped observation (non-resource scope)\n const observedMessages = observationResults\n .filter((r): r is NonNullable<typeof r> => r !== null)\n .flatMap(r => r.threadMessages);\n const lastObservedAt = this.getMaxMessageTimestamp(observedMessages);\n\n // Collect message IDs being observed for the safeguard\n const newMessageIds = observedMessages.map(m => m.id);\n const existingIds = record.observedMessageIds ?? 
[];\n const allObservedIds = [...new Set([...existingIds, ...newMessageIds])];\n\n await this.storage.updateActiveObservations({\n id: record.id,\n observations: currentObservations,\n tokenCount: totalTokenCount,\n lastObservedAt,\n observedMessageIds: allObservedIds,\n });\n\n // ════════════════════════════════════════════════════════════════════════\n // INSERT END MARKERS into each thread's last message\n // This completes the observation boundary (start markers were inserted above)\n // ════════════════════════════════════════════════════════════════════════\n for (const obsResult of observationResults) {\n if (!obsResult) continue;\n const { threadId, threadMessages, result } = obsResult;\n const lastMessage = threadMessages[threadMessages.length - 1];\n if (lastMessage?.id) {\n const tokensObserved = threadTokensToObserve.get(threadId) ?? this.tokenCounter.countMessages(threadMessages);\n const endMarker = this.createObservationEndMarker({\n cycleId,\n operationType: 'observation',\n startedAt: observationStartedAt,\n tokensObserved,\n observationTokens: cycleObservationTokens,\n observations: result.observations,\n currentTask: result.currentTask,\n suggestedResponse: result.suggestedContinuation,\n recordId: record.id,\n threadId,\n });\n\n // Stream the end marker to the UI first - this adds the part via stream handler\n if (writer) {\n await writer.custom(endMarker).catch(() => {\n // Ignore errors from streaming - observation should continue\n });\n }\n\n // Then seal the message (skipPush since writer.custom already added the part)\n }\n }\n\n // Check for reflection AFTER all threads are observed\n await this.maybeReflect(\n { ...record, activeObservations: currentObservations },\n totalTokenCount,\n currentThreadId,\n writer,\n abortSignal,\n );\n } catch (error) {\n // Insert FAILED markers into each thread's last message on error\n for (const [threadId, msgs] of threadsWithMessages) {\n const lastMessage = msgs[msgs.length - 1];\n if 
(lastMessage?.id) {\n const tokensAttempted = threadTokensToObserve.get(threadId) ?? 0;\n const failedMarker = this.createObservationFailedMarker({\n cycleId,\n operationType: 'observation',\n startedAt: observationStartedAt,\n tokensAttempted,\n error: error instanceof Error ? error.message : String(error),\n recordId: record.id,\n threadId,\n });\n\n // Stream the failed marker to the UI first - this adds the part via stream handler\n if (writer) {\n await writer.custom(failedMarker).catch(() => {\n // Ignore errors from streaming - observation should continue\n });\n }\n\n // Then seal the message (skipPush since writer.custom already added the part)\n }\n }\n // If aborted, re-throw so the main agent loop can handle cancellation\n if (abortSignal?.aborted) {\n throw error;\n }\n // Log the error but don't re-throw - observation failure should not crash the agent\n console.error(`[OM] Resource-scoped observation failed:`, error instanceof Error ? error.message : String(error));\n } finally {\n await this.storage.setObservingFlag(record.id, false);\n }\n }\n\n /**\n * Check if reflection needed and trigger if so.\n * SIMPLIFIED: Always uses synchronous reflection (async buffering disabled).\n */\n private async maybeReflect(\n record: ObservationalMemoryRecord,\n observationTokens: number,\n _threadId?: string,\n writer?: ProcessorStreamWriter,\n abortSignal?: AbortSignal,\n ): Promise<void> {\n if (!this.shouldReflect(observationTokens)) {\n return;\n }\n\n // ═══════════════════════════════════════════════════════════\n // LOCKING: Check if reflection is already in progress\n // ════════════════════════════════════════════════════════════\n if (record.isReflecting) {\n return;\n }\n\n const reflectThreshold = this.getMaxThreshold(this.reflectionConfig.observationTokens);\n\n // ════════════════════════════════════════════════════════════\n // SYNC PATH: Do synchronous reflection (blocking)\n // ════════════════════════════════════════════════════════════\n 
await this.storage.setReflectingFlag(record.id, true);\n\n // Generate unique cycle ID for this reflection\n const cycleId = crypto.randomUUID();\n const startedAt = new Date().toISOString();\n const threadId = _threadId ?? 'unknown';\n\n // Stream START marker for reflection\n if (writer) {\n const startMarker = this.createObservationStartMarker({\n cycleId,\n operationType: 'reflection',\n tokensToObserve: observationTokens,\n recordId: record.id,\n threadId,\n threadIds: [threadId],\n });\n await writer.custom(startMarker).catch(() => {});\n }\n\n // Emit reflection_triggered debug event\n this.emitDebugEvent({\n type: 'reflection_triggered',\n timestamp: new Date(),\n threadId,\n resourceId: record.resourceId ?? '',\n inputTokens: observationTokens,\n activeObservationsLength: record.activeObservations?.length ?? 0,\n });\n\n // Create mutable stream context for retry tracking\n const streamContext = writer\n ? {\n writer,\n cycleId,\n startedAt,\n recordId: record.id,\n threadId,\n }\n : undefined;\n\n try {\n const reflectResult = await this.callReflector(\n record.activeObservations,\n undefined,\n streamContext,\n reflectThreshold,\n abortSignal,\n );\n const reflectionTokenCount = this.tokenCounter.countObservations(reflectResult.observations);\n\n await this.storage.createReflectionGeneration({\n currentRecord: record,\n reflection: reflectResult.observations,\n tokenCount: reflectionTokenCount,\n });\n\n // Stream END marker for reflection (use streamContext values which may have been updated during retry)\n if (writer && streamContext) {\n const endMarker = this.createObservationEndMarker({\n cycleId: streamContext.cycleId,\n operationType: 'reflection',\n startedAt: streamContext.startedAt,\n tokensObserved: observationTokens,\n observationTokens: reflectionTokenCount,\n observations: reflectResult.observations,\n recordId: record.id,\n threadId,\n });\n await writer.custom(endMarker).catch(() => {});\n }\n\n // Emit reflection_complete debug event 
with usage\n this.emitDebugEvent({\n type: 'reflection_complete',\n timestamp: new Date(),\n threadId,\n resourceId: record.resourceId ?? '',\n inputTokens: observationTokens,\n outputTokens: reflectionTokenCount,\n observations: reflectResult.observations,\n usage: reflectResult.usage,\n });\n } catch (error) {\n // Stream FAILED marker for reflection (use streamContext values which may have been updated during retry)\n if (writer && streamContext) {\n const failedMarker = this.createObservationFailedMarker({\n cycleId: streamContext.cycleId,\n operationType: 'reflection',\n startedAt: streamContext.startedAt,\n tokensAttempted: observationTokens,\n error: error instanceof Error ? error.message : String(error),\n recordId: record.id,\n threadId,\n });\n await writer.custom(failedMarker).catch(() => {});\n }\n // If aborted, re-throw so the main agent loop can handle cancellation\n if (abortSignal?.aborted) {\n throw error;\n }\n // Log the error but don't re-throw - reflection failure should not crash the agent\n console.error(`[OM] Reflection failed:`, error instanceof Error ? 
error.message : String(error));\n } finally {\n await this.storage.setReflectingFlag(record.id, false);\n }\n }\n\n /**\n * Manually trigger observation.\n */\n async observe(threadId: string, resourceId?: string, _prompt?: string): Promise<void> {\n const lockKey = this.getLockKey(threadId, resourceId);\n\n await this.withLock(lockKey, async () => {\n // Re-fetch record inside lock to get latest state\n const freshRecord = await this.getOrCreateRecord(threadId, resourceId);\n\n if (this.scope === 'resource' && resourceId) {\n // Resource scope: observe all threads with unobserved messages\n await this.doResourceScopedObservation(\n freshRecord,\n threadId,\n resourceId,\n [], // no in-flight messages — everything is already in the DB\n );\n } else {\n // Thread scope: observe unobserved messages for this thread\n const unobservedMessages = await this.loadUnobservedMessages(\n threadId,\n resourceId,\n freshRecord.lastObservedAt ? new Date(freshRecord.lastObservedAt) : undefined,\n );\n\n if (unobservedMessages.length === 0) {\n return;\n }\n\n await this.doSynchronousObservation(freshRecord, threadId, unobservedMessages);\n }\n });\n }\n\n /**\n * Manually trigger reflection with optional guidance prompt.\n *\n * @example\n * ```ts\n * // Trigger reflection with specific focus\n * await om.reflect(threadId, resourceId,\n * \"focus on the authentication implementation, only keep minimal details about UI styling\"\n * );\n * ```\n */\n async reflect(threadId: string, resourceId?: string, prompt?: string): Promise<void> {\n const record = await this.getOrCreateRecord(threadId, resourceId);\n\n if (!record.activeObservations) {\n return;\n }\n\n await this.storage.setReflectingFlag(record.id, true);\n\n try {\n const reflectThreshold = this.getMaxThreshold(this.reflectionConfig.observationTokens);\n const reflectResult = await this.callReflector(record.activeObservations, prompt, undefined, reflectThreshold);\n const reflectionTokenCount = 
this.tokenCounter.countObservations(reflectResult.observations);\n\n await this.storage.createReflectionGeneration({\n currentRecord: record,\n reflection: reflectResult.observations,\n tokenCount: reflectionTokenCount,\n });\n\n // Note: Thread metadata (currentTask, suggestedResponse) is preserved on each thread\n // and doesn't need to be updated during reflection - it was set during observation\n } finally {\n await this.storage.setReflectingFlag(record.id, false);\n }\n }\n\n /**\n * Get current observations for a thread/resource\n */\n async getObservations(threadId: string, resourceId?: string): Promise<string | undefined> {\n const ids = this.getStorageIds(threadId, resourceId);\n const record = await this.storage.getObservationalMemory(ids.threadId, ids.resourceId);\n return record?.activeObservations;\n }\n\n /**\n * Get current record for a thread/resource\n */\n async getRecord(threadId: string, resourceId?: string): Promise<ObservationalMemoryRecord | null> {\n const ids = this.getStorageIds(threadId, resourceId);\n return this.storage.getObservationalMemory(ids.threadId, ids.resourceId);\n }\n\n /**\n * Get observation history (previous generations)\n */\n async getHistory(threadId: string, resourceId?: string, limit?: number): Promise<ObservationalMemoryRecord[]> {\n const ids = this.getStorageIds(threadId, resourceId);\n return this.storage.getObservationalMemoryHistory(ids.threadId, ids.resourceId, limit);\n }\n\n /**\n * Clear all memory for a specific thread/resource\n */\n async clear(threadId: string, resourceId?: string): Promise<void> {\n const ids = this.getStorageIds(threadId, resourceId);\n await this.storage.clearObservationalMemory(ids.threadId, ids.resourceId);\n }\n\n /**\n * Get the underlying storage adapter\n */\n getStorage(): MemoryStorage {\n return this.storage;\n }\n\n /**\n * Get the token counter\n */\n getTokenCounter(): TokenCounter {\n return this.tokenCounter;\n }\n\n /**\n * Get current observation configuration\n */\n 
getObservationConfig(): ResolvedObservationConfig {\n return this.observationConfig;\n }\n\n /**\n * Get current reflection configuration\n */\n getReflectionConfig(): ResolvedReflectionConfig {\n return this.reflectionConfig;\n }\n}\n"]}
|