@semiont/jobs 0.4.20 → 0.4.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -4
- package/dist/index.d.ts +42 -224
- package/dist/index.js +1525 -12487
- package/dist/index.js.map +1 -1
- package/dist/smelter-main.d.ts +2 -0
- package/dist/smelter-main.js +10063 -0
- package/dist/smelter-main.js.map +1 -0
- package/dist/worker-main.d.ts +2 -0
- package/dist/worker-main.js +1646 -0
- package/dist/worker-main.js.map +1 -0
- package/package.json +12 -2
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/workers/detection/motivation-prompts.ts","../src/workers/detection/motivation-parsers.ts","../../ontology/src/tag-schemas.ts","../src/workers/annotation-detection.ts","../src/workers/detection/entity-extractor.ts","../src/workers/generation/resource-generation.ts","../src/processors.ts","../src/worker-process.ts","../src/logger.ts","../src/worker-main.ts"],"names":["logger","response","validateAndCorrectOffsets"],"mappings":";;;;;;;;;;;;;;AAOO,IAAM,oBAAN,MAAwB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAU7B,OAAO,kBAAA,CACL,OAAA,EACA,YAAA,EACA,MACA,OAAA,EACQ;AACR,IAAA,IAAI,MAAA;AAEJ,IAAA,IAAI,YAAA,EAAc;AAEhB,MAAA,MAAM,YAAA,GAAe,IAAA,GAAO,CAAA,OAAA,EAAU,IAAI,CAAA,MAAA,CAAA,GAAW,EAAA;AACrD,MAAA,MAAM,kBAAkB,OAAA,GACpB;;AAAA,sBAAA,EAA6B,OAAO,CAAA,iCAAA,CAAA,GACpC,EAAA;AAEJ,MAAA,MAAA,GAAS,CAAA;;AAAA,EAEb,YAAY,CAAA,EAAG,YAAY,CAAA,EAAG,eAAe;;AAAA;AAAA;AAAA,EAI7C,OAAA,CAAQ,SAAA,CAAU,CAAA,EAAG,GAAI,CAAC;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA,CAAA,CAAA;AAAA,IAiBxB,CAAA,MAAO;AAEL,MAAA,MAAM,eAAe,IAAA,GACjB;;AAAA,YAAA,EAAmB,IAAI,CAAA,wBAAA,CAAA,GACvB,EAAA;AACJ,MAAA,MAAM,kBAAkB,OAAA,GACpB;AAAA,wBAAA,EAA6B,OAAO,CAAA,wBAAA,CAAA,GACpC;AAAA,+DAAA,CAAA;AAEJ,MAAA,MAAA,GAAS,CAAA;AAAA,+EAAA,EACkE,YAAY;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iDAAA,EAO1C,eAAe;;AAAA;AAAA;AAAA,EAIhE,OAAA,CAAQ,SAAA,CAAU,CAAA,EAAG,GAAI,CAAC;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA,CAAA,CAAA;AAAA,IAiBxB;AAEA,IAAA,OAAO,MAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,OAAO,oBAAA,CACL,OAAA,EACA,YAAA,EACA,OAAA,EACQ;AACR,IAAA,IAAI,MAAA;AAEJ,IAAA,IAAI,YAAA,EAAc;AAEhB,MAAA,MAAM,kBAAkB,OAAA,GACpB;;AAAA,sBAAA,EAA6B,OAAO,CAAA,mCAAA,CAAA,GACpC,EAAA;AAEJ,MAAA,MAAA,GAAS,CAAA;;AAAA,EAEb,YAAY,GAAG,eAAe;;AAAA;AAAA;AAAA,EAI9B,OAAA,CAAQ,SAAA,CAAU,CAAA,EAAG,GAAI,CAAC;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA,CAAA,CAAA;AAAA,IAgBxB,CAAA,MAAO;AAEL,MAAA,MAAM,kBAAkB,OAAA,GACpB;AAAA,wBAAA,EAA6B,OAAO,CAAA,0BAAA,CAAA,GACpC;AAAA,sDAAA,CAAA;AAEJ,MAAA,MAAA,GAAS,CAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAAA,EASqB,eAAe;;AAAA;AAAA;AAAA,EAIjD,OAAA,CAAQ,SAAA,CAAU,CAAA,EAAG,GAAI,CAAC;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA,CAAA,CAAA;AAAA,IAgBxB;AAEA,IAAA,OAAO,MAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,OAAO,qBAAA,CACL,OAAA,EACA,YAAA,EACA,MACA,OAAA,EACQ;AACR,IAAA,IAAI,MAAA;AAEJ,IAAA,IAAI,YAAA,EAAc;AAEhB,MAAA,MAAM,YAAA,GAAe,IAAA,GAAO,CAAA,OAAA,EAAU,IAAI,CAAA,MAAA,CAAA,GAAW,EAAA;AACrD,MAAA,MAAM,kBAAkB,OAAA,GACpB;;AAAA,sBAAA,EAA6B,OAAO,CAAA,oCAAA,CAAA,GACpC,EAAA;AAEJ,MAAA,MAAA,GAAS,CAAA;;AAAA,EAEb,YAAY,CAAA,EAAG,YAAY,CAAA,EAAG,eAAe;;AAAA;AAAA;AAAA,EAI7C,OAAA,CAAQ,SAAA,CAAU,CAAA,EAAG,GAAI,CAAC;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA,CAAA,CAAA;AAAA,IAiBxB,CAAA,MAAO;AAEL,MAAA,MAAM,eAAe,IAAA,GACjB;;AAAA,YAAA,EAAmB,IAAI,CAAA,2BAAA,CAAA,GACvB,EAAA;AACJ,MAAA,MAAM,kBAAkB,OAAA,GACpB;AAAA,wBAAA,EAA6B,OAAO,CAAA,2BAAA,CAAA,GACpC;AAAA,gEAAA,CAAA;AAEJ,MAAA,MAAA,GAAS,CAAA;AAAA,8EAAA,EACiE,YAAY;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oEAAA,EAOtB,eAAe;;AAAA;AAAA;AAAA,EAInF,OAAA,CAAQ,SAAA,CAAU,CAAA,EAAG,GAAI,CAAC;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA,CAAA,CAAA;AAAA,IAiBxB;AAEA,IAAA,OAAO,MAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,OAAO,eACL,OAAA,EACA,QAAA,EACA,YACA,iBAAA,EACA,YAAA,EACA,qBACA,gBAAA,EACQ;AAER,IAAA,MAAM,MAAA,GAAS,sCAAsC,UAAU,CAAA;;AAAA,QAAA,EAEzD,iBAAiB;AAAA,QAAA,EACjB,YAAY;;AAAA,gEAAA,EAE4C,QAAQ,CAAA;;AAAA,UAAA,EAE9
D,QAAQ;AAAA,aAAA,EACL,mBAAmB;AAAA;AAAA,EAEhC,gBAAA,CAAiB,IAAI,CAAA,EAAA,KAAM,CAAA,EAAA,EAAK,EAAE,CAAA,CAAE,CAAA,CAAE,IAAA,CAAK,IAAI,CAAC;;AAAA;AAAA;AAAA,wBAAA,EAIxB,QAAQ,CAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA,EAQhC,OAAO;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA,CAAA,CAAA;AAkBL,IAAA,OAAO,MAAA;AAAA,EACT;AACF,CAAA;ACjTO,SAAS,wBAAwB,QAAA,EAA6B;AACnE,EAAA,IAAI,OAAA,GAAU,SAAS,IAAA,EAAK;AAG5B,EAAA,IAAI,OAAA,CAAQ,UAAA,CAAW,KAAK,CAAA,EAAG;AAC7B,IAAA,OAAA,GAAU,QAAQ,OAAA,CAAQ,qBAAA,EAAuB,EAAE,CAAA,CAAE,OAAA,CAAQ,cAAc,EAAE,CAAA;AAAA,EAC/E;AAGA,EAAA,IAAI;AACF,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,OAAO,CAAA;AACjC,IAAA,OAAO,KAAA,CAAM,OAAA,CAAQ,MAAM,CAAA,GAAI,SAAS,EAAC;AAAA,EAC3C,CAAA,CAAA,MAAQ;AAAA,EAER;AAMA,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,OAAA,CAAQ,GAAG,CAAA;AACjC,EAAA,IAAI,KAAA,KAAU,EAAA,EAAI,OAAO,EAAC;AAC1B,EAAA,MAAM,UAAA,GAAa,OAAA,CAAQ,WAAA,CAAY,GAAG,CAAA;AAC1C,EAAA,MAAM,GAAA,GAAM,UAAA,GAAa,KAAA,GAAQ,UAAA,GAAa,OAAA,CAAQ,MAAA;AAEtD,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,KAAA,CAAM,KAAA,GAAQ,GAAG,GAAG,CAAA;AAC1C,EAAA,MAAM,UAAqB,EAAC;AAC5B,EAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,EAAA,IAAI,QAAA,GAAW,EAAA;AACf,EAAA,IAAI,QAAA,GAAW,KAAA;AACf,EAAA,IAAI,MAAA,GAAS,KAAA;AAEb,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,KAAA,CAAM,QAAQ,CAAA,EAAA,EAAK;AACrC,IAAA,MAAM,EAAA,GAAK,MAAM,CAAC,CAAA;AAClB,IAAA,IAAI,MAAA,EAAQ;AAAE,MAAA,MAAA,GAAS,KAAA;AAAO,MAAA;AAAA,IAAU;AACxC,IAAA,IAAI,OAAO,IAAA,EAAM;AAAE,MAAA,MAAA,GAAS,IAAA;AAAM,MAAA;AAAA,IAAU;AAC5C,IAAA,IAAI,OAAO,GAAA,EAAK;AAAE,MAAA,QAAA,GAAW,CAAC,QAAA;AAAU,MAAA;AAAA,IAAU;AAClD,IAAA,IAAI,QAAA,EAAU;AACd,IAAA,IAAI,OAAO,GAAA,EAAK;AACd,MAAA,IAAI,KAAA,KAAU,GAAG,QAAA,GAAW,CAAA;AAC5B,MAAA,KAAA,EAAA;AAAA,IACF,CAAA,MAAA,IAAW,OAAO,GAAA,EAAK;AACrB,MAAA,KAAA,EAAA;AACA,MAAA,IAAI,KAAA,KAAU,CAAA,IAAK,QAAA,KAAa,EAAA,EAAI;AAClC,QAAA,IAAI;AACF,UAAA,OAAA,CAAQ,IAAA,CAAK,KAAK,KAAA,CAAM,KAAA,CAAM,MAAM,QAAA,EAAU,CAAA,GAAI,CAAC,CAAC,CAAC,CAAA;AAAA,QACvD,CAAA,CAAA,MAAQ;AAAA,QAER;AACA,QAAA,QAAA,GAAW,EAAA;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,OAAA;AACT;AAiDO,IAAM,oBAAN,MAAwB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ7B,OAAO,aAAA,CAAc,QAAA,EAAkB,OAAA,EAAiC;AACtE,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,wBAAwB,QAAQ,CAAA;AAG/C,MAAA,MAAM,QAAQ,MAAA,CAAO,MAAA;AAAA,QAAO,CAAC,CAAA,KAC3B,CAAC,CAAC,CAAA,IAAK,OAAO,CAAA,KAAM,QAAA,IACpB,OAAQ,CAAA,CAAU,KAAA,KAAU,QAAA,IAC5B,OAAQ,CAAA,CAAU,KAAA,KAAU,QAAA,IAC5B,OAAQ,CAAA,CAAU,GAAA,KAAQ,QAAA,IAC1B,OAAQ,CAAA,CAAU,OAAA,KAAY,QAAA,IAC7B,CAAA,CAAU,OAAA,CAAQ,IAAA,EAAK,CAAE,MAAA,GAAS;AAAA,OACrC;AAEA,MAAA,OAAA,CAAQ,IAAI,CAAA,2BAAA,EAA8B,KAAA,CAAM,MAAM,CAAA,qBAAA,EAAwB,MAAA,CAAO,MAAM,CAAA,MAAA,CAAQ,CAAA;AAInG,MAAA,MAAM,oBAAoC,EAAC;AAE3C,MAAA,KAAA,MAAW,WAAW,KAAA,EAAO;AAC3B,QAAA,IAAI;AACF,UAAA,MAAM,SAAA,GAAY,0BAA0B,OAAA,EAAS,OAAA,CAAQ,OAAO,OAAA,CAAQ,GAAA,EAAK,QAAQ,KAAK,CAAA;AAC9F,UAAA,iBAAA,CAAkB,IAAA,CAAK;AAAA,YACrB,GAAG,OAAA;AAAA,YACH,OAAO,SAAA,CAAU,KAAA;AAAA,YACjB,KAAK,SAAA,CAAU,GAAA;AAAA,YACf,QAAQ,SAAA,CAAU,MAAA;AAAA,YAClB,QAAQ,SAAA,CAAU;AAAA,WACnB,CAAA;AAAA,QACH,SAAS,KAAA,EAAO;AACd,UAAA,OAAA,CAAQ,IAAA,CAAK,CAAA,8CAAA,EAAiD,OAAA,CAAQ,KAAK,MAAM,KAAK,CAAA;AAAA,QAExF;AAAA,MACF;AAEA,MAAA,OAAO,iBAAA;AAAA,IACT,SAAS,KAAA,EAAO;AACd,MAAA,OAAA,CAAQ,KAAA,CAAM,4DAA4D,KAAK,CAAA;AAC/E,MAAA,OAAO,EAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAO,eAAA,CAAgB,QAAA,EAAkB,OAAA,EAAmC;AAC1E,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,wBAAwB,QAAQ,CAAA;AAG/C,MAAA,MAAM,aAAa,MAAA,CAAO,MAAA;AAAA,QAAO,CAAC,CAAA,KAChC,CAAC,CAAC,CAAA,IAAK,OAAO,MAAM,QAAA,IACpB,OAAQ,CAAA,CAAU,KAAA,KAAU,YAC5B,OAAQ,CAAA,CAAU,UAAU,QAAA,IAC5B,OAAQ,EAAU,GAAA,KAAQ;AAAA,OAC5B;AAIA,MAAA,MAAM,sBAAwC,EAAC;
AAE/C,MAAA,KAAA,MAAW,aAAa,UAAA,EAAY;AAClC,QAAA,IAAI;AACF,UAAA,MAAM,SAAA,GAAY,0BAA0B,OAAA,EAAS,SAAA,CAAU,OAAO,SAAA,CAAU,GAAA,EAAK,UAAU,KAAK,CAAA;AACpG,UAAA,mBAAA,CAAoB,IAAA,CAAK;AAAA,YACvB,GAAG,SAAA;AAAA,YACH,OAAO,SAAA,CAAU,KAAA;AAAA,YACjB,KAAK,SAAA,CAAU,GAAA;AAAA,YACf,QAAQ,SAAA,CAAU,MAAA;AAAA,YAClB,QAAQ,SAAA,CAAU;AAAA,WACnB,CAAA;AAAA,QACH,SAAS,KAAA,EAAO;AACd,UAAA,OAAA,CAAQ,IAAA,CAAK,CAAA,gDAAA,EAAmD,SAAA,CAAU,KAAK,MAAM,KAAK,CAAA;AAAA,QAE5F;AAAA,MACF;AAEA,MAAA,OAAO,mBAAA;AAAA,IACT,SAAS,KAAA,EAAO;AACd,MAAA,OAAA,CAAQ,KAAA,CAAM,8DAA8D,KAAK,CAAA;AACjF,MAAA,OAAA,CAAQ,KAAA,CAAM,iBAAiB,QAAQ,CAAA;AACvC,MAAA,OAAO,EAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAO,gBAAA,CAAiB,QAAA,EAAkB,OAAA,EAAoC;AAC5E,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,wBAAwB,QAAQ,CAAA;AAG/C,MAAA,MAAM,cAAc,MAAA,CAAO,MAAA;AAAA,QAAO,CAAC,MACjC,CAAC,CAAC,KAAK,OAAO,CAAA,KAAM,QAAA,IACpB,OAAQ,CAAA,CAAU,KAAA,KAAU,YAC5B,OAAQ,CAAA,CAAU,UAAU,QAAA,IAC5B,OAAQ,EAAU,GAAA,KAAQ,QAAA,IAC1B,OAAQ,CAAA,CAAU,UAAA,KAAe;AAAA,OACnC;AAIA,MAAA,MAAM,uBAA0C,EAAC;AAEjD,MAAA,KAAA,MAAW,cAAc,WAAA,EAAa;AACpC,QAAA,IAAI;AACF,UAAA,MAAM,SAAA,GAAY,0BAA0B,OAAA,EAAS,UAAA,CAAW,OAAO,UAAA,CAAW,GAAA,EAAK,WAAW,KAAK,CAAA;AACvG,UAAA,oBAAA,CAAqB,IAAA,CAAK;AAAA,YACxB,GAAG,UAAA;AAAA,YACH,OAAO,SAAA,CAAU,KAAA;AAAA,YACjB,KAAK,SAAA,CAAU,GAAA;AAAA,YACf,QAAQ,SAAA,CAAU,MAAA;AAAA,YAClB,QAAQ,SAAA,CAAU;AAAA,WACnB,CAAA;AAAA,QACH,SAAS,KAAA,EAAO;AACd,UAAA,OAAA,CAAQ,IAAA,CAAK,CAAA,iDAAA,EAAoD,UAAA,CAAW,KAAK,MAAM,KAAK,CAAA;AAAA,QAE9F;AAAA,MACF;AAEA,MAAA,OAAO,oBAAA;AAAA,IACT,SAAS,KAAA,EAAO;AACd,MAAA,OAAA,CAAQ,KAAA,CAAM,+DAA+D,KAAK,CAAA;AAClF,MAAA,OAAA,CAAQ,KAAA,CAAM,iBAAiB,QAAQ,CAAA;AACvC,MAAA,OAAO,EAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAO,UAAU,QAAA,EAAgD;AAC/D,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,wBAAwB,QAAQ,CAAA;AAG/C,MAAA,MAAM,QAAQ,MAAA,CAAO,MAAA;AAAA,QAAO,CAAC,CAAA,KAC3B,CAAC,CAAC,CAAA,IAAK,OAAO,CAAA,KAAM,QAAA,IACpB,OAAQ,CAAA,CAAU,KAAA,KAAU,QAAA,IAC5B,OAAQ,CAAA,CAAU,KAAA,KAAU,QAAA,IAC5B,OAAQ,CAAA,CAAU,GAAA,KAAQ,YACzB,CAAA,CAAU,KAAA,CAAM,IAAA,EAAK,CAAE,MAAA,GAAS;AAAA,OACnC;AAEA,MAAA,OAAA,CAAQ,IAAI,CAAA,2BAAA,EAA8B,KAAA,CAAM,MAAM,CAAA,iBAAA,EAAoB,MAAA,CAAO,MAAM,CAAA,MAAA,CAAQ,CAAA;AAE/F,MAAA,OAAO,KAAA;AAAA,IACT,SAAS,KAAA,EAAO;AACd,MAAA,OAAA,CAAQ,KAAA,CAAM,wDAAwD,KAAK,CAAA;AAC3E,MAAA,OAAO,EAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,OAAO,kBAAA,CACL,IAAA,EACA,OAAA,EACA,QAAA,EACY;AACZ,IAAA,MAAM,gBAA4B,EAAC;AAEnC,IAAA,KAAA,MAAW,OAAO,IAAA,EAAM;AACtB,MAAA,IAAI;AACF,QAAA,MAAM,SAAA,GAAY,0BAA0B,OAAA,EAAS,GAAA,CAAI,OAAO,GAAA,CAAI,GAAA,EAAK,IAAI,KAAK,CAAA;AAClF,QAAA,aAAA,CAAc,IAAA,CAAK;AAAA,UACjB,GAAG,GAAA;AAAA,UACH,QAAA;AAAA,UACA,OAAO,SAAA,CAAU,KAAA;AAAA,UACjB,KAAK,SAAA,CAAU,GAAA;AAAA,UACf,QAAQ,SAAA,CAAU,MAAA;AAAA,UAClB,QAAQ,SAAA,CAAU;AAAA,SACnB,CAAA;AAAA,MACH,SAAS,KAAA,EAAO;AACd,QAAA,OAAA,CAAQ,IAAA,CAAK,CAAA,uDAAA,EAA0D,QAAQ,CAAA,EAAA,CAAA,EAAM,KAAK,CAAA;AAAA,MAE5F;AAAA,IACF;AAEA,IAAA,OAAO,aAAA;AAAA,EACT;AACF,CAAA;;;AChUO,IAAM,WAAA,GAAyC;EACpD,YAAA,EAAc;IACZ,EAAA,EAAI,YAAA;IACJ,IAAA,EAAM,uBAAA;IACN,WAAA,EAAa,oEAAA;IACb,MAAA,EAAQ,OAAA;IACR,IAAA,EAAM;AACJ,MAAA;QACE,IAAA,EAAM,OAAA;QACN,WAAA,EAAa,8CAAA;QACb,QAAA,EAAU;AACR,UAAA,qCAAA;AACA,UAAA,6BAAA;AACA,UAAA;AACF;AACF,OAAA;AACA,MAAA;QACE,IAAA,EAAM,MAAA;QACN,WAAA,EAAa,+CAAA;QACb,QAAA,EAAU;AACR,UAAA,mBAAA;AACA,UAAA,6BAAA;AACA,UAAA;AACF;AACF,OAAA;AACA,MAAA;QACE,IAAA,EAAM,aAAA;QACN,WAAA,EAAa,4CAAA;QACb,QAAA,EAAU;AACR,UAAA,wCAAA;AACA,UAAA,sBAAA;AACA,UAAA;AACF;AACF,OAAA;AACA,MAAA;QACE,IAAA,EAAM,YAAA;QACN,WAAA,EAAa,iDAAA;QACb,QAAA,EAAU;AACR,UAAA,+BAAA;AACA,UAAA,6BAAA;AACA,UAAA
;AACF;AACF;AACF;AACF,GAAA;EAEA,kBAAA,EAAoB;IAClB,EAAA,EAAI,kBAAA;IACJ,IAAA,EAAM,0BAAA;IACN,WAAA,EAAa,0EAAA;IACb,MAAA,EAAQ,YAAA;IACR,IAAA,EAAM;AACJ,MAAA;QACE,IAAA,EAAM,cAAA;QACN,WAAA,EAAa,4CAAA;QACb,QAAA,EAAU;AACR,UAAA,gCAAA;AACA,UAAA,wBAAA;AACA,UAAA;AACF;AACF,OAAA;AACA,MAAA;QACE,IAAA,EAAM,SAAA;QACN,WAAA,EAAa,oCAAA;QACb,QAAA,EAAU;AACR,UAAA,8BAAA;AACA,UAAA,yBAAA;AACA,UAAA;AACF;AACF,OAAA;AACA,MAAA;QACE,IAAA,EAAM,SAAA;QACN,WAAA,EAAa,2BAAA;QACb,QAAA,EAAU;AACR,UAAA,0BAAA;AACA,UAAA,oBAAA;AACA,UAAA;AACF;AACF,OAAA;AACA,MAAA;QACE,IAAA,EAAM,YAAA;QACN,WAAA,EAAa,4CAAA;QACb,QAAA,EAAU;AACR,UAAA,2BAAA;AACA,UAAA,4BAAA;AACA,UAAA;AACF;AACF;AACF;AACF,GAAA;EAEA,kBAAA,EAAoB;IAClB,EAAA,EAAI,kBAAA;IACJ,IAAA,EAAM,8BAAA;IACN,WAAA,EAAa,iFAAA;IACb,MAAA,EAAQ,SAAA;IACR,IAAA,EAAM;AACJ,MAAA;QACE,IAAA,EAAM,OAAA;QACN,WAAA,EAAa,8BAAA;QACb,QAAA,EAAU;AACR,UAAA,uBAAA;AACA,UAAA,yBAAA;AACA,UAAA;AACF;AACF,OAAA;AACA,MAAA;QACE,IAAA,EAAM,UAAA;QACN,WAAA,EAAa,oCAAA;QACb,QAAA,EAAU;AACR,UAAA,2BAAA;AACA,UAAA,qBAAA;AACA,UAAA;AACF;AACF,OAAA;AACA,MAAA;QACE,IAAA,EAAM,SAAA;QACN,WAAA,EAAa,wCAAA;QACb,QAAA,EAAU;AACR,UAAA,2CAAA;AACA,UAAA,oBAAA;AACA,UAAA;AACF;AACF,OAAA;AACA,MAAA;QACE,IAAA,EAAM,iBAAA;QACN,WAAA,EAAa,mCAAA;QACb,QAAA,EAAU;AACR,UAAA,0BAAA;AACA,UAAA,sBAAA;AACA,UAAA;AACF;AACF,OAAA;AACA,MAAA;QACE,IAAA,EAAM,UAAA;QACN,WAAA,EAAa,8BAAA;QACb,QAAA,EAAU;AACR,UAAA,iCAAA;AACA,UAAA,mCAAA;AACA,UAAA;AACF;AACF;AACF;AACF;AACF,CAAA;AAKO,SAAS,aAAa,QAAA,EAAoC;AAC/D,EAAA,OAAO,WAAA,CAAY,QAAQ,CAAA,IAAK,IAAA;AAClC;AA4BO,SAAS,iBAAA,CAAkB,UAAkB,YAAA,EAA0C;AAC5F,EAAA,MAAM,MAAA,GAAS,aAAa,QAAQ,CAAA;AACpC,EAAA,IAAI,CAAC,QAAQ,OAAO,IAAA;AACpB,EAAA,OAAO,MAAA,CAAO,KAAK,IAAA,CAAK,CAAA,QAAO,GAAA,CAAI,IAAA,KAAS,YAAY,CAAA,IAAK,IAAA;AAC/D;;;ACtLO,IAAM,sBAAN,MAA0B;AAAA;AAAA;AAAA;AAAA;AAAA,EAM/B,aAAa,YAAA,CAAa,cAAA,EAAgC,UAAA,EAAyC;AACjG,IAAA,MAAM,MAAA,GAAS,MAAM,cAAA,CAAe,UAAU,CAAA;AAC9C,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,oCAAA,EAAuC,UAAU,CAAA,CAAE,CAAA;AAAA,IACrE;AACA,IAAA,MAAM,SAAmB,EAAC;AAC1B,IAAA,WAAA,MAAiB,SAAS,MAAA,EAAQ;AAChC,MAAA,MAAA,CAAO,IAAA,CAAK,OAAO,QAAA,CAAS,KAAK,IAAI,KAAA,GAAQ,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAAA,IACjE;AACA,IAAA,OAAO,MAAA,CAAO,MAAA,CAAO,MAAM,CAAA,CAAE,SAAS,OAAO,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,cAAA,CACX,OAAA,EACA,MAAA,EACA,YAAA,EACA,MACA,OAAA,EACyB;AACzB,IAAA,MAAM,SAAS,iBAAA,CAAkB,kBAAA,CAAmB,OAAA,EAAS,YAAA,EAAc,MAAM,OAAO,CAAA;AACxF,IAAA,MAAM,WAAW,MAAM,MAAA,CAAO,YAAA,CAAa,MAAA,EAAQ,KAAM,GAAG,CAAA;AAC5D,IAAA,OAAO,iBAAA,CAAkB,aAAA,CAAc,QAAA,EAAU,OAAO,CAAA;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,gBAAA,CACX,OAAA,EACA,MAAA,EACA,cACA,OAAA,EAC2B;AAC3B,IAAA,MAAM,MAAA,GAAS,iBAAA,CAAkB,oBAAA,CAAqB,OAAA,EAAS,cAAc,OAAO,CAAA;AACpF,IAAA,MAAM,WAAW,MAAM,MAAA,CAAO,YAAA,CAAa,MAAA,EAAQ,KAAM,GAAG,CAAA;AAC5D,IAAA,OAAO,iBAAA,CAAkB,eAAA,CAAgB,QAAA,EAAU,OAAO,CAAA;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,iBAAA,CACX,OAAA,EACA,MAAA,EACA,YAAA,EACA,MACA,OAAA,EAC4B;AAC5B,IAAA,MAAM,SAAS,iBAAA,CAAkB,qBAAA,CAAsB,OAAA,EAAS,YAAA,EAAc,MAAM,OAAO,CAAA;AAC3F,IAAA,MAAM,WAAW,MAAM,MAAA,CAAO,YAAA,CAAa,MAAA,EAAQ,KAAM,GAAG,CAAA;AAC5D,IAAA,OAAO,iBAAA,CAAkB,gBAAA,CAAiB,QAAA,EAAU,OAAO,CAAA;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,UAAA,CACX,OAAA,EACA,MAAA,EACA,UACA,QAAA,EACqB;AACrB,IAAA,MAAM,MAAA,GAAS,aAAa,QAAQ,CAAA;AACpC,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,oBAAA,EAAuB,QAAQ,CAAA,CAAE,CAAA;AAAA,IACnD;AAEA,IAAA,MAAM,YAAA,GAAe,iBAAA,CAAkB,QAAA,EAAU,QAAQ,CAAA;AACzD,IAAA,IAAI,CAAC,YAAA,EAAc;AACjB,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,kBAAA,EAAqB,QAAQ,CAAA,aAAA,EAAgB,QAAQ,CAAA,CAAE,CAAA;AAAA,IACzE;AAEA,IAAA,MAAM,SAAS,iBAAA,CAAkB,cAAA;AAAA,MAC/
B,OAAA;AAAA,MACA,QAAA;AAAA,MACA,MAAA,CAAO,IAAA;AAAA,MACP,MAAA,CAAO,WAAA;AAAA,MACP,MAAA,CAAO,MAAA;AAAA,MACP,YAAA,CAAa,WAAA;AAAA,MACb,YAAA,CAAa;AAAA,KACf;AAEA,IAAA,MAAM,WAAW,MAAM,MAAA,CAAO,YAAA,CAAa,MAAA,EAAQ,KAAM,GAAG,CAAA;AAC5D,IAAA,MAAM,UAAA,GAAa,iBAAA,CAAkB,SAAA,CAAU,QAAQ,CAAA;AACvD,IAAA,OAAO,iBAAA,CAAkB,kBAAA,CAAmB,UAAA,EAAY,OAAA,EAAS,QAAQ,CAAA;AAAA,EAC3E;AACF,CAAA;;;AC/FA,eAAsB,gBACpB,KAAA,EACA,WAAA,EACA,MAAA,EACA,4BAAA,GAAwC,OACxCA,OAAAA,EAC4B;AAG5B,EAAA,MAAM,sBAAA,GAAyB,WAAA,CAAY,GAAA,CAAI,CAAA,EAAA,KAAM;AACnD,IAAA,IAAI,OAAO,OAAO,QAAA,EAAU;AAC1B,MAAA,OAAO,EAAA;AAAA,IACT;AACA,IAAA,OAAO,EAAA,CAAG,YAAY,EAAA,CAAG,QAAA,CAAS,SAAS,CAAA,GACvC,CAAA,EAAG,GAAG,IAAI,CAAA,YAAA,EAAe,GAAG,QAAA,CAAS,KAAA,CAAM,GAAG,CAAC,CAAA,CAAE,KAAK,IAAI,CAAC,MAC3D,EAAA,CAAG,IAAA;AAAA,EACT,CAAC,CAAA,CAAE,IAAA,CAAK,IAAI,CAAA;AAMZ,EAAA,MAAM,+BAA+B,4BAAA,GACjC;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,CAAA,GAoBA;AAAA;AAAA,CAAA;AAIJ,EAAA,MAAM,MAAA,GAAS,2EAA2E,sBAAsB,CAAA;AAAA,EAChH,4BAA4B;AAAA;AAAA;AAAA,EAG5B,KAAK;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA,2NAAA,CAAA;AAiBL,EAAA,MAAM,QAAA,GAAW,MAAM,MAAA,CAAO,wBAAA;AAAA,IAC5B,MAAA;AAAA,IACA,GAAA;AAAA;AAAA,IACA;AAAA;AAAA,GACF;AAGA,EAAA,IAAI;AAEF,IAAA,IAAI,OAAA,GAAU,QAAA,CAAS,IAAA,CAAK,IAAA,EAAK;AACjC,IAAA,IAAI,OAAA,CAAQ,UAAA,CAAW,KAAK,CAAA,EAAG;AAC7B,MAAA,OAAA,GAAU,QAAQ,OAAA,CAAQ,kBAAA,EAAoB,EAAE,CAAA,CAAE,OAAA,CAAQ,WAAW,EAAE,CAAA;AAAA,IACzE;AAEA,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,OAAO,CAAA;AACnC,IAAAA,SAAQ,KAAA,CAAM,kCAAA,EAAoC,EAAE,KAAA,EAAO,QAAA,CAAS,QAAQ,CAAA;AAG5E,IAAA,IAAI,QAAA,CAAS,eAAe,YAAA,EAAc;AACxC,MAAA,MAAM,QAAA,GAAW,CAAA,6BAAA,EAAgC,QAAA,CAAS,MAAM,CAAA,yFAAA,CAAA;AAChE,MAAAA,OAAAA,EAAQ,MAAM,QAAQ,CAAA;AACtB,MAAA,MAAM,IAAI,MAAM,QAAQ,CAAA;AAAA,IAC1B;AAGA,IAAA,OAAO,QAAA,CAAS,GAAA,CAAI,CAAC,MAAA,EAAa,GAAA,KAAgB;AAChD,MAAA,IAAI,cAAc,MAAA,CAAO,WAAA;AACzB,MAAA,IAAI,YAAY,MAAA,CAAO,SAAA;AAEvB,MAAAA,OAAAA,EAAQ,MAAM,mBAAA,EAAqB;AAAA,QACjC,OAAO,GAAA,GAAM,CAAA;AAAA,QACb,OAAO,QAAA,CAAS,MAAA;AAAA,QAChB,MAAM,MAAA,CAAO,UAAA;AAAA,QACb,MAAM,MAAA,CAAO,KAAA;AAAA,QACb,aAAA,EAAe,CAAA,CAAA,EAAI,WAAW,CAAA,CAAA,EAAI,SAAS,CAAA,CAAA;AAAA,OAC5C,CAAA;AAiBD,MAAA,MAAM,aAAA,GAAgB,KAAA,CAAM,SAAA,CAAU,WAAA,EAAa,SAAS,CAAA;AAC5D,MAAA,IAAI,YAAA;AAEJ,MAAA,IAAI,aAAA,KAAkB,OAAO,KAAA,EAAO;AAClC,QAAA,YAAA,GAAe,WAAA;AACf,QAAAA,OAAAA,EAAQ,MAAM,iBAAA,EAAmB;AAAA,UAC/B,MAAM,MAAA,CAAO,KAAA;AAAA,UACb,YAAY,MAAA,CAAO,UAAA;AAAA,UACnB;AAAA,SACD,CAAA;AAAA,MACH,CAAA,MAAO;AAGL,QAAAA,OAAAA,EAAQ,MAAM,kDAAA,EAA+C;AAAA,UAC3D,UAAU,MAAA,CAAO,KAAA;AAAA,UACjB,UAAA,EAAY,CAAA,CAAA,EAAI,WAAW,CAAA,CAAA,EAAI,SAAS,CAAA,CAAA,CAAA;AAAA,UACxC,iBAAA,EAAmB;AAAA,SACpB,CAAA;AAID,QAAA,IAAI,eAAA,GAAkB,CAAA;AACtB,QAAA,IAAI,eAAA,GAAkB,CAAA,CAAA;AACtB,QAAA,IAAI,SAAA,GAAY,CAAA;AAChB,QAAA,OAAA,CAAQ,YAAY,KAAA,CAAM,OAAA,CAAQ,OAAO,KAAA,EAAO,SAAS,OAAO,CAAA,CAAA,EAAI;AAClE,UAAA,IAAI,eAAA,KAAoB,IAAI,eAAA,GAAkB,SAAA;AAC9C,UAAA,eAAA,EAAA;AACA,UAAA,SAAA,EAAA;AAAA,QACF;AAEA,QAAA,IAAI,oBAAoB,CAAA,EAAG;AACzB,UAAA,YAAA,GAAe,SAAA;AACf,UAAAA,OAAAA,EAAQ,MAAM,mDAAA,EAAgD;AAAA,YAC5D,MAAM,MAAA,CAAO,KAAA;AAAA,YACb,YAAY,MAAA,CAAO,UAAA;AAAA,YACnB,UAAA,EAAY,CAAA,CAAA,EAAI,WAAW,CAAA,CAAA,EAAI,SAAS,CAAA,CAAA,CAAA;AAAA,YACxC,YAAA;AAAA,YACA,aAAA,EAAe,KAAA,CAAM,SAAA,CAAU,CAAA,EAAG,GAAG;AAAA,WACtC,CAAA;AACD,UAAA,OAAO,IAAA;AAAA,QACT;AAGA,QAAA,IAAI,eAAA,GAAkB,CAAA,CAAA;AACtB,QAAA,IAAI,MAAA,CAAO,MAAA,IAAU,MAAA,CAAO,MAAA,EAAQ;AAClC,UAAA,IAAI,CAAA,GAAI,CAAA;AACR,UAAA,OAAA,CAAQ,IAAI,KAAA,CAAM,OAAA,CAAQ,OAAO,KAAA,EAAO,CAAC,OAAO,CAAA,CAAA,EAAI;AAClD,YAAA,MAAM,eAAA,GAAkB,MAAM,SAAA,CAAU,IAAA,CAAK,IAAI,CAAA,EAAG,CAAA,GAAI,E
AAE,CAAA,EAAG,CAAC,CAAA;AAC9D,YAAA,MAAM,kBAAkB,KAAA,CAAM,SAAA;AAAA,cAC5B,CAAA,GAAI,OAAO,KAAA,CAAM,MAAA;AAAA,cACjB,IAAA,CAAK,IAAI,KAAA,CAAM,MAAA,EAAQ,IAAI,MAAA,CAAO,KAAA,CAAM,SAAS,EAAE;AAAA,aACrD;AACA,YAAA,MAAM,cAAc,CAAC,MAAA,CAAO,UAAU,eAAA,CAAgB,QAAA,CAAS,OAAO,MAAM,CAAA;AAC5E,YAAA,MAAM,cAAc,CAAC,MAAA,CAAO,UAAU,eAAA,CAAgB,UAAA,CAAW,OAAO,MAAM,CAAA;AAC9E,YAAA,IAAI,eAAe,WAAA,EAAa;AAC9B,cAAA,eAAA,GAAkB,CAAA;AAClB,cAAA;AAAA,YACF;AACA,YAAA,CAAA,EAAA;AAAA,UACF;AAAA,QACF;AAEA,QAAA,IAAI,oBAAoB,CAAA,CAAA,EAAI;AAC1B,UAAA,YAAA,GAAe,mBAAA;AACf,UAAA,WAAA,GAAc,eAAA;AACd,UAAA,SAAA,GAAY,eAAA,GAAkB,OAAO,KAAA,CAAM,MAAA;AAC3C,UAAAA,OAAAA,EAAQ,MAAM,iBAAA,EAAmB;AAAA,YAC/B,MAAM,MAAA,CAAO,KAAA;AAAA,YACb,YAAY,MAAA,CAAO,UAAA;AAAA,YACnB,YAAA;AAAA,YACA,UAAA,EAAY,kBAAkB,MAAA,CAAO;AAAA,WACtC,CAAA;AAAA,QACH,CAAA,MAAA,IAAW,oBAAoB,CAAA,EAAG;AAChC,UAAA,YAAA,GAAe,cAAA;AACf,UAAA,WAAA,GAAc,eAAA;AACd,UAAA,SAAA,GAAY,eAAA,GAAkB,OAAO,KAAA,CAAM,MAAA;AAC3C,UAAAA,OAAAA,EAAQ,MAAM,iBAAA,EAAmB;AAAA,YAC/B,MAAM,MAAA,CAAO,KAAA;AAAA,YACb,YAAY,MAAA,CAAO,UAAA;AAAA,YACnB,YAAA;AAAA,YACA,UAAA,EAAY,kBAAkB,MAAA,CAAO;AAAA,WACtC,CAAA;AAAA,QACH,CAAA,MAAO;AAGL,UAAA,YAAA,GAAe,eAAA;AACf,UAAA,WAAA,GAAc,eAAA;AACd,UAAA,SAAA,GAAY,eAAA,GAAkB,OAAO,KAAA,CAAM,MAAA;AAC3C,UAAAA,OAAAA,EAAQ,KAAK,sEAAA,EAAmE;AAAA,YAC9E,MAAM,MAAA,CAAO,KAAA;AAAA,YACb,YAAY,MAAA,CAAO,UAAA;AAAA,YACnB,YAAA;AAAA,YACA,eAAA;AAAA,YACA,YAAA,EAAc,eAAA;AAAA,YACd,YAAY,CAAA,CAAA,EAAI,MAAA,CAAO,WAAW,CAAA,CAAA,EAAI,OAAO,SAAS,CAAA,CAAA,CAAA;AAAA,YACtD,SAAA,EAAW,CAAC,CAAC,MAAA,CAAO,MAAA;AAAA,YACpB,SAAA,EAAW,CAAC,CAAC,MAAA,CAAO;AAAA,WACrB,CAAA;AAAA,QACH;AAAA,MACF;AAEA,MAAA,OAAO;AAAA,QACL,OAAO,MAAA,CAAO,KAAA;AAAA,QACd,YAAY,MAAA,CAAO,UAAA;AAAA,QACnB,WAAA;AAAA,QACA,SAAA;AAAA,QACA,QAAQ,MAAA,CAAO,MAAA;AAAA,QACf,QAAQ,MAAA,CAAO;AAAA,OACjB;AAAA,IACF,CAAC,CAAA,CAAE,MAAA,CAAO,CAAC,MAAA,KAA8D;AAEvE,MAAA,IAAI,WAAW,IAAA,EAAM;AACnB,QAAAA,OAAAA,EAAQ,MAAM,uBAAuB,CAAA;AACrC,QAAA,OAAO,KAAA;AAAA,MACT;AACA,MAAA,IAAI,MAAA,CAAO,WAAA,KAAgB,KAAA,CAAA,IAAa,MAAA,CAAO,cAAc,KAAA,CAAA,EAAW;AACtE,QAAAA,SAAQ,IAAA,CAAK,kCAAA,EAAoC,EAAE,IAAA,EAAM,MAAA,CAAO,OAAO,CAAA;AACvE,QAAA,OAAO,KAAA;AAAA,MACT;AACA,MAAA,IAAI,MAAA,CAAO,cAAc,CAAA,EAAG;AAC1B,QAAAA,OAAAA,EAAQ,KAAK,uCAAA,EAAyC;AAAA,UACpD,MAAM,MAAA,CAAO,KAAA;AAAA,UACb,aAAa,MAAA,CAAO;AAAA,SACrB,CAAA;AACD,QAAA,OAAO,KAAA;AAAA,MACT;AACA,MAAA,IAAI,MAAA,CAAO,SAAA,GAAY,KAAA,CAAM,MAAA,EAAQ;AACnC,QAAAA,OAAAA,EAAQ,KAAK,gDAAA,EAAkD;AAAA,UAC7D,MAAM,MAAA,CAAO,KAAA;AAAA,UACb,WAAW,MAAA,CAAO,SAAA;AAAA,UAClB,YAAY,KAAA,CAAM;AAAA,SACnB,CAAA;AACD,QAAA,OAAO,KAAA;AAAA,MACT;AAGA,MAAA,MAAM,gBAAgB,KAAA,CAAM,SAAA,CAAU,MAAA,CAAO,WAAA,EAAa,OAAO,SAAS,CAAA;AAC1E,MAAA,IAAI,aAAA,KAAkB,OAAO,KAAA,EAAO;AAClC,QAAAA,OAAAA,EAAQ,KAAK,kCAAA,EAAoC;AAAA,UAC/C,UAAU,MAAA,CAAO,KAAA;AAAA,UACjB,GAAA,EAAK,aAAA;AAAA,UACL,SAAS,CAAA,CAAA,EAAI,MAAA,CAAO,WAAW,CAAA,CAAA,EAAI,OAAO,SAAS,CAAA,CAAA;AAAA,SACpD,CAAA;AACD,QAAA,OAAO,KAAA;AAAA,MACT;AAEA,MAAAA,OAAAA,EAAQ,MAAM,iBAAA,EAAmB;AAAA,QAC/B,MAAM,MAAA,CAAO,KAAA;AAAA,QACb,SAAS,CAAA,CAAA,EAAI,MAAA,CAAO,WAAW,CAAA,CAAA,EAAI,OAAO,SAAS,CAAA,CAAA;AAAA,OACpD,CAAA;AACD,MAAA,OAAO,IAAA;AAAA,IACT,CAAC,CAAA;AAAA,EACH,SAAS,KAAA,EAAO;AAId,IAAA,OAAO,EAAC;AAAA,EACV;AACF;ACpSA,SAAS,gBAAgB,MAAA,EAAwB;AAC/C,EAAA,OAAO,oBAAA,CAAqB,MAAM,CAAA,IAAK,MAAA;AACzC;AAKA,eAAsB,yBAAA,CACpB,OACA,WAAA,EACA,MAAA,EACA,YACA,MAAA,EACA,OAAA,EACA,WAAA,EACA,SAAA,EACAA,OAAAA,EAC6C;AAe7C,EAAA,MAAM,mBAAmB,WAAA,IAAe,GAAA;AACxC,EAAA,MAAM,iBAAiB,SAAA,IAAa,GAAA;AAGpC,EAAA,MAAM,mBAAA,GAAsB,MAAA,IAAU,MAAA,KAAW,IAAA,GAC7C;;AAAA,wCAAA,EAA+C,eAAA,CAAgB,MAAM,CAAC,CAAA,CAAA,CAAA,GACtE,EAAA;AAGJ,EAAA,IAAI,iBAAA,GAAoB,EAAA;AACxB,EAAA,IAAI,OAAA,EAAS;AA
CX,IAAA,MAAM,QAAkB,EAAC;AACzB,IAAA,KAAA,CAAM,IAAA,CAAK,CAAA,yBAAA,EAA4B,OAAA,CAAQ,UAAA,CAAW,UAAU,CAAA,CAAE,CAAA;AACtE,IAAA,KAAA,CAAM,IAAA,CAAK,CAAA,mBAAA,EAAsB,OAAA,CAAQ,cAAA,CAAe,IAAI,CAAA,CAAE,CAAA;AAE9D,IAAA,MAAM,EAAE,UAAA,EAAY,IAAA,EAAK,GAAI,OAAA,CAAQ,UAAA;AACrC,IAAA,IAAI,UAAA,KAAe,YAAA,IAAgB,UAAA,KAAe,WAAA,EAAa;AAC7D,MAAA,MAAM,WAAW,KAAA,CAAM,OAAA,CAAQ,IAAI,CAAA,GAAI,IAAA,CAAK,CAAC,CAAA,GAAI,IAAA;AACjD,MAAA,IAAI,QAAA,IAAY,OAAA,IAAW,QAAA,IAAY,QAAA,CAAS,KAAA,EAAO;AACrD,QAAA,MAAM,KAAA,GAAQ,UAAA,KAAe,YAAA,GAAe,SAAA,GAAY,YAAA;AACxD,QAAA,KAAA,CAAM,KAAK,CAAA,EAAA,EAAK,KAAK,CAAA,EAAA,EAAK,QAAA,CAAS,KAAK,CAAA,CAAE,CAAA;AAAA,MAC5C;AAAA,IACF;AACA,IAAA,iBAAA,GAAoB;;AAAA;AAAA,EAA4B,KAAA,CAAM,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA;AAAA,EAClE;AAGA,EAAA,IAAI,cAAA,GAAiB,EAAA;AACrB,EAAA,IAAI,SAAS,aAAA,EAAe;AAC1B,IAAA,MAAM,EAAE,MAAA,EAAQ,QAAA,EAAU,KAAA,KAAU,OAAA,CAAQ,aAAA;AAC5C,IAAA,cAAA,GAAiB;;AAAA;AAAA;AAAA,EAEnB,MAAA,GAAS,CAAA,GAAA,EAAM,MAAM,CAAA,CAAA,GAAK,EAAE;AAAA,GAAA,EACzB,QAAQ,CAAA;AAAA,EACX,KAAA,GAAQ,CAAA,EAAG,KAAK,CAAA,GAAA,CAAA,GAAQ,EAAE;AAAA;AAAA,CAAA;AAAA,EAG1B;AAGA,EAAA,IAAI,mBAAA,GAAsB,EAAA;AAC1B,EAAA,IAAI,SAAS,YAAA,EAAc;AACzB,IAAA,MAAM,KAAK,OAAA,CAAQ,YAAA;AACnB,IAAA,MAAM,WAAA,GAAc,EAAA,CAAG,WAAA,IAAe,EAAC;AACvC,IAAA,MAAM,OAAA,GAAU,EAAA,CAAG,OAAA,IAAW,EAAC;AAC/B,IAAA,MAAM,QAAkB,EAAC;AAEzB,IAAA,IAAI,WAAA,CAAY,SAAS,CAAA,EAAG;AAC1B,MAAA,MAAM,QAAA,GAAW,YACd,GAAA,CAAI,CAAA,CAAA,KAAK,GAAG,CAAA,CAAE,YAAY,CAAA,EAAG,CAAA,CAAE,WAAA,EAAa,MAAA,GAAS,KAAK,CAAA,CAAE,WAAA,CAAY,KAAK,IAAI,CAAC,MAAM,EAAE,CAAA,CAAE,CAAA,CAC5F,IAAA,CAAK,IAAI,CAAA;AACZ,MAAA,KAAA,CAAM,IAAA,CAAK,CAAA,uBAAA,EAA0B,QAAQ,CAAA,CAAE,CAAA;AAAA,IACjD;AAEA,IAAA,IAAI,EAAA,CAAG,YAAA,IAAgB,EAAA,CAAG,YAAA,GAAe,CAAA,EAAG;AAC1C,MAAA,MAAM,UAAA,GAAa,QAAQ,GAAA,CAAI,CAAA,CAAA,KAAK,EAAE,YAAY,CAAA,CAAE,KAAK,IAAI,CAAA;AAC7D,MAAA,KAAA,CAAM,KAAK,CAAA,4BAAA,EAA+B,EAAA,CAAG,YAAY,CAAA,eAAA,EAAkB,GAAG,YAAA,GAAe,CAAA,GAAI,GAAA,GAAM,EAAE,GAAG,UAAA,GAAa,CAAA,EAAA,EAAK,UAAU,CAAA,CAAA,GAAK,EAAE,CAAA,CAAE,CAAA;AAAA,IACnJ;AAEA,IAAA,IAAI,EAAA,CAAG,kBAAA,IAAsB,EAAA,CAAG,kBAAA,CAAmB,SAAS,CAAA,EAAG;AAC7D,MAAA,KAAA,CAAM,KAAK,CAAA,yCAAA,EAA4C,EAAA,CAAG,mBAAmB,IAAA,CAAK,IAAI,CAAC,CAAA,CAAE,CAAA;AAAA,IAC3F;AAEA,IAAA,IAAI,GAAG,2BAAA,EAA6B;AAClC,MAAA,KAAA,CAAM,IAAA,CAAK,CAAA,wBAAA,EAA2B,EAAA,CAAG,2BAA2B,CAAA,CAAE,CAAA;AAAA,IACxE;AAEA,IAAA,IAAI,KAAA,CAAM,SAAS,CAAA,EAAG;AACpB,MAAA,mBAAA,GAAsB;;AAAA;AAAA,EAAiC,KAAA,CAAM,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA;AAAA,IACzE;AAAA,EACF;AAEA,EAAA,MAAM,iBAAA,GAAoB,cAAA,IAAkB,GAAA,GACxC,6EAAA,GACA,2CAAA;AAGJ,EAAA,MAAM,MAAA,GAAS,mDAAmD,KAAK,CAAA;AAAA,EACvE,WAAA,CAAY,SAAS,CAAA,GAAI,CAAA,6BAAA,EAAgC,YAAY,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA,CAAA,GAAM,EAAE;AAAA,EACvF,UAAA,GAAa,CAAA,oBAAA,EAAuB,UAAU,CAAA,CAAA,GAAK,EAAE,CAAA,EAAG,iBAAiB,CAAA,EAAG,cAAc,CAAA,EAAG,mBAAmB,CAAA,EAAG,mBAAmB;;AAAA;AAAA;AAAA,wBAAA,EAI9G,cAAc,uBAAuB,iBAAiB;AAAA;AAAA;AAAA,gCAAA,CAAA;AAM9E,EAAA,MAAM,aAAA,GAAgB,CAACC,SAAAA,KAAyD;AAE9E,IAAA,IAAI,OAAA,GAAUA,UAAS,IAAA,EAAK;AAC5B,IAAA,IAAI,QAAQ,UAAA,CAAW,aAAa,KAAK,OAAA,CAAQ,UAAA,CAAW,OAAO,CAAA,EAAG;AACpE,MAAA,OAAA,GAAU,QAAQ,KAAA,CAAM,OAAA,CAAQ,OAAA,CAAQ,IAAI,IAAI,CAAC,CAAA;AACjD,MAAA,MAAM,QAAA,GAAW,OAAA,CAAQ,WAAA,CAAY,KAAK,CAAA;AAC1C,MAAA,IAAI,aAAa,EAAA,EAAI;AACnB,QAAA,OAAA,GAAU,OAAA,CAAQ,KAAA,CAAM,CAAA,EAAG,QAAQ,CAAA;AAAA,MACrC;AAAA,IACF,CAAA,MAAA,IAAW,OAAA,CAAQ,UAAA,CAAW,KAAK,CAAA,EAAG;AACpC,MAAA,OAAA,GAAU,OAAA,CAAQ,MAAM,CAAC,CAAA;AACzB,MAAA,MAAM,QAAA,GAAW,OAAA,CAAQ,WAAA,CAAY,KAAK,CAAA;AAC1C,MAAA,IAAI,aAAa,EAAA,EAAI;AACnB,QAAA,OAAA,GAAU,OAAA,CAAQ,KAAA,CAAM,CAAA,EAAG,QAAQ,CAAA;AAAA,MACrC;AAAA,IACF;AAEA,IAAA,OAAA,GAAU,QAAQ,IAAA,EAAK;AAGvB,IAAA,OAAO;A
AAA,MACL,KAAA,EAAO,KAAA;AAAA,MACP;AAAA,KACF;AAAA,EACF,CAAA;AAOA,EAAA,MAAM,WAAW,MAAM,MAAA,CAAO,YAAA,CAAa,MAAA,EAAQ,gBAAgB,gBAAgB,CAAA;AAGnF,EAAA,MAAM,MAAA,GAAS,cAAc,QAAQ,CAAA;AAQrC,EAAA,OAAO,MAAA;AACT;ACtHA,SAAS,oBACP,UAAA,EACA,MAAA,EACA,SAAA,EACA,UAAA,EACA,OAMA,IAAA,EACA;AACA,EAAA,OAAO;AAAA,IACL,UAAA,EAAY,kCAAA;AAAA,IACZ,MAAA,EAAQ,YAAA;AAAA,IACR,MAAM,oBAAA,EAAqB;AAAA,IAC3B,UAAA;AAAA,IACA,OAAA,EAAS,WAAW,MAAM,CAAA;AAAA,IAC1B,SAAA;AAAA,IACA,OAAA,EAAA,iBAAS,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAAA,IAChC,MAAA,EAAQ;AAAA,MACN,IAAA,EAAM,kBAAA;AAAA,MACN,MAAA,EAAQ,UAAA;AAAA,MACR,QAAA,EAAU;AAAA,QACR,EAAE,MAAM,sBAAA,EAAiC,KAAA,EAAO,MAAM,KAAA,EAAO,GAAA,EAAK,MAAM,GAAA,EAAI;AAAA,QAC5E;AAAA,UACE,IAAA,EAAM,mBAAA;AAAA,UACN,OAAO,KAAA,CAAM,KAAA;AAAA,UACb,GAAI,KAAA,CAAM,MAAA,IAAU,EAAE,MAAA,EAAQ,MAAM,MAAA,EAAO;AAAA,UAC3C,GAAI,KAAA,CAAM,MAAA,IAAU,EAAE,MAAA,EAAQ,MAAM,MAAA;AAAO;AAC7C;AACF,KACF;AAAA,IACA,GAAI,IAAA,KAAS,MAAA,GAAY,EAAE,IAAA,KAAS;AAAC,GACvC;AACF;AAEA,eAAsB,oBACpB,OAAA,EACA,eAAA,EACA,MAAA,EACA,MAAA,EACA,WACA,UAAA,EACoD;AACpD,EAAA,UAAA,CAAW,EAAA,EAAI,uBAAuB,WAAW,CAAA;AACjD,EAAA,UAAA,CAAW,EAAA,EAAI,qBAAqB,WAAW,CAAA;AAE/C,EAAA,MAAM,UAAA,GAAa,MAAM,mBAAA,CAAoB,gBAAA;AAAA,IAC3C,OAAA;AAAA,IAAS,eAAA;AAAA,IAAiB,MAAA,CAAO,YAAA;AAAA,IAAc,MAAA,CAAO;AAAA,GACxD;AAEA,EAAA,UAAA,CAAW,EAAA,EAAI,CAAA,SAAA,EAAY,UAAA,CAAW,MAAM,mBAAmB,UAAU,CAAA;AAIzE,EAAA,MAAM,cAAc,UAAA,CAAW,GAAA;AAAA,IAAI,CAAC,MAClC,mBAAA,CAAoB,MAAA,CAAO,YAAY,MAAA,EAAQ,SAAA,EAAW,gBAAgB,CAAC;AAAA,GAC7E;AAEA,EAAA,UAAA,CAAW,GAAA,EAAK,CAAA,kBAAA,EAAqB,WAAA,CAAY,MAAM,eAAe,UAAU,CAAA;AAEhF,EAAA,OAAO;AAAA,IACL,WAAA;AAAA,IACA,QAAQ,EAAE,eAAA,EAAiB,WAAW,MAAA,EAAQ,iBAAA,EAAmB,YAAY,MAAA;AAAO,GACtF;AACF;AAEA,eAAsB,kBACpB,OAAA,EACA,eAAA,EACA,MAAA,EACA,MAAA,EACA,WACA,UAAA,EACkD;AAClD,EAAA,UAAA,CAAW,EAAA,EAAI,uBAAuB,WAAW,CAAA;AACjD,EAAA,UAAA,CAAW,EAAA,EAAI,qBAAqB,WAAW,CAAA;AAE/C,EAAA,MAAM,QAAA,GAAW,MAAM,mBAAA,CAAoB,cAAA;AAAA,IACzC,OAAA;AAAA,IAAS,eAAA;AAAA,IAAiB,MAAA,CAAO,YAAA;AAAA,IAAc,MAAA,CAAO,IAAA;AAAA,IAAM,MAAA,CAAO;AAAA,GACrE;AAEA,EAAA,UAAA,CAAW,EAAA,EAAI,CAAA,SAAA,EAAY,QAAA,CAAS,MAAM,mBAAmB,UAAU,CAAA;AAEvE,EAAA,MAAM,cAAc,QAAA,CAAS,GAAA;AAAA,IAAI,CAAC,CAAA;AAAA;AAAA;AAAA;AAAA,MAIhC,oBAAoB,MAAA,CAAO,UAAA,EAAY,MAAA,EAAQ,SAAA,EAAW,cAAc,CAAA,EAAG;AAAA,QACzE,EAAE,IAAA,EAAM,aAAA,EAAe,KAAA,EAAO,CAAA,CAAE,OAAA,EAAS,OAAA,EAAS,YAAA,EAAc,MAAA,EAAQ,YAAA,EAAc,QAAA,EAAU,IAAA;AAAK,OACtG;AAAA;AAAA,GACH;AAEA,EAAA,UAAA,CAAW,GAAA,EAAK,CAAA,kBAAA,EAAqB,WAAA,CAAY,MAAM,aAAa,UAAU,CAAA;AAE9E,EAAA,OAAO;AAAA,IACL,WAAA;AAAA,IACA,QAAQ,EAAE,aAAA,EAAe,SAAS,MAAA,EAAQ,eAAA,EAAiB,YAAY,MAAA;AAAO,GAChF;AACF;AAEA,eAAsB,qBACpB,OAAA,EACA,eAAA,EACA,MAAA,EACA,MAAA,EACA,WACA,UAAA,EACqD;AACrD,EAAA,UAAA,CAAW,EAAA,EAAI,uBAAuB,WAAW,CAAA;AACjD,EAAA,UAAA,CAAW,EAAA,EAAI,qBAAqB,WAAW,CAAA;AAE/C,EAAA,MAAM,WAAA,GAAc,MAAM,mBAAA,CAAoB,iBAAA;AAAA,IAC5C,OAAA;AAAA,IAAS,eAAA;AAAA,IAAiB,MAAA,CAAO,YAAA;AAAA,IAAc,MAAA,CAAO,IAAA;AAAA,IAAM,MAAA,CAAO;AAAA,GACrE;AAEA,EAAA,UAAA,CAAW,EAAA,EAAI,CAAA,SAAA,EAAY,WAAA,CAAY,MAAM,mBAAmB,UAAU,CAAA;AAE1E,EAAA,MAAM,cAAc,WAAA,CAAY,GAAA;AAAA,IAAI,CAAC,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOnC,oBAAoB,MAAA,CAAO,UAAA,EAAY,MAAA,EAAQ,SAAA,EAAW,aAAa,CAAA,EAAG;AAAA,QACxE,IAAA,EAAM,aAAA;AAAA,QAAe,OAAO,CAAA,CAAE,UAAA;AAAA,QAAY,OAAA,EAAS,WAAA;AAAA,QAAa,MAAA,EAAQ,YAAA;AAAA,QAAc,QAAA,EAAU;AAAA,OACjG;AAAA;AAAA,GACH;AAEA,EAAA,UAAA,CAAW,GAAA,EAAK,CAAA,kBAAA,EAAqB,WAAA,CAAY,MAAM,gBAAgB,UAAU,CAAA;AAEjF,EAAA,OAAO;AAAA,IACL,WAAA;AAAA,IACA,QAAQ,EAAE,gBAAA,EAAkB,YAAY,MAAA,EAAQ,kBAAA,EAAoB,YAAY,MAAA;AAAO,GACzF;AACF;AAEA,eAAsB,oBACpB,OAAA,EACA,eAAA,EACA,QACA,MAAA,EACA,SAAA,EACA,YACAD,OAAAA,EAC2C;AAC3C,EAAA,MAAM,eAAA,GAAk
B,MAAA,CAAO,WAAA,CAAY,GAAA,CAAI,MAAM,CAAA;AACrD,EAAA,MAAM,aAAA,GAAgB,CAAC,EAAE,KAAA,EAAO,cAAA,EAAgB,OAAO,eAAA,CAAgB,IAAA,CAAK,IAAI,CAAA,EAAG,CAAA;AACnF,EAAA,MAAM,uBAA0E,EAAC;AACjF,EAAA,IAAI,UAAA,GAAa,CAAA;AACjB,EAAA,IAAI,YAAA,GAAe,CAAA;AACnB,EAAA,IAAI,MAAA,GAAS,CAAA;AACb,EAAA,MAAM,iBAA4C,EAAC;AAEnD,EAAA,UAAA,CAAW,EAAA,EAAI,qBAAA,EAAuB,WAAA,EAAa,EAAE,eAAe,CAAA;AAEpE,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,eAAA,CAAgB,QAAQ,CAAA,EAAA,EAAK;AAC/C,IAAA,MAAM,cAAA,GAAiB,gBAAgB,CAAC,CAAA;AACxC,IAAA,IAAI,CAAC,cAAA,EAAgB;AACrB,IAAA,MAAM,MAAM,EAAA,GAAK,IAAA,CAAK,MAAO,CAAA,GAAI,eAAA,CAAgB,SAAU,EAAE,CAAA;AAC7D,IAAA,UAAA,CAAW,GAAA,EAAK,CAAA,UAAA,EAAa,cAAc,CAAA,YAAA,CAAA,EAAgB,WAAA,EAAa;AAAA,MACtE,iBAAA,EAAmB,cAAA;AAAA,MACnB,oBAAA,EAAsB,CAAA;AAAA,MACtB,kBAAkB,eAAA,CAAgB,MAAA;AAAA,MAClC,aAAA,EAAe,UAAA;AAAA,MACf,eAAA,EAAiB,YAAA;AAAA,MACjB,oBAAA,EAAsB,CAAC,GAAG,oBAAoB,CAAA;AAAA,MAC9C;AAAA,KACD,CAAA;AAED,IAAA,MAAM,oBAAoB,MAAM,eAAA;AAAA,MAC9B,OAAA;AAAA,MAAS,CAAC,cAAc,CAAA;AAAA,MAAG,eAAA;AAAA,MAAiB,OAAO,4BAAA,IAAgC,KAAA;AAAA,MAAOA;AAAA,KAC5F;AAEA,IAAA,UAAA,IAAc,iBAAA,CAAkB,MAAA;AAChC,IAAA,oBAAA,CAAqB,KAAK,EAAE,UAAA,EAAY,gBAAgB,UAAA,EAAY,iBAAA,CAAkB,QAAQ,CAAA;AAM9F,IAAA,MAAM,cAAA,GAAiB,CAAC,EAAE,IAAA,EAAM,eAAe,KAAA,EAAO,cAAA,EAAgB,OAAA,EAAS,SAAA,EAAW,CAAA;AAE1F,IAAA,KAAA,MAAW,UAAU,iBAAA,EAAmB;AACtC,MAAA,IAAI;AACF,QAAA,MAAM,SAAA,GAAYE,0BAA0B,OAAA,EAAS,MAAA,CAAO,aAAa,MAAA,CAAO,SAAA,EAAW,OAAO,KAAK,CAAA;AACvG,QAAA,MAAM,GAAA,GAAM,mBAAA;AAAA,UACV,MAAA,CAAO,UAAA;AAAA,UAAY,MAAA;AAAA,UAAQ,SAAA;AAAA,UAAW,SAAA;AAAA,UAAW,SAAA;AAAA,UAAW;AAAA,SAC9D;AACA,QAAA,cAAA,CAAe,KAAK,GAAG,CAAA;AACvB,QAAA,YAAA,EAAA;AAAA,MACF,CAAA,CAAA,MAAQ;AACN,QAAA,MAAA,EAAA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,EAAA,UAAA,CAAW,GAAA,EAAK,CAAA,kBAAA,EAAqB,YAAY,CAAA,WAAA,CAAA,EAAe,UAAU,CAAA;AAE1E,EAAA,OAAO;AAAA,IACL,WAAA,EAAa,cAAA;AAAA,IACb,MAAA,EAAQ,EAAE,UAAA,EAAY,YAAA,EAAc,MAAA;AAAO,GAC7C;AACF;AAEA,eAAsB,cACpB,OAAA,EACA,eAAA,EACA,MAAA,EACA,MAAA,EACA,WACA,UAAA,EAC8C;AAC9C,EAAA,UAAA,CAAW,EAAA,EAAI,uBAAuB,WAAW,CAAA;AACjD,EAAA,UAAA,CAAW,EAAA,EAAI,8BAA8B,WAAW,CAAA;AAExD,EAAA,MAAM,UAAU,EAAC;AACjB,EAAA,KAAA,MAAW,QAAA,IAAY,OAAO,UAAA,EAAY;AACxC,IAAA,MAAM,YAAA,GAAe,MAAM,mBAAA,CAAoB,UAAA;AAAA,MAC7C,OAAA;AAAA,MAAS,eAAA;AAAA,MAAiB,MAAA,CAAO,QAAA;AAAA,MAAU;AAAA,KAC7C;AACA,IAAA,OAAA,CAAQ,IAAA,CAAK,GAAG,YAAY,CAAA;AAAA,EAC9B;AACA,EAAA,MAAM,IAAA,GAAO,OAAA;AAEb,EAAA,UAAA,CAAW,EAAA,EAAI,CAAA,SAAA,EAAY,IAAA,CAAK,MAAM,uBAAuB,UAAU,CAAA;AAEvE,EAAA,MAAM,aAAqC,EAAC;AAC5C,EAAA,MAAM,WAAA,GAAc,IAAA,CAAK,GAAA,CAAI,CAAC,CAAA,KAAM;AAClC,IAAA,MAAM,QAAA,GAAW,EAAE,QAAA,IAAY,SAAA;AAC/B,IAAA,UAAA,CAAW,QAAQ,CAAA,GAAA,CAAK,UAAA,CAAW,QAAQ,KAAK,CAAA,IAAK,CAAA;AAMrD,IAAA,OAAO,oBAAoB,MAAA,CAAO,UAAA,EAAY,MAAA,EAAQ,SAAA,EAAW,WAAW,CAAA,EAAG;AAAA,MAC7E,EAAE,IAAA,EAAM,aAAA,EAAe,KAAA,EAAO,QAAA,EAAiB,SAAS,SAAA,EAAe,MAAA,EAAQ,YAAA,EAAc,QAAA,EAAU,IAAA,EAAK;AAAA,MAC5G,EAAE,MAAM,aAAA,EAAe,KAAA,EAAO,OAAO,QAAA,EAAU,OAAA,EAAS,aAAA,EAAe,MAAA,EAAQ,YAAA;AAAa,KAC7F,CAAA;AAAA,EACH,CAAC,CAAA;AAED,EAAA,UAAA,CAAW,GAAA,EAAK,CAAA,kBAAA,EAAqB,WAAA,CAAY,MAAM,SAAS,UAAU,CAAA;AAE1E,EAAA,OAAO;AAAA,IACL,WAAA;AAAA,IACA,MAAA,EAAQ,EAAE,SAAA,EAAW,IAAA,CAAK,QAAQ,WAAA,EAAa,WAAA,CAAY,QAAQ,UAAA;AAAW,GAChF;AACF;AAEA,eAAsB,oBAAA,CACpB,eAAA,EACA,MAAA,EACA,UAAA,EACuF;AACvF,EAAA,UAAA,CAAW,EAAA,EAAI,uBAAuB,UAAU,CAAA;AAEhD,EAAA,MAAM,KAAA,GAAQ,OAAO,KAAA,IAAS,UAAA;AAC9B,EAAA,MAAM,eAAe,MAAA,CAAO,WAAA,IAAe,EAAC,EAAG,IAAI,MAAM,CAAA;AAEzD,EAAA,UAAA,CAAW,EAAA,EAAI,0BAA0B,YAAY,CAAA;AAErD,EAAA,MAAM,YAAY,MAAM,yBAAA;AAAA,IACtB,KAAA;AAAA,IACA,WAAA;AAAA,IACA,eAAA;AAAA,IACA,MAAA,CAAO,MAAA;AAAA,IACP,MAAA,CAAO,QAAA;AAAA,IACP,MAAA,CAAO,OAAA;AAAA,IACP,MAAA,CA
AO,WAAA;AAAA,IACP,MAAA,CAAO;AAAA,GACT;AAEA,EAAA,UAAA,CAAW,EAAA,EAAI,wBAAwB,UAAU,CAAA;AAEjD,EAAA,OAAO;AAAA,IACL,SAAS,SAAA,CAAU,OAAA;AAAA,IACnB,KAAA,EAAO,UAAU,KAAA,IAAS,KAAA;AAAA,IAC1B,MAAA,EAAQ,eAAA;AAAA,IACR,MAAA,EAAQ;AAAA,MACN,UAAA,EAAY,EAAA;AAAA,MACZ,YAAA,EAAc,UAAU,KAAA,IAAS;AAAA;AACnC,GACF;AACF;;;ACxRA,eAAe,SAAA,CACb,OAAA,EACA,OAAA,EACA,OAAA,EACe;AACf,EAAA,MAAM,WAAA,GAAe,wBAAA,CAA+C,QAAA,CAAS,OAAiB,CAAA;AAC9F,EAAA,MAAM,aAAA,GAAgB,WAAA,GAAe,OAAA,CAAQ,UAAA,GAAoC,MAAA;AACjF,EAAA,MAAM,QAAQ,MAAA,CAAO,KAAA,CAAM,IAAA,CAAK,OAAA,EAAmB,SAAS,aAAa,CAAA;AAC3E;AAEO,SAAS,mBAAmB,MAAA,EAA8C;AAC/E,EAAA,MAAM,EAAE,OAAA,EAAS,MAAA,EAAAF,OAAAA,EAAO,GAAI,MAAA;AAC5B,EAAA,MAAM,UAAU,qBAAA,CAAsB;AAAA,IACpC,KAAA,EAAO,QAAQ,MAAA,CAAO,KAAA;AAAA,IACtB,UAAU,MAAA,CAAO;AAAA,GAClB,CAAA;AAED,EAAA,OAAA,CAAQ,UAAA,CAAW,SAAA,CAAU,CAAC,GAAA,KAAQ;AACpC,IAAA,IAAI,CAAC,GAAA,EAAK;AACV,IAAAA,OAAAA,CAAO,IAAA,CAAK,gBAAA,EAAkB,EAAE,KAAA,EAAO,GAAA,CAAI,KAAA,EAAO,IAAA,EAAM,GAAA,CAAI,IAAA,EAAM,UAAA,EAAY,GAAA,CAAI,YAAY,CAAA;AAC9F,IAAA,SAAA,CAAU,SAAS,MAAA,EAAQ,GAAG,CAAA,CAAE,KAAA,CAAM,CAAC,KAAA,KAAU;AAC/C,MAAA,MAAM,UAAU,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AACrE,MAAAA,OAAAA,CAAO,KAAA,CAAM,YAAA,EAAc,EAAE,OAAO,GAAA,CAAI,KAAA,EAAO,KAAA,EAAO,OAAA,EAAS,OAAO,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,KAAA,GAAQ,QAAW,CAAA;AACxH,MAAA,MAAM,gBAAA,GAAoB,IAAI,MAAA,CAAoC,WAAA;AAClE,MAAA,SAAA,CAAU,SAAS,UAAA,EAAY;AAAA,QAC7B,YAAY,GAAA,CAAI,UAAA;AAAA,QAChB,QAAQ,GAAA,CAAI,MAAA;AAAA,QACZ,OAAO,GAAA,CAAI,KAAA;AAAA,QACX,SAAS,GAAA,CAAI,IAAA;AAAA,QACb,GAAI,gBAAA,GAAmB,EAAE,YAAA,EAAc,gBAAA,KAAqB,EAAC;AAAA,QAC7D,KAAA,EAAO;AAAA,OACR,CAAA,CAAE,KAAA,CAAM,MAAM;AAAA,MAAC,CAAC,CAAA;AACjB,MAAA,OAAA,CAAQ,OAAA,CAAQ,GAAA,CAAI,KAAA,EAAO,OAAO,CAAA;AAAA,IACpC,CAAC,CAAA;AAAA,EACH,CAAC,CAAA;AAED,EAAA,OAAA,CAAQ,KAAA,EAAM;AACd,EAAA,OAAO,OAAA;AACT;AAKA,eAAsB,SAAA,CACpB,OAAA,EACA,MAAA,EACA,GAAA,EACe;AACf,EAAA,MAAM,EAAE,SAAQ,GAAI,MAAA;AACpB,EAAA,MAAM,EAAE,UAAA,EAAY,MAAA,EAAQ,KAAA,EAAO,IAAA,EAAM,SAAQ,GAAI,GAAA;AAOrD,EAAA,MAAM,YAAA,GAAgB,IAAI,MAAA,CAAoC,WAAA;AAC9D,EAAA,MAAM,aAAA,GAAgB;AAAA,IACpB,UAAA;AAAA,IAAY,MAAA;AAAA,IAAQ,KAAA;AAAA,IAAO,OAAA;AAAA,IAC3B,GAAI,YAAA,GAAe,EAAE,YAAA,KAAiB;AAAC,GACzC;AASA,EAAA,MAAM,SAAA,CAAU,OAAA,EAAS,WAAA,EAAa,aAAa,CAAA;AAEnD,EAAA,MAAM,MAAA,GAAS,MAAA,CAAO,OAAA,CAAQ,OAAO,CAAA;AACrC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAA,CAAQ,OAAA,CAAQ,KAAA,EAAO,CAAA,6CAAA,EAAgD,OAAO,CAAA,CAAE,CAAA;AAChF,IAAA;AAAA,EACF;AACA,EAAA,MAAM,EAAE,eAAA,EAAiB,SAAA,EAAU,GAAI,MAAA;AAEvC,EAAA,MAAM,UAAA,GAAyB,CAAC,UAAA,EAAY,OAAA,EAAS,OAAO,KAAA,KAAU;AACpE,IAAA,SAAA,CAAU,SAAS,qBAAA,EAAuB;AAAA,MACxC,GAAG,aAAA;AAAA,MACH,UAAA;AAAA,MACA,QAAA,EAAU;AAAA,QACR,KAAA;AAAA,QAAO,UAAA;AAAA,QAAY,OAAA;AAAA,QACnB,GAAI,YAAA,GAAe,EAAE,YAAA,KAAiB,EAAC;AAAA,QACvC,GAAI,SAAS;AAAC;AAChB,KACD,CAAA,CAAE,KAAA,CAAM,MAAM;AAAA,IAAC,CAAC,CAAA;AAAA,EACnB,CAAA;AAEA,EAAA,MAAM,eAAe,YAA6B;AAChD,IAAA,OAAO,MAAM,OAAA,CAAQ,MAAA,CAAO,MAAA,CAAO,gBAAgB,UAAmB,CAAA;AAAA,EACxE,CAAA;AAEA,EAAA,IAAI,YAAY,sBAAA,EAAwB;AACtC,IAAA,MAAM,OAAA,GAAU,MAAM,YAAA,EAAa;AACnC,IAAA,MAAM,EAAE,WAAA,EAAa,MAAA,EAAO,GAAI,MAAM,mBAAA;AAAA,MACpC,OAAA;AAAA,MAAS,eAAA;AAAA,MAAiB,GAAA,CAAI,MAAA;AAAA,MAAiB,MAAA;AAAA,MAAQ,SAAA;AAAA,MAAW;AAAA,KACpE;AACA,IAAA,KAAA,MAAW,OAAO,WAAA,EAAa;AAC7B,MAAA,MAAM,SAAA,CAAU,SAAS,aAAA,EAAe,EAAE,YAAY,GAAA,EAAK,MAAA,EAAQ,YAAY,CAAA;AAAA,IACjF;AACA,IAAA,MAAM,SAAA,CAAU,SAAS,cAAA,EAAgB;AAAA,MACvC,GAAG,aAAA;AAAA,MACH;AAAA,KACD,CAAA;AACD,IAAA,OAAA,CAAQ,WAAA,EAAY;AAAA,EAEtB,CAAA,MAAA,IAAW,YAAY,oBAAA,EAAsB;AAC3C,IAAA,MAAM,OAAA,GAAU,MAAM,YAAA,EAAa;AACnC,IAAA,MAAM,EAAE,WAAA,EAAa,MAAA,EAAO,GAAI,MAAM,iBAAA;AAAA,MACpC,OAAA;AAAA,MAAS,eAAA;AAAA,M
AAiB,GAAA,CAAI,MAAA;AAAA,MAAiB,MAAA;AAAA,MAAQ,SAAA;AAAA,MAAW;AAAA,KACpE;AACA,IAAA,KAAA,MAAW,OAAO,WAAA,EAAa;AAC7B,MAAA,MAAM,SAAA,CAAU,SAAS,aAAA,EAAe,EAAE,YAAY,GAAA,EAAK,MAAA,EAAQ,YAAY,CAAA;AAAA,IACjF;AACA,IAAA,MAAM,SAAA,CAAU,SAAS,cAAA,EAAgB;AAAA,MACvC,GAAG,aAAA;AAAA,MACH;AAAA,KACD,CAAA;AACD,IAAA,OAAA,CAAQ,WAAA,EAAY;AAAA,EAEtB,CAAA,MAAA,IAAW,YAAY,uBAAA,EAAyB;AAC9C,IAAA,MAAM,OAAA,GAAU,MAAM,YAAA,EAAa;AACnC,IAAA,MAAM,EAAE,WAAA,EAAa,MAAA,EAAO,GAAI,MAAM,oBAAA;AAAA,MACpC,OAAA;AAAA,MAAS,eAAA;AAAA,MAAiB,GAAA,CAAI,MAAA;AAAA,MAAiB,MAAA;AAAA,MAAQ,SAAA;AAAA,MAAW;AAAA,KACpE;AACA,IAAA,KAAA,MAAW,OAAO,WAAA,EAAa;AAC7B,MAAA,MAAM,SAAA,CAAU,SAAS,aAAA,EAAe,EAAE,YAAY,GAAA,EAAK,MAAA,EAAQ,YAAY,CAAA;AAAA,IACjF;AACA,IAAA,MAAM,SAAA,CAAU,SAAS,cAAA,EAAgB;AAAA,MACvC,GAAG,aAAA;AAAA,MACH;AAAA,KACD,CAAA;AACD,IAAA,OAAA,CAAQ,WAAA,EAAY;AAAA,EAEtB,CAAA,MAAA,IAAW,YAAY,sBAAA,EAAwB;AAC7C,IAAA,MAAM,OAAA,GAAU,MAAM,YAAA,EAAa;AACnC,IAAA,MAAM,EAAE,WAAA,EAAa,MAAA,EAAO,GAAI,MAAM,mBAAA;AAAA,MACpC,OAAA;AAAA,MAAS,eAAA;AAAA,MAAiB,GAAA,CAAI,MAAA;AAAA,MAAiB,MAAA;AAAA,MAAQ,SAAA;AAAA,MAAW;AAAA,KACpE;AACA,IAAA,KAAA,MAAW,OAAO,WAAA,EAAa;AAC7B,MAAA,MAAM,SAAA,CAAU,SAAS,aAAA,EAAe,EAAE,YAAY,GAAA,EAAK,MAAA,EAAQ,YAAY,CAAA;AAAA,IACjF;AACA,IAAA,MAAM,SAAA,CAAU,SAAS,cAAA,EAAgB;AAAA,MACvC,GAAG,aAAA;AAAA,MACH;AAAA,KACD,CAAA;AACD,IAAA,OAAA,CAAQ,WAAA,EAAY;AAAA,EAEtB,CAAA,MAAA,IAAW,YAAY,gBAAA,EAAkB;AACvC,IAAA,MAAM,OAAA,GAAU,MAAM,YAAA,EAAa;AACnC,IAAA,MAAM,EAAE,WAAA,EAAa,MAAA,EAAO,GAAI,MAAM,aAAA;AAAA,MACpC,OAAA;AAAA,MAAS,eAAA;AAAA,MAAiB,GAAA,CAAI,MAAA;AAAA,MAAiB,MAAA;AAAA,MAAQ,SAAA;AAAA,MAAW;AAAA,KACpE;AACA,IAAA,KAAA,MAAW,OAAO,WAAA,EAAa;AAC7B,MAAA,MAAM,SAAA,CAAU,SAAS,aAAA,EAAe,EAAE,YAAY,GAAA,EAAK,MAAA,EAAQ,YAAY,CAAA;AAAA,IACjF;AACA,IAAA,MAAM,SAAA,CAAU,SAAS,cAAA,EAAgB;AAAA,MACvC,GAAG,aAAA;AAAA,MACH;AAAA,KACD,CAAA;AACD,IAAA,OAAA,CAAQ,WAAA,EAAY;AAAA,EAEtB,CAAA,MAAA,IAAW,YAAY,YAAA,EAAc;AACnC,IAAA,MAAM,YAAY,MAAM,oBAAA;AAAA,MACtB,eAAA;AAAA,MAAiB,GAAA,CAAI,MAAA;AAAA,MAAiB;AAAA,KACxC;AAOA,IAAA,MAAM,YAAY,GAAA,CAAI,MAAA;AAKtB,IAAA,MAAM,UAAA,GAAa,gBAAA,CAAiB,SAAA,CAAU,KAAA,EAAO,UAAU,MAAM,CAAA;AAErE,IAAA,MAAM,EAAE,YAAY,aAAA,EAAc,GAAI,MAAM,OAAA,CAAQ,MAAA,CAAO,MAAM,QAAA,CAAS;AAAA,MACxE,MAAM,SAAA,CAAU,KAAA;AAAA,MAChB,IAAA,EAAM,MAAA,CAAO,IAAA,CAAK,SAAA,CAAU,OAAO,CAAA;AAAA,MACnC,QAAQ,SAAA,CAAU,MAAA;AAAA,MAClB,UAAA;AAAA,MACA,cAAA,EAAgB,WAAA;AAAA,MAChB,gBAAA,EAAkB,UAAA;AAAA,MAClB,GAAI,UAAU,WAAA,GAAc,EAAE,oBAAoB,SAAA,CAAU,WAAA,KAAgB,EAAC;AAAA,MAC7E,GAAI,UAAU,MAAA,GAAS,EAAE,kBAAkB,SAAA,CAAU,MAAA,KAAW,EAAC;AAAA,MACjE,GAAI,UAAU,QAAA,GAAW,EAAE,UAAU,SAAA,CAAU,QAAA,KAAa,EAAC;AAAA,MAC7D;AAAA,KACD,CAAA;AAED,IAAA,MAAM,SAAA,CAAU,SAAS,cAAA,EAAgB;AAAA,MACvC,GAAG,aAAA;AAAA,MACH,QAAQ,EAAE,UAAA,EAAY,aAAA,EAAe,YAAA,EAAc,UAAU,KAAA;AAAM,KACpE,CAAA;AACD,IAAA,OAAA,CAAQ,WAAA,EAAY;AAAA,EAEtB,CAAA,MAAO;AACL,IAAA,OAAA,CAAQ,OAAA,CAAQ,KAAA,EAAO,CAAA,kBAAA,EAAqB,OAAO,CAAA,CAAE,CAAA;AAAA,EACvD;AACF;ACxQO,SAAS,oBAAoB,SAAA,EAA2B;AAC7D,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,GAAA,CAAI,SAAA,IAAa,MAAA;AACvC,EAAA,MAAM,SAAS,OAAA,CAAQ,GAAA,CAAI,UAAA,KAAe,QAAA,GACtC,QAAQ,MAAA,CAAO,OAAA;AAAA,IACb,QAAQ,MAAA,CAAO,SAAA,CAAU,EAAE,MAAA,EAAQ,uBAAuB,CAAA;AAAA,IAC1D,QAAQ,MAAA,CAAO,MAAA,CAAO,EAAE,KAAA,EAAO,MAAM,CAAA;AAAA,IACrC,OAAA,CAAQ,MAAA,CAAO,MAAA,CAAO,CAAC,EAAE,KAAA,EAAO,GAAA,EAAK,OAAA,EAAS,SAAA,EAAW,GAAG,IAAA,EAAK,KAAM;AACrE,MAAA,MAAM,OAAA,GAAU,MAAA,CAAO,IAAA,CAAK,IAAI,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,CAAA,CAAA,EAAI,IAAA,CAAK,SAAA,CAAU,IAAI,CAAC,CAAA,CAAA,GAAK,EAAA;AAC5E,MAAA,OAAO,CAAA,EAAG,SAAS,CAAA,EAAA,EAAK,GAAA,CAAI,WAAA,EAAa,CAAA,GAAA,EAAM,SAAS,CAAA,EAAA,EAAK,OAAO,CAAA,EAAG,OAAO,CAAA,CAAA;AAAA,IAChF,CAAC;AAAA,GACH,GACA,QAAQ,MAAA,CAAO
,OAAA;AAAA,IACb,OAAA,CAAQ,OAAO,SAAA,EAAU;AAAA,IACzB,QAAQ,MAAA,CAAO,MAAA,CAAO,EAAE,KAAA,EAAO,MAAM,CAAA;AAAA,IACrC,OAAA,CAAQ,OAAO,IAAA;AAAK,GACtB;AAEJ,EAAA,MAAMA,OAAAA,GAAS,QAAQ,YAAA,CAAa;AAAA,IAClC,KAAA;AAAA,IACA,WAAA,EAAa,EAAE,SAAA,EAAU;AAAA,IACzB,MAAA;AAAA,IACA,YAAY,CAAC,IAAI,OAAA,CAAQ,UAAA,CAAW,SAAS;AAAA,GAC9C,CAAA;AAED,EAAA,OAAOA,OAAAA;AACT;;;ACiBA,IAAM,aAAA,GAAgB;AAAA,EACpB,sBAAA;AAAA,EAAwB,YAAA;AAAA,EAAc,sBAAA;AAAA,EACtC,uBAAA;AAAA,EAAyB,oBAAA;AAAA,EAAsB;AACjD,CAAA;AAiBA,IAAM,UAAA,GAAa,IAAA,CAAK,OAAA,EAAQ,EAAG,gBAAgB,CAAA;AACnD,IAAM,UAAA,GAAa;AAAA,EACjB,YAAA,EAAc,CAAC,CAAA,KAA6B,UAAA,CAAW,CAAC,CAAA,GAAI,YAAA,CAAa,CAAA,EAAG,OAAO,CAAA,GAAI;AACzF,CAAA;AACA,IAAM,SAAA,GAAY,sBAAA;AAAA,EAChB,UAAA;AAAA,EACA,UAAA;AAAA,EACA,OAAA,CAAQ;AACV,CAAA,CAAE,MAAM,OAAO,CAAA;AAKf,IAAM,kBAAA,GAAsB,UAAU,SAAA,EAErB,OAAA;AACjB,IAAI,CAAC,kBAAA,IAAsB,MAAA,CAAO,KAAK,kBAAkB,CAAA,CAAE,WAAW,CAAA,EAAG;AACvE,EAAA,MAAM,IAAI,KAAA;AAAA,IACR;AAAA,GAEF;AACF;AAEA,SAAS,cAAc,OAAA,EAAoC;AACzD,EAAA,MAAM,QAAA,GAAW,mBAAoB,OAAO,CAAA;AAC5C,EAAA,IAAI,UAAU,OAAO,QAAA;AACrB,EAAA,MAAM,GAAA,GAAM,mBAAoB,SAAS,CAAA;AACzC,EAAA,IAAI,KAAK,OAAO,GAAA;AAChB,EAAA,MAAM,IAAI,KAAA;AAAA,IACR,mCAAmC,OAAO,CAAA,6CAAA;AAAA,GAC5C;AACF;AAEA,IAAM,gBAAA,GAAmB,SAAA,CAAU,QAAA,EAAU,OAAA,EAAS,SAAA;AACtD,IAAI,CAAC,gBAAA,EAAkB;AACrB,EAAA,MAAM,IAAI,MAAM,4DAA4D,CAAA;AAC9E;AACA,IAAM,cAAA,GAAyB,gBAAA;AAE/B,IAAM,YAAA,GAAe,OAAA,CAAQ,GAAA,CAAI,qBAAA,IAAyB,EAAA;AAC1D,IAAM,UAAA,GAAa,IAAA;AAInB,IAAM,MAAA,GAAS,oBAAoB,QAAQ,CAAA;AAI3C,SAAS,UAAU,CAAA,EAA8B;AAC/C,EAAA,OAAO,CAAC,CAAA,CAAE,IAAA,EAAM,CAAA,CAAE,KAAA,EAAO,EAAE,MAAA,IAAU,EAAA,EAAI,CAAA,CAAE,QAAA,IAAY,IAAI,CAAA,CAAE,OAAA,IAAW,EAAE,CAAA,CAAE,KAAK,GAAG,CAAA;AACtF;AAEA,SAAS,eAAe,CAAA,EAA6C;AACnE,EAAA,OAAO;AAAA,IACL,MAAM,CAAA,CAAE,IAAA;AAAA,IACR,OAAO,CAAA,CAAE,KAAA;AAAA,IACT,GAAI,CAAA,CAAE,QAAA,IAAY,EAAE,QAAA,EAAU,EAAE,QAAA,EAAS;AAAA,IACzC,GAAI,CAAA,CAAE,OAAA,IAAW,EAAE,OAAA,EAAS,EAAE,OAAA,EAAQ;AAAA,IACtC,GAAI,CAAA,CAAE,MAAA,IAAU,EAAE,MAAA,EAAQ,EAAE,MAAA;AAAO,GACrC;AACF;AAEA,IAAM,WAAA,uBAAkB,GAAA,EAA6B;AACrD,IAAM,UAAwC,EAAC;AAC/C,KAAA,MAAW,WAAW,aAAA,EAAe;AACnC,EAAA,MAAM,CAAA,GAAI,cAAc,OAAO,CAAA;AAC/B,EAAA,MAAM,GAAA,GAAM,UAAU,CAAC,CAAA;AACvB,EAAA,IAAI,MAAA,GAAS,WAAA,CAAY,GAAA,CAAI,GAAG,CAAA;AAChC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAA,GAAS,qBAAA,CAAsB,cAAA,CAAe,CAAC,CAAA,EAAG,MAAM,CAAA;AACxD,IAAA,WAAA,CAAY,GAAA,CAAI,KAAK,MAAM,CAAA;AAAA,EAC7B;AACA,EAAA,MAAM,SAAA,GAAmB;AAAA,IACvB,OAAA,EAAS,eAAA;AAAA,IACT,MAAM,CAAA,cAAA,EAAiB,CAAA,CAAE,IAAI,CAAA,CAAA,EAAI,EAAE,KAAK,CAAA,CAAA;AAAA,IACxC,MAAA,EAAQ,aAAA;AAAA,IACR,mBAAmB,CAAA,CAAE,IAAA;AAAA,IACrB,OAAO,CAAA,CAAE;AAAA,GACX;AACA,EAAA,OAAA,CAAQ,OAAO,CAAA,GAAI,EAAE,eAAA,EAAiB,QAAQ,SAAA,EAAU;AAC1D;AAUA,SAAS,gBAAgB,GAAA,EAAyE;AAChG,EAAA,MAAM,MAAA,GAAS,IAAI,GAAA,CAAI,GAAG,CAAA;AAC1B,EAAA,MAAM,QAAA,GAAY,OAAO,QAAA,CAAS,OAAA,CAAQ,KAAK,EAAE,CAAA,KAAM,UAAU,OAAA,GAAU,MAAA;AAC3E,EAAA,MAAM,OAAO,MAAA,CAAO,QAAA;AACpB,EAAA,MAAM,IAAA,GAAO,OAAO,IAAA,GAChB,MAAA,CAAO,OAAO,IAAI,CAAA,GAClB,QAAA,KAAa,OAAA,GAAU,GAAA,GAAM,EAAA;AACjC,EAAA,OAAO,EAAE,QAAA,EAAU,IAAA,EAAM,IAAA,EAAK;AAChC;AAIA,eAAe,YAAA,GAAgC;AAC7C,EAAA,IAAI,CAAC,YAAA,EAAc;AACjB,IAAA,MAAA,CAAO,KAAK,uDAAkD,CAAA;AAC9D,IAAA,OAAO,EAAA;AAAA,EACT;AAEA,EAAA,MAAM,QAAA,GAAW,MAAM,KAAA,CAAM,CAAA,EAAG,cAAc,CAAA,kBAAA,CAAA,EAAsB;AAAA,IAClE,MAAA,EAAQ,MAAA;AAAA,IACR,OAAA,EAAS,EAAE,cAAA,EAAgB,kBAAA,EAAmB;AAAA,IAC9C,MAAM,IAAA,CAAK,SAAA,CAAU,EAAE,MAAA,EAAQ,cAAc;AAAA,GAC9C,CAAA;AAED,EAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,IAAA,MAAM,IAAI,MAAM,CAAA,uBAAA,EAA0B,QAAA,CAAS,MAAM,CAAA,CAAA,EAAI,QAAA,CAAS,UAAU,CAAA,CAAE,CAAA;AAAA,EACpF;AAEA,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,MAAM,SAAS
,IAAA,EAAK;AACtC,EAAA,OAAO,KAAA;AACT;AAEA,eAAe,IAAA,GAAO;AACpB,EAAA,MAAA,CAAO,IAAA,CAAK,gBAAA,EAAkB,EAAE,OAAA,EAAS,gBAAgB,CAAA;AACzD,EAAA,MAAM,YAAA,GAAe,MAAM,YAAA,EAAa;AACxC,EAAA,MAAA,CAAO,KAAK,eAAe,CAAA;AAK3B,EAAA,MAAM,EAAE,QAAA,EAAU,IAAA,EAAM,IAAA,EAAK,GAAI,gBAAgB,cAAc,CAAA;AAC/D,EAAA,MAAM,IAAA,GAAO,CAAA,OAAA,EAAU,QAAA,EAAU,CAAA,CAAA;AACjC,EAAA,MAAM,EAAA,GAAoB;AAAA,IACxB,EAAA,EAAI,IAAA;AAAA,IACJ,KAAA,EAAO,iBAAiB,IAAI,CAAA,CAAA;AAAA,IAC5B,IAAA;AAAA,IACA,IAAA;AAAA,IACA,QAAA;AAAA,IACA,KAAA,EAAO,eAAe,IAAI,CAAA;AAAA,GAC5B;AACA,EAAA,MAAM,OAAA,GAAU,IAAI,sBAAA,EAAuB;AAC3C,EAAA,gBAAA,CAAiB,SAAS,IAAA,EAAM,EAAE,QAAQ,YAAA,EAAc,OAAA,EAAS,IAAI,CAAA;AAErE,EAAA,MAAM,OAAA,GAAU,IAAI,cAAA,CAAe;AAAA,IACjC,EAAA;AAAA,IACA,OAAA;AAAA,IACA,SAAS,YAAY;AACnB,MAAA,IAAI;AACF,QAAA,OAAO,MAAM,YAAA,EAAa;AAAA,MAC5B,SAAS,GAAA,EAAK;AACZ,QAAA,MAAA,CAAO,MAAM,6BAAA,EAA+B;AAAA,UAC1C,OAAO,GAAA,YAAe,KAAA,GAAQ,GAAA,CAAI,OAAA,GAAU,OAAO,GAAG;AAAA,SACvD,CAAA;AACD,QAAA,OAAO,IAAA;AAAA,MACT;AAAA,IACF,CAAA;AAAA;AAAA;AAAA,IAGA,OAAA,EAAS,CAAC,GAAA,KAAQ;AAChB,MAAA,MAAA,CAAO,KAAA,CAAM,iBAAiB,EAAE,IAAA,EAAM,IAAI,IAAA,EAAM,OAAA,EAAS,GAAA,CAAI,OAAA,EAAS,CAAA;AAAA,IACxE;AAAA,GACD,CAAA;AACD,EAAA,MAAM,OAAA,CAAQ,KAAA;AAEd,EAAA,MAAM,WAAW,kBAAA,CAAmB;AAAA,IAClC,OAAA;AAAA,IACA,QAAA,EAAU,aAAA;AAAA,IACV,OAAA;AAAA,IACA;AAAA,GACD,CAAA;AAED,EAAA,MAAA,CAAO,KAAK,WAAA,EAAa;AAAA,IACvB,OAAA,EAAS,cAAA;AAAA,IACT,SAAS,MAAA,CAAO,WAAA;AAAA,MACd,MAAA,CAAO,QAAQ,OAAO,CAAA,CAAE,IAAI,CAAC,CAAC,IAAI,CAAC,CAAA,KAAM,CAAC,EAAA,EAAI,CAAA,EAAG,EAAE,SAAA,CAAU,iBAAiB,MAAM,CAAA,CAAE,SAAA,CAAU,KAAK,CAAA,CAAE,CAAC;AAAA;AAC1G,GACD,CAAA;AAED,EAAA,MAAM,MAAA,GAAS,YAAA,CAAa,CAAC,GAAA,EAAK,GAAA,KAAQ;AACxC,IAAA,IAAI,GAAA,CAAI,QAAQ,SAAA,EAAW;AACzB,MAAA,GAAA,CAAI,SAAA,CAAU,GAAA,EAAK,EAAE,cAAA,EAAgB,oBAAoB,CAAA;AACzD,MAAA,GAAA,CAAI,IAAI,IAAA,CAAK,SAAA,CAAU,EAAE,MAAA,EAAQ,IAAA,EAAM,CAAC,CAAA;AAAA,IAC1C,CAAA,MAAO;AACL,MAAA,GAAA,CAAI,UAAU,GAAG,CAAA;AACjB,MAAA,GAAA,CAAI,GAAA,EAAI;AAAA,IACV;AAAA,EACF,CAAC,CAAA;AACD,EAAA,MAAA,CAAO,MAAA,CAAO,YAAY,MAAM;AAC9B,IAAA,MAAA,CAAO,IAAA,CAAK,uBAAA,EAAyB,EAAE,IAAA,EAAM,YAAY,CAAA;AAAA,EAC3D,CAAC,CAAA;AAED,EAAA,MAAM,WAAW,YAAY;AAC3B,IAAA,MAAA,CAAO,KAAK,eAAe,CAAA;AAC3B,IAAA,QAAA,CAAS,OAAA,EAAQ;AACjB,IAAA,MAAM,QAAQ,OAAA,EAAQ;AACtB,IAAA,MAAA,CAAO,KAAA,EAAM;AACb,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB,CAAA;AAEA,EAAA,OAAA,CAAQ,EAAA,CAAG,WAAW,QAAQ,CAAA;AAC9B,EAAA,OAAA,CAAQ,EAAA,CAAG,UAAU,QAAQ,CAAA;AAC/B;AAEA,IAAA,EAAK,CAAE,KAAA,CAAM,CAAC,KAAA,KAAU;AACtB,EAAA,MAAA,CAAO,MAAM,OAAA,EAAS,EAAE,KAAA,EAAO,KAAA,YAAiB,QAAQ,KAAA,CAAM,OAAA,GAAU,MAAA,CAAO,KAAK,GAAG,KAAA,EAAO,KAAA,YAAiB,QAAQ,KAAA,CAAM,KAAA,GAAQ,QAAW,CAAA;AAChJ,EAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAChB,CAAC,CAAA","file":"worker-main.js","sourcesContent":["/**\n * Prompt builders for annotation detection motivations\n *\n * Provides static methods to build AI prompts for each Web Annotation motivation type.\n * Extracted from worker implementations to centralize prompt logic.\n */\n\nexport class MotivationPrompts {\n /**\n * Build a prompt for detecting comment-worthy passages\n *\n * @param content - The text content to analyze (will be truncated to 8000 chars)\n * @param instructions - Optional user-provided instructions\n * @param tone - Optional tone guidance (e.g., \"academic\", \"conversational\")\n * @param density - Optional target number of comments per 2000 words\n * @returns Formatted prompt string\n */\n static buildCommentPrompt(\n content: string,\n instructions?: string,\n tone?: string,\n density?: number\n ): string {\n let prompt: string;\n\n if (instructions) {\n // User provided specific instructions 
- minimal prompt, let instructions drive behavior\n const toneGuidance = tone ? ` Use a ${tone} tone.` : '';\n const densityGuidance = density\n ? `\\n\\nAim for approximately ${density} comments per 2000 words of text.`\n : ''; // Let user instructions determine density\n\n prompt = `Add comments to passages in this text following these instructions:\n\n${instructions}${toneGuidance}${densityGuidance}\n\nText to analyze:\n---\n${content.substring(0, 8000)}\n---\n\nReturn a JSON array of comments. Each comment must have:\n- \"exact\": the exact text passage being commented on (quoted verbatim from source)\n- \"start\": character offset where the passage starts\n- \"end\": character offset where the passage ends\n- \"prefix\": up to 32 characters of text immediately before the passage\n- \"suffix\": up to 32 characters of text immediately after the passage\n- \"comment\": your comment following the instructions above\n\nRespond with a valid JSON array.\n\nExample:\n[\n {\"exact\": \"the quarterly review meeting\", \"start\": 142, \"end\": 169, \"prefix\": \"We need to schedule \", \"suffix\": \" for next month.\", \"comment\": \"Who will lead this? Should we invite the external auditors?\"}\n]`;\n } else {\n // No specific instructions - fall back to explanatory/educational mode\n const toneGuidance = tone\n ? `\\n\\nTone: Use a ${tone} style in your comments.`\n : '';\n const densityGuidance = density\n ? `\\n- Aim for approximately ${density} comments per 2000 words`\n : `\\n- Aim for 3-8 comments per 2000 words (not too sparse or dense)`;\n\n prompt = `Identify passages in this text that would benefit from explanatory comments.\nFor each passage, provide contextual information, clarification, or background.${toneGuidance}\n\nGuidelines:\n- Select passages that reference technical terms, historical figures, complex concepts, or unclear references\n- Provide comments that ADD VALUE beyond restating the text\n- Focus on explanation, background, or connections to other ideas\n- Avoid obvious or trivial comments\n- Keep comments concise (1-3 sentences typically)${densityGuidance}\n\nText to analyze:\n---\n${content.substring(0, 8000)}\n---\n\nReturn a JSON array of comments. Each comment should have:\n- \"exact\": the exact text passage being commented on (quoted verbatim from source)\n- \"start\": character offset where the passage starts\n- \"end\": character offset where the passage ends\n- \"prefix\": up to 32 characters of text immediately before the passage\n- \"suffix\": up to 32 characters of text immediately after the passage\n- \"comment\": your explanatory comment (1-3 sentences, provide context/background/clarification)\n\nRespond with a valid JSON array.\n\nExample format:\n[\n {\"exact\": \"Ouranos\", \"start\": 52, \"end\": 59, \"prefix\": \"In the beginning, \", \"suffix\": \" ruled the universe\", \"comment\": \"Ouranos (also spelled Uranus) is the primordial Greek deity personifying the sky. 
In Hesiod's Theogony, he is the son and husband of Gaia (Earth) and father of the Titans.\"}\n]`;\n }\n\n return prompt;\n }\n\n /**\n * Build a prompt for detecting highlight-worthy passages\n *\n * @param content - The text content to analyze (will be truncated to 8000 chars)\n * @param instructions - Optional user-provided instructions\n * @param density - Optional target number of highlights per 2000 words\n * @returns Formatted prompt string\n */\n static buildHighlightPrompt(\n content: string,\n instructions?: string,\n density?: number\n ): string {\n let prompt: string;\n\n if (instructions) {\n // User provided specific instructions - minimal prompt, let instructions drive behavior\n const densityGuidance = density\n ? `\\n\\nAim for approximately ${density} highlights per 2000 words of text.`\n : ''; // Let user instructions determine density\n\n prompt = `Identify passages in this text to highlight following these instructions:\n\n${instructions}${densityGuidance}\n\nText to analyze:\n---\n${content.substring(0, 8000)}\n---\n\nReturn a JSON array of highlights. Each highlight must have:\n- \"exact\": the exact text passage to highlight (quoted verbatim from source)\n- \"start\": character offset where the passage starts\n- \"end\": character offset where the passage ends\n- \"prefix\": up to 32 characters of text immediately before the passage\n- \"suffix\": up to 32 characters of text immediately after the passage\n\nRespond with a valid JSON array.\n\nExample:\n[\n {\"exact\": \"revenue grew 45% year-over-year\", \"start\": 142, \"end\": 174, \"prefix\": \"In Q3 2024, \", \"suffix\": \", exceeding all forecasts.\"}\n]`;\n } else {\n // No specific instructions - fall back to importance/salience mode\n const densityGuidance = density\n ? `\\n- Aim for approximately ${density} highlights per 2000 words`\n : `\\n- Aim for 3-8 highlights per 2000 words (be selective)`;\n\n prompt = `Identify passages in this text that merit highlighting for their importance or salience.\nFocus on content that readers should notice and remember.\n\nGuidelines:\n- Highlight key claims, findings, or conclusions\n- Highlight important definitions, terminology, or concepts\n- Highlight notable quotes or particularly striking statements\n- Highlight critical decisions, action items, or turning points\n- Select passages that are SIGNIFICANT, not just interesting\n- Avoid trivial or obvious content${densityGuidance}\n\nText to analyze:\n---\n${content.substring(0, 8000)}\n---\n\nReturn a JSON array of highlights. Each highlight should have:\n- \"exact\": the exact text passage to highlight (quoted verbatim from source)\n- \"start\": character offset where the passage starts\n- \"end\": character offset where the passage ends\n- \"prefix\": up to 32 characters of text immediately before the passage\n- \"suffix\": up to 32 characters of text immediately after the passage\n\nRespond with a valid JSON array.\n\nExample format:\n[\n {\"exact\": \"we will discontinue support for legacy systems by March 2025\", \"start\": 52, \"end\": 113, \"prefix\": \"After careful consideration, \", \"suffix\": \". 
This decision affects\"}\n]`;\n }\n\n return prompt;\n }\n\n /**\n * Build a prompt for detecting assessment-worthy passages\n *\n * @param content - The text content to analyze (will be truncated to 8000 chars)\n * @param instructions - Optional user-provided instructions\n * @param tone - Optional tone guidance (e.g., \"critical\", \"supportive\")\n * @param density - Optional target number of assessments per 2000 words\n * @returns Formatted prompt string\n */\n static buildAssessmentPrompt(\n content: string,\n instructions?: string,\n tone?: string,\n density?: number\n ): string {\n let prompt: string;\n\n if (instructions) {\n // User provided specific instructions - minimal prompt, let instructions drive behavior\n const toneGuidance = tone ? ` Use a ${tone} tone.` : '';\n const densityGuidance = density\n ? `\\n\\nAim for approximately ${density} assessments per 2000 words of text.`\n : ''; // Let user instructions determine density\n\n prompt = `Assess passages in this text following these instructions:\n\n${instructions}${toneGuidance}${densityGuidance}\n\nText to analyze:\n---\n${content.substring(0, 8000)}\n---\n\nReturn a JSON array of assessments. Each assessment must have:\n- \"exact\": the exact text passage being assessed (quoted verbatim from source)\n- \"start\": character offset where the passage starts\n- \"end\": character offset where the passage ends\n- \"prefix\": up to 32 characters of text immediately before the passage\n- \"suffix\": up to 32 characters of text immediately after the passage\n- \"assessment\": your assessment following the instructions above\n\nRespond with a valid JSON array.\n\nExample:\n[\n {\"exact\": \"the quarterly revenue target\", \"start\": 142, \"end\": 169, \"prefix\": \"We established \", \"suffix\": \" for Q4 2024.\", \"assessment\": \"This target seems ambitious given market conditions. Consider revising based on recent trends.\"}\n]`;\n } else {\n // No specific instructions - fall back to analytical/evaluation mode\n const toneGuidance = tone\n ? `\\n\\nTone: Use a ${tone} style in your assessments.`\n : '';\n const densityGuidance = density\n ? `\\n- Aim for approximately ${density} assessments per 2000 words`\n : `\\n- Aim for 2-6 assessments per 2000 words (focus on key passages)`;\n\n prompt = `Identify passages in this text that merit critical assessment or evaluation.\nFor each passage, provide analysis of its validity, strength, or implications.${toneGuidance}\n\nGuidelines:\n- Select passages containing claims, arguments, conclusions, or assertions\n- Assess evidence quality, logical soundness, or practical implications\n- Provide assessments that ADD INSIGHT beyond restating the text\n- Focus on passages where evaluation would help readers form judgments\n- Keep assessments concise yet substantive (1-3 sentences typically)${densityGuidance}\n\nText to analyze:\n---\n${content.substring(0, 8000)}\n---\n\nReturn a JSON array of assessments. 
Each assessment should have:\n- \"exact\": the exact text passage being assessed (quoted verbatim from source)\n- \"start\": character offset where the passage starts\n- \"end\": character offset where the passage ends\n- \"prefix\": up to 32 characters of text immediately before the passage\n- \"suffix\": up to 32 characters of text immediately after the passage\n- \"assessment\": your analytical assessment (1-3 sentences, evaluate validity/strength/implications)\n\nRespond with a valid JSON array.\n\nExample format:\n[\n {\"exact\": \"AI will replace most jobs by 2030\", \"start\": 52, \"end\": 89, \"prefix\": \"Many experts predict that \", \"suffix\": \", fundamentally reshaping\", \"assessment\": \"This claim lacks nuance and supporting evidence. Employment patterns historically show job transformation rather than wholesale replacement. The timeline appears speculative without specific sector analysis.\"}\n]`;\n }\n\n return prompt;\n }\n\n /**\n * Build a prompt for detecting structural tags\n *\n * @param content - The full text content to analyze (NOT truncated for structural analysis)\n * @param category - The specific category to detect\n * @param schemaName - Human-readable schema name\n * @param schemaDescription - Schema description\n * @param schemaDomain - Schema domain\n * @param categoryDescription - Category description\n * @param categoryExamples - Example questions/guidance for this category\n * @returns Formatted prompt string\n */\n static buildTagPrompt(\n content: string,\n category: string,\n schemaName: string,\n schemaDescription: string,\n schemaDomain: string,\n categoryDescription: string,\n categoryExamples: string[]\n ): string {\n // Build prompt with schema context and category-specific guidance\n const prompt = `You are analyzing a text using the ${schemaName} framework.\n\nSchema: ${schemaDescription}\nDomain: ${schemaDomain}\n\nYour task: Identify passages that serve the structural role of \"${category}\".\n\nCategory: ${category}\nDescription: ${categoryDescription}\nKey questions:\n${categoryExamples.map(ex => `- ${ex}`).join('\\n')}\n\nGuidelines:\n- Focus on STRUCTURAL FUNCTION, not semantic content\n- A passage serves the \"${category}\" role if it performs this function in the document's structure\n- Look for passages that explicitly fulfill this role\n- Passages can be sentences, paragraphs, or sections\n- Aim for precision - only tag passages that clearly serve this structural role\n- Typical documents have 1-5 instances of each category (some may have 0)\n\nText to analyze:\n---\n${content}\n---\n\nReturn a JSON array of tags. 
Each tag should have:\n- \"exact\": the exact text passage (quoted verbatim from source)\n- \"start\": character offset where the passage starts\n- \"end\": character offset where the passage ends\n- \"prefix\": up to 32 characters of text immediately before the passage\n- \"suffix\": up to 32 characters of text immediately after the passage\n\nRespond with a valid JSON array.\n\nExample format:\n[\n {\"exact\": \"What duty did the defendant owe?\", \"start\": 142, \"end\": 175, \"prefix\": \"The central question is: \", \"suffix\": \" This question must be\"},\n {\"exact\": \"In tort law, a duty of care is established when...\", \"start\": 412, \"end\": 520, \"prefix\": \"Legal framework:\\\\n\", \"suffix\": \"\\\\n\\\\nApplying this standard\"}\n]`;\n\n return prompt;\n }\n}\n","/**\n * Response parsers for annotation detection motivations\n *\n * Provides static methods to parse and validate AI responses for each motivation type.\n * Includes offset validation and correction logic.\n * Extracted from worker implementations to centralize parsing logic.\n *\n * NOTE: These are static utility methods without logger access.\n * Console statements kept for debugging - consider adding logger parameter in future.\n */\n\nimport { validateAndCorrectOffsets } from '@semiont/api-client';\n\n/**\n * Best-effort extractor that pulls a JSON array of objects out of a raw\n * LLM response. Tolerates:\n * - markdown code fences (``` / ```json)\n * - prose before/after the array\n * - stray non-JSON tokens between array elements (a common\n * hallucination: e.g. a line like `wide: 0,` inserted between two\n * well-formed objects).\n *\n * Strategy: try strict `JSON.parse` first (fast path); on failure, walk\n * between the outermost `[` and `]` and parse each balanced `{ ... }`\n * object independently, skipping any that don't parse. Returns the\n * recovered objects — callers should still filter/validate fields.\n *\n * Exported for direct unit testing of the state machine edge cases\n * (nested braces in strings, escape sequences, empty/garbage input).\n */\nexport function extractObjectsFromArray(response: string): unknown[] {\n let cleaned = response.trim();\n\n // Strip markdown code fences if present\n if (cleaned.startsWith('```')) {\n cleaned = cleaned.replace(/^```(?:json)?\\s*\\n?/, '').replace(/\\n?```\\s*$/, '');\n }\n\n // Fast path: well-formed JSON\n try {\n const parsed = JSON.parse(cleaned);\n return Array.isArray(parsed) ? parsed : [];\n } catch {\n // fall through to tolerant parse\n }\n\n // Tolerant path: extract each top-level `{ ... }` from within the\n // first `[` / last `]`, parse independently. If the response was\n // truncated mid-stream (no closing `]`), fall back to end-of-string\n // so we still recover whatever closed cleanly before the cutoff.\n const start = cleaned.indexOf('[');\n if (start === -1) return [];\n const endBracket = cleaned.lastIndexOf(']');\n const end = endBracket > start ? 
endBracket : cleaned.length;\n\n const inner = cleaned.slice(start + 1, end);\n const objects: unknown[] = [];\n let depth = 0;\n let objStart = -1;\n let inString = false;\n let escape = false;\n\n for (let i = 0; i < inner.length; i++) {\n const ch = inner[i];\n if (escape) { escape = false; continue; }\n if (ch === '\\\\') { escape = true; continue; }\n if (ch === '\"') { inString = !inString; continue; }\n if (inString) continue;\n if (ch === '{') {\n if (depth === 0) objStart = i;\n depth++;\n } else if (ch === '}') {\n depth--;\n if (depth === 0 && objStart !== -1) {\n try {\n objects.push(JSON.parse(inner.slice(objStart, i + 1)));\n } catch {\n // Skip malformed object\n }\n objStart = -1;\n }\n }\n }\n\n return objects;\n}\n\n/**\n * Represents a detected comment with validated position\n */\nexport interface CommentMatch {\n exact: string;\n start: number;\n end: number;\n prefix?: string;\n suffix?: string;\n comment: string;\n}\n\n/**\n * Represents a detected highlight with validated position\n */\nexport interface HighlightMatch {\n exact: string;\n start: number;\n end: number;\n prefix?: string;\n suffix?: string;\n}\n\n/**\n * Represents a detected assessment with validated position\n */\nexport interface AssessmentMatch {\n exact: string;\n start: number;\n end: number;\n prefix?: string;\n suffix?: string;\n assessment: string;\n}\n\n/**\n * Represents a detected tag with validated position\n */\nexport interface TagMatch {\n exact: string;\n start: number;\n end: number;\n prefix?: string;\n suffix?: string;\n category: string;\n}\n\nexport class MotivationParsers {\n /**\n * Parse and validate AI response for comment detection\n *\n * @param response - Raw AI response string (may include markdown code fences)\n * @param content - Original content to validate offsets against\n * @returns Array of validated comment matches\n */\n static parseComments(response: string, content: string): CommentMatch[] {\n try {\n const parsed = extractObjectsFromArray(response);\n\n // Validate and filter\n const valid = parsed.filter((c): c is CommentMatch =>\n !!c && typeof c === 'object' &&\n typeof (c as any).exact === 'string' &&\n typeof (c as any).start === 'number' &&\n typeof (c as any).end === 'number' &&\n typeof (c as any).comment === 'string' &&\n (c as any).comment.trim().length > 0\n );\n\n console.log(`[MotivationParsers] Parsed ${valid.length} valid comments from ${parsed.length} total`);\n\n // Validate and correct AI's offsets, then extract proper context\n // AI sometimes returns offsets that don't match the actual text position\n const validatedComments: CommentMatch[] = [];\n\n for (const comment of valid) {\n try {\n const validated = validateAndCorrectOffsets(content, comment.start, comment.end, comment.exact);\n validatedComments.push({\n ...comment,\n start: validated.start,\n end: validated.end,\n prefix: validated.prefix,\n suffix: validated.suffix\n });\n } catch (error) {\n console.warn(`[MotivationParsers] Skipping invalid comment \"${comment.exact}\":`, error);\n // Skip this comment - AI hallucinated text that doesn't exist\n }\n }\n\n return validatedComments;\n } catch (error) {\n console.error('[MotivationParsers] Failed to parse AI comment response:', error);\n return [];\n }\n }\n\n /**\n * Parse and validate AI response for highlight detection\n *\n * @param response - Raw AI response string (may include markdown code fences)\n * @param content - Original content to validate offsets against\n * @returns Array of validated highlight matches\n */\n 
static parseHighlights(response: string, content: string): HighlightMatch[] {\n try {\n const parsed = extractObjectsFromArray(response);\n\n // Validate and filter results\n const highlights = parsed.filter((h): h is HighlightMatch =>\n !!h && typeof h === 'object' &&\n typeof (h as any).exact === 'string' &&\n typeof (h as any).start === 'number' &&\n typeof (h as any).end === 'number'\n );\n\n // Validate and correct AI's offsets, then extract proper context\n // AI sometimes returns offsets that don't match the actual text position\n const validatedHighlights: HighlightMatch[] = [];\n\n for (const highlight of highlights) {\n try {\n const validated = validateAndCorrectOffsets(content, highlight.start, highlight.end, highlight.exact);\n validatedHighlights.push({\n ...highlight,\n start: validated.start,\n end: validated.end,\n prefix: validated.prefix,\n suffix: validated.suffix\n });\n } catch (error) {\n console.warn(`[MotivationParsers] Skipping invalid highlight \"${highlight.exact}\":`, error);\n // Skip this highlight - AI hallucinated text that doesn't exist\n }\n }\n\n return validatedHighlights;\n } catch (error) {\n console.error('[MotivationParsers] Failed to parse AI highlight response:', error);\n console.error('Raw response:', response);\n return [];\n }\n }\n\n /**\n * Parse and validate AI response for assessment detection\n *\n * @param response - Raw AI response string (may include markdown code fences)\n * @param content - Original content to validate offsets against\n * @returns Array of validated assessment matches\n */\n static parseAssessments(response: string, content: string): AssessmentMatch[] {\n try {\n const parsed = extractObjectsFromArray(response);\n\n // Validate and filter results\n const assessments = parsed.filter((a): a is AssessmentMatch =>\n !!a && typeof a === 'object' &&\n typeof (a as any).exact === 'string' &&\n typeof (a as any).start === 'number' &&\n typeof (a as any).end === 'number' &&\n typeof (a as any).assessment === 'string'\n );\n\n // Validate and correct AI's offsets, then extract proper context\n // AI sometimes returns offsets that don't match the actual text position\n const validatedAssessments: AssessmentMatch[] = [];\n\n for (const assessment of assessments) {\n try {\n const validated = validateAndCorrectOffsets(content, assessment.start, assessment.end, assessment.exact);\n validatedAssessments.push({\n ...assessment,\n start: validated.start,\n end: validated.end,\n prefix: validated.prefix,\n suffix: validated.suffix\n });\n } catch (error) {\n console.warn(`[MotivationParsers] Skipping invalid assessment \"${assessment.exact}\":`, error);\n // Skip this assessment - AI hallucinated text that doesn't exist\n }\n }\n\n return validatedAssessments;\n } catch (error) {\n console.error('[MotivationParsers] Failed to parse AI assessment response:', error);\n console.error('Raw response:', response);\n return [];\n }\n }\n\n /**\n * Parse and validate AI response for tag detection\n * Note: Does NOT validate offsets - caller must do that with content\n *\n * @param response - Raw AI response string (may include markdown code fences)\n * @returns Array of tag matches (offsets not yet validated)\n */\n static parseTags(response: string): Omit<TagMatch, 'category'>[] {\n try {\n const parsed = extractObjectsFromArray(response);\n\n // Validate and filter\n const valid = parsed.filter((t): t is Omit<TagMatch, 'category'> =>\n !!t && typeof t === 'object' &&\n typeof (t as any).exact === 'string' &&\n typeof (t as any).start === 
'number' &&\n typeof (t as any).end === 'number' &&\n (t as any).exact.trim().length > 0\n );\n\n console.log(`[MotivationParsers] Parsed ${valid.length} valid tags from ${parsed.length} total`);\n\n return valid;\n } catch (error) {\n console.error('[MotivationParsers] Failed to parse AI tag response:', error);\n return [];\n }\n }\n\n /**\n * Validate tag offsets against content and add category\n * Helper for tag detection after initial parsing\n *\n * @param tags - Parsed tags without validated offsets\n * @param content - Original content to validate against\n * @param category - Category to assign to validated tags\n * @returns Array of validated tag matches\n */\n static validateTagOffsets(\n tags: Omit<TagMatch, 'category'>[],\n content: string,\n category: string\n ): TagMatch[] {\n const validatedTags: TagMatch[] = [];\n\n for (const tag of tags) {\n try {\n const validated = validateAndCorrectOffsets(content, tag.start, tag.end, tag.exact);\n validatedTags.push({\n ...tag,\n category,\n start: validated.start,\n end: validated.end,\n prefix: validated.prefix,\n suffix: validated.suffix\n });\n } catch (error) {\n console.warn(`[MotivationParsers] Skipping invalid tag for category \"${category}\":`, error);\n // Skip this tag - AI hallucinated text that doesn't exist\n }\n }\n\n return validatedTags;\n }\n}\n","/**\n * Tag Schema Registry\n *\n * Defines structural analysis frameworks for automatic tagging detection.\n * Each schema provides categories that passages can be classified into\n * based on their structural role (not their semantic content).\n *\n * Examples: IRAC (legal), IMRAD (scientific), Toulmin (argumentation)\n */\n\nexport interface TagCategory {\n name: string;\n description: string;\n examples: string[];\n}\n\nexport interface TagSchema {\n id: string;\n name: string;\n description: string;\n domain: 'legal' | 'scientific' | 'general';\n tags: TagCategory[];\n}\n\nexport const TAG_SCHEMAS: Record<string, TagSchema> = {\n 'legal-irac': {\n id: 'legal-irac',\n name: 'Legal Analysis (IRAC)',\n description: 'Issue, Rule, Application, Conclusion framework for legal reasoning',\n domain: 'legal',\n tags: [\n {\n name: 'Issue',\n description: 'The legal question or problem to be resolved',\n examples: [\n 'What is the central legal question?',\n 'What must the court decide?',\n 'What is the dispute about?'\n ]\n },\n {\n name: 'Rule',\n description: 'The relevant law, statute, or legal principle',\n examples: [\n 'What law applies?',\n 'What is the legal standard?',\n 'What statute governs this case?'\n ]\n },\n {\n name: 'Application',\n description: 'How the rule applies to the specific facts',\n examples: [\n 'How does the law apply to these facts?',\n 'Analysis of the case',\n 'How do the facts satisfy the legal standard?'\n ]\n },\n {\n name: 'Conclusion',\n description: 'The resolution or outcome based on the analysis',\n examples: [\n 'What is the court\\'s decision?',\n 'What is the final judgment?',\n 'What is the holding?'\n ]\n }\n ]\n },\n\n 'scientific-imrad': {\n id: 'scientific-imrad',\n name: 'Scientific Paper (IMRAD)',\n description: 'Introduction, Methods, Results, Discussion structure for research papers',\n domain: 'scientific',\n tags: [\n {\n name: 'Introduction',\n description: 'Background, context, and research question',\n examples: [\n 'What is the research question?',\n 'Why is this important?',\n 'What is the hypothesis?'\n ]\n },\n {\n name: 'Methods',\n description: 'Experimental design and procedures',\n examples: [\n 'How was the 
study conducted?',\n 'What methods were used?',\n 'What was the experimental design?'\n ]\n },\n {\n name: 'Results',\n description: 'Findings and observations',\n examples: [\n 'What did the study find?',\n 'What are the data?',\n 'What were the observations?'\n ]\n },\n {\n name: 'Discussion',\n description: 'Interpretation and implications of results',\n examples: [\n 'What do the results mean?',\n 'What are the implications?',\n 'How do these findings relate to prior work?'\n ]\n }\n ]\n },\n\n 'argument-toulmin': {\n id: 'argument-toulmin',\n name: 'Argument Structure (Toulmin)',\n description: 'Claim, Evidence, Warrant, Counterargument, Rebuttal framework for argumentation',\n domain: 'general',\n tags: [\n {\n name: 'Claim',\n description: 'The main assertion or thesis',\n examples: [\n 'What is being argued?',\n 'What is the main point?',\n 'What position is being taken?'\n ]\n },\n {\n name: 'Evidence',\n description: 'Data or facts supporting the claim',\n examples: [\n 'What supports this claim?',\n 'What are the facts?',\n 'What data is provided?'\n ]\n },\n {\n name: 'Warrant',\n description: 'Reasoning connecting evidence to claim',\n examples: [\n 'Why does this evidence support the claim?',\n 'What is the logic?',\n 'How does this reasoning work?'\n ]\n },\n {\n name: 'Counterargument',\n description: 'Opposing viewpoints or objections',\n examples: [\n 'What are the objections?',\n 'What do critics say?',\n 'What are alternative views?'\n ]\n },\n {\n name: 'Rebuttal',\n description: 'Response to counterarguments',\n examples: [\n 'How is the objection addressed?',\n 'Why is the counterargument wrong?',\n 'How is the criticism answered?'\n ]\n }\n ]\n }\n};\n\n/**\n * Get a tag schema by ID\n */\nexport function getTagSchema(schemaId: string): TagSchema | null {\n return TAG_SCHEMAS[schemaId] || null;\n}\n\n/**\n * Get all available tag schemas\n */\nexport function getAllTagSchemas(): TagSchema[] {\n return Object.values(TAG_SCHEMAS);\n}\n\n/**\n * Get tag schemas filtered by domain\n */\nexport function getTagSchemasByDomain(domain: 'legal' | 'scientific' | 'general'): TagSchema[] {\n return Object.values(TAG_SCHEMAS).filter(schema => schema.domain === domain);\n}\n\n/**\n * Validate that a category name is valid for a schema\n */\nexport function isValidCategory(schemaId: string, categoryName: string): boolean {\n const schema = getTagSchema(schemaId);\n if (!schema) return false;\n return schema.tags.some(tag => tag.name === categoryName);\n}\n\n/**\n * Get a specific category from a schema\n */\nexport function getSchemaCategory(schemaId: string, categoryName: string): TagCategory | null {\n const schema = getTagSchema(schemaId);\n if (!schema) return null;\n return schema.tags.find(tag => tag.name === categoryName) || null;\n}\n","/**\n * Annotation Detection\n *\n * Orchestrates the full annotation detection pipeline:\n * 1. Build AI prompts using MotivationPrompts\n * 2. Call AI inference\n * 3. 
Parse and validate results using MotivationParsers\n *\n * All methods take content as a string parameter.\n * Workers are responsible for fetching content via ContentFetcher.\n */\n\nimport type { InferenceClient } from '@semiont/inference';\nimport { MotivationPrompts } from './detection/motivation-prompts';\nimport {\n MotivationParsers,\n type CommentMatch,\n type HighlightMatch,\n type AssessmentMatch,\n type TagMatch,\n} from './detection/motivation-parsers';\nimport { getTagSchema, getSchemaCategory } from '@semiont/ontology';\nimport type { ResourceId } from '@semiont/core';\nimport type { ContentFetcher } from '../types';\n\nexport class AnnotationDetection {\n\n /**\n * Fetch content from a ContentFetcher and read the stream to a string.\n * Shared helper for all workers.\n */\n static async fetchContent(contentFetcher: ContentFetcher, resourceId: ResourceId): Promise<string> {\n const stream = await contentFetcher(resourceId);\n if (!stream) {\n throw new Error(`Could not load content for resource ${resourceId}`);\n }\n const chunks: Buffer[] = [];\n for await (const chunk of stream) {\n chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));\n }\n return Buffer.concat(chunks).toString('utf-8');\n }\n\n /**\n * Detect comments in content\n */\n static async detectComments(\n content: string,\n client: InferenceClient,\n instructions?: string,\n tone?: string,\n density?: number\n ): Promise<CommentMatch[]> {\n const prompt = MotivationPrompts.buildCommentPrompt(content, instructions, tone, density);\n const response = await client.generateText(prompt, 3000, 0.4);\n return MotivationParsers.parseComments(response, content);\n }\n\n /**\n * Detect highlights in content\n */\n static async detectHighlights(\n content: string,\n client: InferenceClient,\n instructions?: string,\n density?: number\n ): Promise<HighlightMatch[]> {\n const prompt = MotivationPrompts.buildHighlightPrompt(content, instructions, density);\n const response = await client.generateText(prompt, 2000, 0.3);\n return MotivationParsers.parseHighlights(response, content);\n }\n\n /**\n * Detect assessments in content\n */\n static async detectAssessments(\n content: string,\n client: InferenceClient,\n instructions?: string,\n tone?: string,\n density?: number\n ): Promise<AssessmentMatch[]> {\n const prompt = MotivationPrompts.buildAssessmentPrompt(content, instructions, tone, density);\n const response = await client.generateText(prompt, 3000, 0.3);\n return MotivationParsers.parseAssessments(response, content);\n }\n\n /**\n * Detect tags in content for a specific category\n */\n static async detectTags(\n content: string,\n client: InferenceClient,\n schemaId: string,\n category: string\n ): Promise<TagMatch[]> {\n const schema = getTagSchema(schemaId);\n if (!schema) {\n throw new Error(`Invalid tag schema: ${schemaId}`);\n }\n\n const categoryInfo = getSchemaCategory(schemaId, category);\n if (!categoryInfo) {\n throw new Error(`Invalid category \"${category}\" for schema ${schemaId}`);\n }\n\n const prompt = MotivationPrompts.buildTagPrompt(\n content,\n category,\n schema.name,\n schema.description,\n schema.domain,\n categoryInfo.description,\n categoryInfo.examples\n );\n\n const response = await client.generateText(prompt, 4000, 0.2);\n const parsedTags = MotivationParsers.parseTags(response);\n return MotivationParsers.validateTagOffsets(parsedTags, content, category);\n }\n}\n","import type { InferenceClient } from '@semiont/inference';\nimport type { Logger } from '@semiont/core';\n\n/**\n 
* Entity reference extracted from text\n */\nexport interface ExtractedEntity {\n exact: string; // The actual text span\n entityType: string; // The detected entity type\n startOffset: number; // Character offset where entity starts\n endOffset: number; // Character offset where entity ends\n prefix?: string; // Text immediately before entity (for disambiguation)\n suffix?: string; // Text immediately after entity (for disambiguation)\n}\n\n/**\n * Extract entity references from text using AI\n *\n * @param text - The text to analyze\n * @param entityTypes - Array of entity types to detect (optionally with examples)\n * @param client - Inference client for AI operations\n * @param includeDescriptiveReferences - Include anaphoric/cataphoric references (default: false)\n * @param logger - Optional logger for debugging entity extraction\n * @returns Array of extracted entities with their character offsets\n */\nexport async function extractEntities(\n exact: string,\n entityTypes: string[] | { type: string; examples?: string[] }[],\n client: InferenceClient,\n includeDescriptiveReferences: boolean = false,\n logger?: Logger\n): Promise<ExtractedEntity[]> {\n\n // Format entity types for the prompt\n const entityTypesDescription = entityTypes.map(et => {\n if (typeof et === 'string') {\n return et;\n }\n return et.examples && et.examples.length > 0\n ? `${et.type} (examples: ${et.examples.slice(0, 3).join(', ')})`\n : et.type;\n }).join(', ');\n\n // Build prompt with optional support for anaphoric/cataphoric references\n // Anaphora: references that point backward (e.g., \"John arrived. He was tired.\")\n // Cataphora: references that point forward (e.g., \"When she arrived, Mary was surprised.\")\n // When enabled, include substantive descriptive references beyond simple pronouns\n const descriptiveReferenceGuidance = includeDescriptiveReferences\n ? `\nInclude both:\n- Direct mentions (names, proper nouns)\n- Descriptive references (substantive phrases that refer to entities)\n\nFor descriptive references, include:\n- Definite descriptions: \"the Nobel laureate\", \"the tech giant\", \"the former president\"\n- Role-based references: \"the CEO\", \"the physicist\", \"the author\", \"the owner\", \"the contractor\"\n- Epithets with context: \"the Cupertino-based company\", \"the iPhone maker\"\n- References to entities even when identity is unknown or unspecified\n\nDo NOT include:\n- Simple pronouns alone: he, she, it, they, him, her, them\n- Generic determiners alone: this, that, these, those\n- Possessives without substance: his, her, their, its\n\nExamples:\n- For \"Marie Curie\", include \"the Nobel laureate\" and \"the physicist\" but NOT \"she\"\n- For an unknown person, include \"the owner\" or \"the contractor\" (role-based references count even when identity is unspecified)\n`\n : `\nFind direct mentions only (names, proper nouns). Do not include pronouns or descriptive references.\n`;\n\n const prompt = `Identify entity references in the following text. Look for mentions of: ${entityTypesDescription}.\n${descriptiveReferenceGuidance}\nText to analyze:\n\"\"\"\n${exact}\n\"\"\"\n\nRespond with a JSON array of entities found. 
Each entity should have:\n- exact: the exact text span from the input\n- entityType: one of the provided entity types\n- startOffset: character position where the entity starts (0-indexed)\n- endOffset: character position where the entity ends\n- prefix: up to 32 characters of text immediately before the entity (helps identify correct occurrence)\n- suffix: up to 32 characters of text immediately after the entity (helps identify correct occurrence)\n\nIf no entities are found, respond with an empty array [].\n\nExample output:\n[{\"exact\":\"Alice\",\"entityType\":\"Person\",\"startOffset\":0,\"endOffset\":5,\"prefix\":\"\",\"suffix\":\" went to\"},{\"exact\":\"Paris\",\"entityType\":\"Location\",\"startOffset\":20,\"endOffset\":25,\"prefix\":\"went to \",\"suffix\":\" yesterday\"}]`;\n\n logger?.debug('Sending entity extraction request', { entityTypes: entityTypesDescription });\n const response = await client.generateTextWithMetadata(\n prompt,\n 4000, // Increased to handle many entities without truncation\n 0.3 // Lower temperature for more consistent extraction\n );\n logger?.debug('Got entity extraction response', { responseLength: response.text.length });\n\n try {\n // Clean up response if wrapped in markdown\n let jsonStr = response.text.trim();\n if (jsonStr.startsWith('```')) {\n jsonStr = jsonStr.replace(/^```(?:json)?\\n?/, '').replace(/\\n?```$/, '');\n }\n\n const entities = JSON.parse(jsonStr);\n logger?.debug('Parsed entities from AI response', { count: entities.length });\n\n // Check if response was truncated - this is an ERROR condition\n if (response.stopReason === 'max_tokens') {\n const errorMsg = `AI response truncated: Found ${entities.length} entities but response hit max_tokens limit. Increase max_tokens or reduce resource size.`;\n logger?.error(errorMsg);\n throw new Error(errorMsg);\n }\n\n // Validate and fix offsets\n return entities.map((entity: any, idx: number) => {\n let startOffset = entity.startOffset;\n let endOffset = entity.endOffset;\n\n logger?.debug('Processing entity', {\n index: idx + 1,\n total: entities.length,\n type: entity.entityType,\n text: entity.exact,\n offsetsFromAI: `[${startOffset}:${endOffset}]`\n });\n\n // Verify the LLM-provided offsets point at the text the LLM says they do.\n //\n // Re-anchoring classification:\n // 'llm-exact' — LLM offsets match `exact` on the first try (happy path)\n // 'context-recovered' — mismatch recovered via prefix/suffix disambiguation\n // 'unique-match' — mismatch recovered by first-occurrence, but `exact`\n // appears exactly once so there's no ambiguity\n // 'first-of-many' — mismatch fell back to first-occurrence while `exact`\n // appears multiple times — the annotation *may* be\n // anchored at the wrong occurrence\n // 'dropped' — `exact` doesn't appear anywhere; entity skipped\n //\n // Log severity is tuned so that normal operation is silent at info/warn\n // level. 
Only 'first-of-many' warns (it's genuinely risky) and 'dropped'\n // errors (the LLM emitted something that isn't in the text).\n const extractedText = exact.substring(startOffset, endOffset);\n let anchorMethod: 'llm-exact' | 'context-recovered' | 'unique-match' | 'first-of-many' | 'dropped';\n\n if (extractedText === entity.exact) {\n anchorMethod = 'llm-exact';\n logger?.debug('Entity anchored', {\n text: entity.exact,\n entityType: entity.entityType,\n anchorMethod,\n });\n } else {\n // LLM offsets are wrong — the text at [start, end] isn't what they said.\n // Try to recover via prefix/suffix context, then by unique/first occurrence.\n logger?.debug('LLM offsets mismatch — attempting re-anchor', {\n expected: entity.exact,\n llmOffsets: `[${startOffset}:${endOffset}]`,\n foundAtLlmOffsets: extractedText,\n });\n\n // Count total occurrences up front — needed for the 'unique-match' vs\n // 'first-of-many' distinction below.\n let occurrenceCount = 0;\n let firstOccurrence = -1;\n let searchPos = 0;\n while ((searchPos = exact.indexOf(entity.exact, searchPos)) !== -1) {\n if (firstOccurrence === -1) firstOccurrence = searchPos;\n occurrenceCount++;\n searchPos++;\n }\n\n if (occurrenceCount === 0) {\n anchorMethod = 'dropped';\n logger?.error('Entity text not found in resource — dropping', {\n text: entity.exact,\n entityType: entity.entityType,\n llmOffsets: `[${startOffset}:${endOffset}]`,\n anchorMethod,\n resourceStart: exact.substring(0, 200),\n });\n return null;\n }\n\n // Try prefix/suffix-guided re-anchoring if context was provided.\n let recoveredOffset = -1;\n if (entity.prefix || entity.suffix) {\n let p = 0;\n while ((p = exact.indexOf(entity.exact, p)) !== -1) {\n const candidatePrefix = exact.substring(Math.max(0, p - 32), p);\n const candidateSuffix = exact.substring(\n p + entity.exact.length,\n Math.min(exact.length, p + entity.exact.length + 32),\n );\n const prefixMatch = !entity.prefix || candidatePrefix.endsWith(entity.prefix);\n const suffixMatch = !entity.suffix || candidateSuffix.startsWith(entity.suffix);\n if (prefixMatch && suffixMatch) {\n recoveredOffset = p;\n break;\n }\n p++;\n }\n }\n\n if (recoveredOffset !== -1) {\n anchorMethod = 'context-recovered';\n startOffset = recoveredOffset;\n endOffset = recoveredOffset + entity.exact.length;\n logger?.debug('Entity anchored', {\n text: entity.exact,\n entityType: entity.entityType,\n anchorMethod,\n offsetDiff: recoveredOffset - entity.startOffset,\n });\n } else if (occurrenceCount === 1) {\n anchorMethod = 'unique-match';\n startOffset = firstOccurrence;\n endOffset = firstOccurrence + entity.exact.length;\n logger?.debug('Entity anchored', {\n text: entity.exact,\n entityType: entity.entityType,\n anchorMethod,\n offsetDiff: firstOccurrence - entity.startOffset,\n });\n } else {\n // Multiple candidates, no context to disambiguate — risky fallback.\n // We still emit the annotation but flag it so operators can review.\n anchorMethod = 'first-of-many';\n startOffset = firstOccurrence;\n endOffset = firstOccurrence + entity.exact.length;\n logger?.warn('Entity anchored at first of multiple occurrences — may be wrong', {\n text: entity.exact,\n entityType: entity.entityType,\n anchorMethod,\n occurrenceCount,\n chosenOffset: firstOccurrence,\n llmOffsets: `[${entity.startOffset}:${entity.endOffset}]`,\n hasPrefix: !!entity.prefix,\n hasSuffix: !!entity.suffix,\n });\n }\n }\n\n return {\n exact: entity.exact,\n entityType: entity.entityType,\n startOffset: startOffset,\n endOffset: endOffset,\n prefix: 
entity.prefix,\n suffix: entity.suffix\n };\n }).filter((entity: ExtractedEntity | null): entity is ExtractedEntity => {\n // Filter out nulls and ensure we have valid offsets\n if (entity === null) {\n logger?.debug('Filtered entity: null');\n return false;\n }\n if (entity.startOffset === undefined || entity.endOffset === undefined) {\n logger?.warn('Filtered entity: missing offsets', { text: entity.exact });\n return false;\n }\n if (entity.startOffset < 0) {\n logger?.warn('Filtered entity: negative startOffset', {\n text: entity.exact,\n startOffset: entity.startOffset\n });\n return false;\n }\n if (entity.endOffset > exact.length) {\n logger?.warn('Filtered entity: endOffset exceeds text length', {\n text: entity.exact,\n endOffset: entity.endOffset,\n textLength: exact.length\n });\n return false;\n }\n\n // Verify the text at the offsets matches\n const extractedText = exact.substring(entity.startOffset, entity.endOffset);\n if (extractedText !== entity.exact) {\n logger?.warn('Filtered entity: offset mismatch', {\n expected: entity.exact,\n got: extractedText,\n offsets: `[${entity.startOffset}:${entity.endOffset}]`\n });\n return false;\n }\n\n logger?.debug('Accepted entity', {\n text: entity.exact,\n offsets: `[${entity.startOffset}:${entity.endOffset}]`\n });\n return true;\n });\n } catch (error) {\n logger?.error('Failed to parse entity extraction response', {\n error: error instanceof Error ? error.message : String(error)\n });\n return [];\n }\n}","/**\n * Resource Generation\n *\n * Generates markdown resources from topics using AI inference.\n */\n\nimport { getLocaleEnglishName } from '@semiont/api-client';\nimport type { GatheredContext, Logger } from '@semiont/core';\nimport type { InferenceClient } from '@semiont/inference';\n\n\nfunction getLanguageName(locale: string): string {\n return getLocaleEnglishName(locale) || locale;\n}\n\n/**\n * Generate resource content using inference\n */\nexport async function generateResourceFromTopic(\n topic: string,\n entityTypes: string[],\n client: InferenceClient,\n userPrompt?: string,\n locale?: string,\n context?: GatheredContext,\n temperature?: number,\n maxTokens?: number,\n logger?: Logger\n): Promise<{ title: string; content: string }> {\n logger?.debug('Generating resource from topic', {\n topicPreview: topic.substring(0, 100),\n entityTypes,\n hasUserPrompt: !!userPrompt,\n locale,\n hasContext: !!context,\n temperature,\n maxTokens\n });\n\n // Use provided values or defaults.\n // 500 tokens is the canonical backend default for maxTokens; the UI also initialises\n // its field to 500 as a UX convenience, but the authoritative fallback lives here so\n // that direct API callers get a sensible limit even when they omit the parameter.\n const finalTemperature = temperature ?? 0.7;\n const finalMaxTokens = maxTokens ?? 500;\n\n // Determine language instruction\n const languageInstruction = locale && locale !== 'en'\n ? `\\n\\nIMPORTANT: Write the entire resource in ${getLanguageName(locale)}.`\n : '';\n\n // Build annotation context section if available\n let annotationSection = '';\n if (context) {\n const parts: string[] = [];\n parts.push(`- Annotation motivation: ${context.annotation.motivation}`);\n parts.push(`- Source resource: ${context.sourceResource.name}`);\n // Include body text for commenting/assessing annotations\n const { motivation, body } = context.annotation;\n if (motivation === 'commenting' || motivation === 'assessing') {\n const bodyItem = Array.isArray(body) ? 
body[0] : body;\n if (bodyItem && 'value' in bodyItem && bodyItem.value) {\n const label = motivation === 'commenting' ? 'Comment' : 'Assessment';\n parts.push(`- ${label}: ${bodyItem.value}`);\n }\n }\n annotationSection = `\\n\\nAnnotation context:\\n${parts.join('\\n')}`;\n }\n\n // Build context section if available\n let contextSection = '';\n if (context?.sourceContext) {\n const { before, selected, after } = context.sourceContext;\n contextSection = `\\n\\nSource document context:\n---\n${before ? `...${before}` : ''}\n**[${selected}]**\n${after ? `${after}...` : ''}\n---\n`;\n }\n\n // Build graph context section if available\n let graphContextSection = '';\n if (context?.graphContext) {\n const gc = context.graphContext;\n const connections = gc.connections ?? [];\n const citedBy = gc.citedBy ?? [];\n const parts: string[] = [];\n\n if (connections.length > 0) {\n const connList = connections\n .map(c => `${c.resourceName}${c.entityTypes?.length ? ` (${c.entityTypes.join(', ')})` : ''}`)\n .join(', ');\n parts.push(`- Connected resources: ${connList}`);\n }\n\n if (gc.citedByCount && gc.citedByCount > 0) {\n const citedNames = citedBy.map(c => c.resourceName).join(', ');\n parts.push(`- This resource is cited by ${gc.citedByCount} other resource${gc.citedByCount > 1 ? 's' : ''}${citedNames ? `: ${citedNames}` : ''}`);\n }\n\n if (gc.siblingEntityTypes && gc.siblingEntityTypes.length > 0) {\n parts.push(`- Related entity types in this document: ${gc.siblingEntityTypes.join(', ')}`);\n }\n\n if (gc.inferredRelationshipSummary) {\n parts.push(`- Relationship summary: ${gc.inferredRelationshipSummary}`);\n }\n\n if (parts.length > 0) {\n graphContextSection = `\\n\\nKnowledge graph context:\\n${parts.join('\\n')}`;\n }\n }\n\n const structureGuidance = finalMaxTokens >= 1000\n ? 'organized into titled sections (## Section) with well-structured paragraphs'\n : 'organized into well-structured paragraphs';\n\n // Simple, direct prompt - just ask for markdown content\n const prompt = `Generate a concise, informative resource about \"${topic}\".\n${entityTypes.length > 0 ? `Focus on these entity types: ${entityTypes.join(', ')}.` : ''}\n${userPrompt ? 
`Additional context: ${userPrompt}` : ''}${annotationSection}${contextSection}${graphContextSection}${languageInstruction}\n\nRequirements:\n- Start with a clear heading (# Title)\n- Aim for approximately ${finalMaxTokens} tokens of content, ${structureGuidance}\n- Be factual and informative\n- Use markdown formatting\n- Write the response as markdown`;\n\n // Simple parser - just use the response directly as markdown\n const parseResponse = (response: string): { title: string; content: string } => {\n // Clean up any markdown code fences if present\n let content = response.trim();\n if (content.startsWith('```markdown') || content.startsWith('```md')) {\n content = content.slice(content.indexOf('\\n') + 1);\n const endIndex = content.lastIndexOf('```');\n if (endIndex !== -1) {\n content = content.slice(0, endIndex);\n }\n } else if (content.startsWith('```')) {\n content = content.slice(3);\n const endIndex = content.lastIndexOf('```');\n if (endIndex !== -1) {\n content = content.slice(0, endIndex);\n }\n }\n\n content = content.trim();\n\n // Title is provided by the caller (topic), not extracted from generated content\n return {\n title: topic,\n content: content\n };\n };\n\n logger?.debug('Sending prompt to inference', {\n promptLength: prompt.length,\n temperature: finalTemperature,\n maxTokens: finalMaxTokens\n });\n const response = await client.generateText(prompt, finalMaxTokens, finalTemperature);\n logger?.debug('Got response from inference', { responseLength: response.length });\n\n const result = parseResponse(response);\n logger?.debug('Parsed response', {\n hasTitle: !!result.title,\n titleLength: result.title?.length,\n hasContent: !!result.content,\n contentLength: result.content?.length\n });\n\n return result;\n}\n","/**\n * Job Processors — extracted from JobWorker subclasses\n *\n * Pure functions that take content + inference client + params,\n * report progress via callback, and return annotations + results.\n *\n * No EventBus, no JobQueue, no side effects except calling inference.\n * Two callers:\n * 1. In-process JobWorker subclasses (existing path)\n * 2. Remote WorkerVM via worker-process.ts (new path)\n */\n\nimport { AnnotationDetection } from './workers/annotation-detection';\nimport { extractEntities } from './workers/detection/entity-extractor';\nimport { generateResourceFromTopic } from './workers/generation/resource-generation';\nimport { generateAnnotationId } from '@semiont/event-sourcing';\nimport { didToAgent, type ResourceId, type components } from '@semiont/core';\nimport { validateAndCorrectOffsets } from '@semiont/api-client';\nimport type { InferenceClient } from '@semiont/inference';\nimport type {\n HighlightDetectionParams,\n CommentDetectionParams,\n AssessmentDetectionParams,\n DetectionParams,\n TagDetectionParams,\n GenerationParams,\n HighlightDetectionResult,\n CommentDetectionResult,\n AssessmentDetectionResult,\n DetectionResult,\n TagDetectionResult,\n GenerationResult,\n} from './types';\n\ntype Agent = components['schemas']['Agent'];\n\n/**\n * Progress callback. 
The three positional args satisfy the minimum\n * `JobProgress` required fields (`percentage`, `message`, `stage`).\n * The fourth optional arg carries job-type-specific fields\n * (`currentEntityType`, `completedEntityTypes`, `requestParams`, etc.)\n * that the progress UI renders.\n */\nexport type OnProgress = (\n percentage: number,\n message: string,\n stage: string,\n extra?: Partial<JobProgress>,\n) => void;\n\ntype JobProgress = components['schemas']['JobProgress'];\n\nexport interface ProcessorResult<R> {\n annotations: Record<string, unknown>[];\n result: R;\n}\n\nfunction buildTextAnnotation(\n resourceId: ResourceId,\n userId: string,\n generator: Agent,\n motivation: string,\n match: { exact: string; start: number; end: number; prefix?: string; suffix?: string },\n // Body may be a single AnnotationBody object or a non-empty array of\n // them, OR omitted entirely. W3C treats body as optional; annotations\n // whose motivation alone conveys meaning (highlighting) legitimately\n // skip it. Every other motivation currently passes something; the\n // processor that calls this makes the choice per-motivation.\n body?: Record<string, unknown> | Record<string, unknown>[],\n) {\n return {\n '@context': 'http://www.w3.org/ns/anno.jsonld' as const,\n 'type': 'Annotation' as const,\n 'id': generateAnnotationId(),\n motivation,\n creator: didToAgent(userId),\n generator,\n created: new Date().toISOString(),\n target: {\n type: 'SpecificResource' as const,\n source: resourceId as string,\n selector: [\n { type: 'TextPositionSelector' as const, start: match.start, end: match.end },\n {\n type: 'TextQuoteSelector' as const,\n exact: match.exact,\n ...(match.prefix && { prefix: match.prefix }),\n ...(match.suffix && { suffix: match.suffix }),\n },\n ],\n },\n ...(body !== undefined ? { body } : {}),\n };\n}\n\nexport async function processHighlightJob(\n content: string,\n inferenceClient: InferenceClient,\n params: HighlightDetectionParams,\n userId: string,\n generator: Agent,\n onProgress: OnProgress,\n): Promise<ProcessorResult<HighlightDetectionResult>> {\n onProgress(10, 'Loading resource...', 'analyzing');\n onProgress(30, 'Analyzing text...', 'analyzing');\n\n const highlights = await AnnotationDetection.detectHighlights(\n content, inferenceClient, params.instructions, params.density,\n );\n\n onProgress(60, `Creating ${highlights.length} annotations...`, 'creating');\n\n // Highlights carry no body — motivation:'highlighting' on a target\n // is a complete annotation per the W3C Web Annotation Model.\n const annotations = highlights.map((h) =>\n buildTextAnnotation(params.resourceId, userId, generator, 'highlighting', h),\n );\n\n onProgress(100, `Complete! 
Created ${annotations.length} highlights`, 'creating');\n\n return {\n annotations,\n result: { highlightsFound: highlights.length, highlightsCreated: annotations.length },\n };\n}\n\nexport async function processCommentJob(\n content: string,\n inferenceClient: InferenceClient,\n params: CommentDetectionParams,\n userId: string,\n generator: Agent,\n onProgress: OnProgress,\n): Promise<ProcessorResult<CommentDetectionResult>> {\n onProgress(10, 'Loading resource...', 'analyzing');\n onProgress(30, 'Analyzing text...', 'analyzing');\n\n const comments = await AnnotationDetection.detectComments(\n content, inferenceClient, params.instructions, params.tone, params.density,\n );\n\n onProgress(60, `Creating ${comments.length} annotations...`, 'creating');\n\n const annotations = comments.map((c) =>\n // Match the pre-#651 CommentAnnotationWorker: include format and\n // language on the body TextualBody. Optional in the schema, but\n // consumers that do language-aware rendering rely on them.\n buildTextAnnotation(params.resourceId, userId, generator, 'commenting', c, [\n { type: 'TextualBody', value: c.comment, purpose: 'commenting', format: 'text/plain', language: 'en' },\n ]),\n );\n\n onProgress(100, `Complete! Created ${annotations.length} comments`, 'creating');\n\n return {\n annotations,\n result: { commentsFound: comments.length, commentsCreated: annotations.length },\n };\n}\n\nexport async function processAssessmentJob(\n content: string,\n inferenceClient: InferenceClient,\n params: AssessmentDetectionParams,\n userId: string,\n generator: Agent,\n onProgress: OnProgress,\n): Promise<ProcessorResult<AssessmentDetectionResult>> {\n onProgress(10, 'Loading resource...', 'analyzing');\n onProgress(30, 'Analyzing text...', 'analyzing');\n\n const assessments = await AnnotationDetection.detectAssessments(\n content, inferenceClient, params.instructions, params.tone, params.density,\n );\n\n onProgress(60, `Creating ${assessments.length} annotations...`, 'creating');\n\n const annotations = assessments.map((a) =>\n // Single-object body with purpose aligned to motivation, matching the\n // pre-#651 AssessmentAnnotationWorker's shape and the majority of\n // persisted assessments. Do not switch to an array or to\n // purpose='describing' — that loses the \"this is an assessment, not\n // a description\" signal and breaks existing readers that access\n // `body.value` directly on the object.\n buildTextAnnotation(params.resourceId, userId, generator, 'assessing', a, {\n type: 'TextualBody', value: a.assessment, purpose: 'assessing', format: 'text/plain', language: 'en',\n }),\n );\n\n onProgress(100, `Complete! 
Created ${annotations.length} assessments`, 'creating');\n\n return {\n annotations,\n result: { assessmentsFound: assessments.length, assessmentsCreated: annotations.length },\n };\n}\n\nexport async function processReferenceJob(\n content: string,\n inferenceClient: InferenceClient,\n params: DetectionParams,\n userId: string,\n generator: Agent,\n onProgress: OnProgress,\n logger?: import('@semiont/core').Logger,\n): Promise<ProcessorResult<DetectionResult>> {\n const entityTypeNames = params.entityTypes.map(String);\n const requestParams = [{ label: 'Entity types', value: entityTypeNames.join(', ') }];\n const completedEntityTypes: Array<{ entityType: string; foundCount: number }> = [];\n let totalFound = 0;\n let totalEmitted = 0;\n let errors = 0;\n const allAnnotations: Record<string, unknown>[] = [];\n\n onProgress(10, 'Loading resource...', 'analyzing', { requestParams });\n\n for (let i = 0; i < entityTypeNames.length; i++) {\n const entityTypeName = entityTypeNames[i];\n if (!entityTypeName) continue;\n const pct = 20 + Math.round((i / entityTypeNames.length) * 60);\n onProgress(pct, `Detecting ${entityTypeName} entities...`, 'analyzing', {\n currentEntityType: entityTypeName,\n processedEntityTypes: i,\n totalEntityTypes: entityTypeNames.length,\n entitiesFound: totalFound,\n entitiesEmitted: totalEmitted,\n completedEntityTypes: [...completedEntityTypes],\n requestParams,\n });\n\n const extractedEntities = await extractEntities(\n content, [entityTypeName], inferenceClient, params.includeDescriptiveReferences ?? false, logger,\n );\n\n totalFound += extractedEntities.length;\n completedEntityTypes.push({ entityType: entityTypeName, foundCount: extractedEntities.length });\n\n // Unresolved reference body: the entity type as a tagging TextualBody.\n // The bind flow later appends a SpecificResource (purpose: 'linking')\n // via mark:body-updated to produce the resolved shape. Emitting an\n // empty body would break the append contract.\n const unresolvedBody = [{ type: 'TextualBody', value: entityTypeName, purpose: 'tagging' }];\n\n for (const entity of extractedEntities) {\n try {\n const validated = validateAndCorrectOffsets(content, entity.startOffset, entity.endOffset, entity.exact);\n const ann = buildTextAnnotation(\n params.resourceId, userId, generator, 'linking', validated, unresolvedBody,\n );\n allAnnotations.push(ann);\n totalEmitted++;\n } catch {\n errors++;\n }\n }\n }\n\n onProgress(100, `Complete! Created ${totalEmitted} references`, 'creating');\n\n return {\n annotations: allAnnotations,\n result: { totalFound, totalEmitted, errors },\n };\n}\n\nexport async function processTagJob(\n content: string,\n inferenceClient: InferenceClient,\n params: TagDetectionParams,\n userId: string,\n generator: Agent,\n onProgress: OnProgress,\n): Promise<ProcessorResult<TagDetectionResult>> {\n onProgress(10, 'Loading resource...', 'analyzing');\n onProgress(30, 'Analyzing text for tags...', 'analyzing');\n\n const allTags = [];\n for (const category of params.categories) {\n const categoryTags = await AnnotationDetection.detectTags(\n content, inferenceClient, params.schemaId, category,\n );\n allTags.push(...categoryTags);\n }\n const tags = allTags;\n\n onProgress(60, `Creating ${tags.length} tag annotations...`, 'creating');\n\n const byCategory: Record<string, number> = {};\n const annotations = tags.map((t) => {\n const category = t.category ?? 'unknown';\n byCategory[category] = (byCategory[category] ?? 
0) + 1;\n // Two-body shape matches the pre-#651 TagAnnotationWorker and every\n // persisted tag annotation: the category as a tagging TextualBody,\n // plus the tagging-schema id as a classifying TextualBody. The\n // classifying body is the only trace of schema provenance in the\n // event log — do not drop it.\n return buildTextAnnotation(params.resourceId, userId, generator, 'tagging', t, [\n { type: 'TextualBody', value: category, purpose: 'tagging', format: 'text/plain', language: 'en' },\n { type: 'TextualBody', value: params.schemaId, purpose: 'classifying', format: 'text/plain' },\n ]);\n });\n\n onProgress(100, `Complete! Created ${annotations.length} tags`, 'creating');\n\n return {\n annotations,\n result: { tagsFound: tags.length, tagsCreated: annotations.length, byCategory },\n };\n}\n\nexport async function processGenerationJob(\n inferenceClient: InferenceClient,\n params: GenerationParams,\n onProgress: OnProgress,\n): Promise<{ content: string; title: string; format: string; result: GenerationResult }> {\n onProgress(20, 'Fetching context...', 'fetching');\n\n const title = params.title ?? 'Untitled';\n const entityTypes = (params.entityTypes ?? []).map(String);\n\n onProgress(40, 'Generating resource...', 'generating');\n\n const generated = await generateResourceFromTopic(\n title,\n entityTypes,\n inferenceClient,\n params.prompt,\n params.language,\n params.context,\n params.temperature,\n params.maxTokens,\n );\n\n onProgress(85, 'Creating resource...', 'creating');\n\n return {\n content: generated.content,\n title: generated.title ?? title,\n format: 'text/markdown',\n result: {\n resourceId: '' as ResourceId,\n resourceName: generated.title ?? title,\n },\n };\n}\n","/**\n * Worker Process Entry Point\n *\n * Standalone Node process that hangs a job-claim loop off a\n * `SemiontSession`'s actor. Receives job assignments, processes them\n * with an inference provider, and emits domain events through\n * `session.client.actor.emit`. All HTTP and SSE goes through the\n * api-client — no raw `fetch`, no hand-rolled multipart, no\n * duplicate actor.\n *\n * Usage:\n * node worker-process.js\n *\n * `createJobClaimAdapter` handles the reactive contract (SSE\n * subscription, claim, completion tracking). This file wires the\n * job processors to the adapter and drives lifecycle emissions.\n */\n\nimport {\n createJobClaimAdapter,\n type JobClaimAdapter,\n type ActiveJob,\n type SemiontSession,\n} from '@semiont/api-client';\nimport { RESOURCE_BROADCAST_TYPES, type EventMap } from '@semiont/core';\nimport type { InferenceClient } from '@semiont/inference';\nimport type { Logger, components } from '@semiont/core';\nimport { deriveStorageUri } from '@semiont/content';\nimport {\n processHighlightJob,\n processCommentJob,\n processAssessmentJob,\n processReferenceJob,\n processTagJob,\n processGenerationJob,\n type OnProgress,\n} from './processors';\n\ntype Agent = components['schemas']['Agent'];\n\nexport interface WorkerEngine {\n inferenceClient: InferenceClient;\n generator: Agent;\n}\n\nexport interface WorkerProcessConfig {\n session: SemiontSession;\n jobTypes: string[];\n /**\n * Per-job-type inference client + generator metadata. Keyed by the\n * job type the worker has subscribed to (`jobTypes`). 
Each entry lets\n * that job type run on its own model, as configured in\n * `[workers.<job-type>.inference]`.\n */\n engines: Record<string, WorkerEngine>;\n logger: Logger;\n}\n\n/**\n * Route `actor.emit` calls — choosing resource-scoped vs global based\n * on whether the event is a cross-subscriber broadcast. Extracted\n * from the deleted `WorkerVM.emitEvent`; kept here as a module-level\n * helper because `handleJob` uses it a dozen times.\n */\nasync function emitEvent(\n session: SemiontSession,\n channel: keyof EventMap,\n payload: Record<string, unknown>,\n): Promise<void> {\n const isBroadcast = (RESOURCE_BROADCAST_TYPES as readonly string[]).includes(channel as string);\n const resourceScope = isBroadcast ? (payload.resourceId as string | undefined) : undefined;\n await session.client.actor.emit(channel as string, payload, resourceScope);\n}\n\nexport function startWorkerProcess(config: WorkerProcessConfig): JobClaimAdapter {\n const { session, logger } = config;\n const adapter = createJobClaimAdapter({\n actor: session.client.actor,\n jobTypes: config.jobTypes,\n });\n\n adapter.activeJob$.subscribe((job) => {\n if (!job) return;\n logger.info('Processing job', { jobId: job.jobId, type: job.type, resourceId: job.resourceId });\n handleJob(adapter, config, job).catch((error) => {\n const message = error instanceof Error ? error.message : String(error);\n logger.error('Job failed', { jobId: job.jobId, error: message, stack: error instanceof Error ? error.stack : undefined });\n const failAnnotationId = (job.params as { referenceId?: string }).referenceId;\n emitEvent(session, 'job:fail', {\n resourceId: job.resourceId,\n userId: job.userId,\n jobId: job.jobId,\n jobType: job.type,\n ...(failAnnotationId ? { annotationId: failAnnotationId } : {}),\n error: message,\n }).catch(() => {});\n adapter.failJob(job.jobId, message);\n });\n });\n\n adapter.start();\n return adapter;\n}\n\n// Exported for unit testing — the orchestration (claim→fetch→process→emit→complete)\n// is the only thing not otherwise exercised by processors.test.ts.\n// Do not call from outside the worker process.\nexport async function handleJob(\n adapter: JobClaimAdapter,\n config: WorkerProcessConfig,\n job: ActiveJob,\n): Promise<void> {\n const { session } = config;\n const { resourceId, userId, jobId, type: jobType } = job;\n\n // Annotation-scoped jobs (today: generation, triggered from a\n // reference) carry the source annotation through every lifecycle\n // payload so the UI can attach visual feedback to that annotation.\n // Resource-scoped jobs (bulk reference/tag/highlight/comment/\n // assessment detection scanning a whole resource) leave it unset.\n const annotationId = (job.params as { referenceId?: string }).referenceId;\n const lifecycleBase = {\n resourceId, userId, jobId, jobType,\n ...(annotationId ? { annotationId } : {}),\n };\n\n // ── Job lifecycle signaling ───────────────────────────────────────────\n // `job:start` / `job:report-progress` / `job:complete` / `job:fail`\n // are the ONE unified lifecycle family. Start/complete/fail are\n // persisted by Stower; progress is ephemeral UI feedback and Stower\n // ignores it. 
UI consumers filter by `jobType` and/or `annotationId`\n // in the payload.\n\n await emitEvent(session, 'job:start', lifecycleBase);\n\n const engine = config.engines[jobType];\n if (!engine) {\n adapter.failJob(jobId, `No inference engine configured for job type: ${jobType}`);\n return;\n }\n const { inferenceClient, generator } = engine;\n\n const onProgress: OnProgress = (percentage, message, stage, extra) => {\n emitEvent(session, 'job:report-progress', {\n ...lifecycleBase,\n percentage,\n progress: {\n stage, percentage, message,\n ...(annotationId ? { annotationId } : {}),\n ...(extra ?? {}),\n },\n }).catch(() => {});\n };\n\n const fetchContent = async (): Promise<string> => {\n return await session.client.browse.resourceContent(resourceId as never);\n };\n\n if (jobType === 'highlight-annotation') {\n const content = await fetchContent();\n const { annotations, result } = await processHighlightJob(\n content, inferenceClient, job.params as never, userId, generator, onProgress,\n );\n for (const ann of annotations) {\n await emitEvent(session, 'mark:create', { annotation: ann, userId, resourceId });\n }\n await emitEvent(session, 'job:complete', {\n ...lifecycleBase,\n result: result as never,\n });\n adapter.completeJob();\n\n } else if (jobType === 'comment-annotation') {\n const content = await fetchContent();\n const { annotations, result } = await processCommentJob(\n content, inferenceClient, job.params as never, userId, generator, onProgress,\n );\n for (const ann of annotations) {\n await emitEvent(session, 'mark:create', { annotation: ann, userId, resourceId });\n }\n await emitEvent(session, 'job:complete', {\n ...lifecycleBase,\n result: result as never,\n });\n adapter.completeJob();\n\n } else if (jobType === 'assessment-annotation') {\n const content = await fetchContent();\n const { annotations, result } = await processAssessmentJob(\n content, inferenceClient, job.params as never, userId, generator, onProgress,\n );\n for (const ann of annotations) {\n await emitEvent(session, 'mark:create', { annotation: ann, userId, resourceId });\n }\n await emitEvent(session, 'job:complete', {\n ...lifecycleBase,\n result: result as never,\n });\n adapter.completeJob();\n\n } else if (jobType === 'reference-annotation') {\n const content = await fetchContent();\n const { annotations, result } = await processReferenceJob(\n content, inferenceClient, job.params as never, userId, generator, onProgress,\n );\n for (const ann of annotations) {\n await emitEvent(session, 'mark:create', { annotation: ann, userId, resourceId });\n }\n await emitEvent(session, 'job:complete', {\n ...lifecycleBase,\n result: result as never,\n });\n adapter.completeJob();\n\n } else if (jobType === 'tag-annotation') {\n const content = await fetchContent();\n const { annotations, result } = await processTagJob(\n content, inferenceClient, job.params as never, userId, generator, onProgress,\n );\n for (const ann of annotations) {\n await emitEvent(session, 'mark:create', { annotation: ann, userId, resourceId });\n }\n await emitEvent(session, 'job:complete', {\n ...lifecycleBase,\n result: result as never,\n });\n adapter.completeJob();\n\n } else if (jobType === 'generation') {\n const genResult = await processGenerationJob(\n inferenceClient, job.params as never, onProgress,\n );\n\n // Content never travels on the bus. 
Upload via the api-client's\n // `client.yield.resource()` — same serializer the /know/compose\n // page uses, so the multipart wire shape has ONE definition.\n // The backend writes content to disk and emits `yield:create`\n // internally; we only learn the new resourceId from the response.\n const genParams = job.params as {\n referenceId?: string;\n prompt?: string;\n language?: string;\n };\n const storageUri = deriveStorageUri(genResult.title, genResult.format);\n\n const { resourceId: newResourceId } = await session.client.yield.resource({\n name: genResult.title,\n file: Buffer.from(genResult.content),\n format: genResult.format,\n storageUri,\n creationMethod: 'generated',\n sourceResourceId: resourceId as unknown as string,\n ...(genParams.referenceId ? { sourceAnnotationId: genParams.referenceId } : {}),\n ...(genParams.prompt ? { generationPrompt: genParams.prompt } : {}),\n ...(genParams.language ? { language: genParams.language } : {}),\n generator,\n });\n\n await emitEvent(session, 'job:complete', {\n ...lifecycleBase,\n result: { resourceId: newResourceId, resourceName: genResult.title } as never,\n });\n adapter.completeJob();\n\n } else {\n adapter.failJob(jobId, `Unknown job type: ${jobType}`);\n }\n}\n","import winston from 'winston';\nimport type { Logger } from '@semiont/core';\n\nexport function createProcessLogger(component: string): Logger {\n const level = process.env.LOG_LEVEL ?? 'info';\n const format = process.env.LOG_FORMAT === 'simple'\n ? winston.format.combine(\n winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),\n winston.format.errors({ stack: true }),\n winston.format.printf(({ level: lvl, message, timestamp, ...meta }) => {\n const metaStr = Object.keys(meta).length > 0 ? ` ${JSON.stringify(meta)}` : '';\n return `${timestamp} [${lvl.toUpperCase()}] [${component}] ${message}${metaStr}`;\n }),\n )\n : winston.format.combine(\n winston.format.timestamp(),\n winston.format.errors({ stack: true }),\n winston.format.json(),\n );\n\n const logger = winston.createLogger({\n level,\n defaultMeta: { component },\n format,\n transports: [new winston.transports.Console()],\n });\n\n return logger as unknown as Logger;\n}\n","/**\n * Worker Pool Main — standalone entry point\n *\n * Reads configuration from ~/.semiontconfig (TOML) via the canonical\n * `createTomlConfigLoader` from @semiont/core. Authenticates with the\n * KS via shared secret. 
Starts the worker process on top of a\n * `SemiontSession` so every HTTP call and bus emit goes through the\n * api-client, not raw `fetch`.\n *\n * One inference client is built per distinct `(type, model, apiKey,\n * endpoint)` combination declared in `[workers.<type>.inference]` /\n * `[workers.default.inference]`, and each job type dispatches to the\n * client configured for it.\n *\n * Environment variables (only two):\n * SEMIONT_WORKER_SECRET — shared secret for JWT auth with the KS\n * ANTHROPIC_API_KEY — only when using Anthropic inference\n *\n * Everything else comes from ~/.semiontconfig.\n */\n\nimport { startWorkerProcess, type WorkerEngine } from './worker-process';\nimport {\n createInferenceClient,\n type InferenceClient,\n type InferenceClientConfig,\n} from '@semiont/inference';\nimport { createServer } from 'http';\nimport { readFileSync, existsSync } from 'fs';\nimport { homedir, hostname } from 'os';\nimport { join } from 'path';\nimport {\n createTomlConfigLoader,\n type components,\n type EnvironmentConfig,\n} from '@semiont/core';\nimport {\n SemiontSession,\n InMemorySessionStorage,\n setStoredSession,\n type KnowledgeBase,\n} from '@semiont/api-client';\n\ntype Agent = components['schemas']['Agent'];\n\nconst ALL_JOB_TYPES = [\n 'reference-annotation', 'generation', 'highlight-annotation',\n 'assessment-annotation', 'comment-annotation', 'tag-annotation',\n];\n\n// Shape of each resolved worker inference entry under `_metadata.workers`.\n// The canonical TOML loader populates this by merging the per-worker\n// inference block with the flat `[inference.<type>]` provider section\n// (apiKey, endpoint/baseURL), so every entry here has everything a\n// client factory needs.\ntype ResolvedInference = {\n type: 'anthropic' | 'ollama';\n model: string;\n apiKey?: string;\n endpoint?: string;\n baseURL?: string;\n};\n\n// ── Load config via the canonical TOML loader ─────────────────────────\n\nconst configPath = join(homedir(), '.semiontconfig');\nconst tomlReader = {\n readIfExists: (p: string): string | null => existsSync(p) ? readFileSync(p, 'utf-8') : null,\n};\nconst envConfig = createTomlConfigLoader(\n tomlReader,\n configPath,\n process.env,\n)(null, 'local');\n\n// `_metadata.workers` is the resolver's output — a `WorkerInferenceConfig`\n// keyed by job type (plus `default`) with each entry fully merged with\n// the flat `[inference.<type>]` provider block.\nconst workerInferenceMap = (envConfig._metadata as (EnvironmentConfig['_metadata'] & {\n workers?: Record<string, ResolvedInference>;\n}) | undefined)?.workers;\nif (!workerInferenceMap || Object.keys(workerInferenceMap).length === 0) {\n throw new Error(\n 'No worker inference config found in ~/.semiontconfig. ' +\n 'Add at least [environments.<env>.workers.default.inference] with type = \"...\" and model = \"...\".',\n );\n}\n\nfunction resolveWorker(jobType: string): ResolvedInference {\n const specific = workerInferenceMap![jobType];\n if (specific) return specific;\n const def = workerInferenceMap!['default'];\n if (def) return def;\n throw new Error(\n `No inference config for worker '${jobType}' and no workers.default in ~/.semiontconfig.`,\n );\n}\n\nconst backendPublicURL = envConfig.services?.backend?.publicURL;\nif (!backendPublicURL) {\n throw new Error('services.backend.publicURL is required in ~/.semiontconfig');\n}\nconst backendBaseUrl: string = backendPublicURL;\n\nconst workerSecret = process.env.SEMIONT_WORKER_SECRET ?? 
'';\nconst healthPort = 9090;\n\nimport { createProcessLogger } from './logger';\n\nconst logger = createProcessLogger('worker');\n\n// ── Build engines map with per-(type,model,apiKey,endpoint) de-dup ────\n\nfunction clientKey(w: ResolvedInference): string {\n return [w.type, w.model, w.apiKey ?? '', w.endpoint ?? '', w.baseURL ?? ''].join('|');\n}\n\nfunction toClientConfig(w: ResolvedInference): InferenceClientConfig {\n return {\n type: w.type,\n model: w.model,\n ...(w.endpoint && { endpoint: w.endpoint }),\n ...(w.baseURL && { baseURL: w.baseURL }),\n ...(w.apiKey && { apiKey: w.apiKey }),\n };\n}\n\nconst clientCache = new Map<string, InferenceClient>();\nconst engines: Record<string, WorkerEngine> = {};\nfor (const jobType of ALL_JOB_TYPES) {\n const w = resolveWorker(jobType);\n const key = clientKey(w);\n let client = clientCache.get(key);\n if (!client) {\n client = createInferenceClient(toClientConfig(w), logger);\n clientCache.set(key, client);\n }\n const generator: Agent = {\n '@type': 'SoftwareAgent',\n name: `worker-pool / ${w.type} ${w.model}`,\n worker: 'worker-pool',\n inferenceProvider: w.type,\n model: w.model,\n };\n engines[jobType] = { inferenceClient: client, generator };\n}\n\n// ── Build a synthetic KB for the worker ──────────────────────────────\n//\n// SemiontSession is KB-scoped: every session is tied to one backend\n// instance identified by protocol/host/port. Workers aren't user-\n// scoped, but they are backend-scoped — they connect to exactly one\n// Semiont backend. Represent that as a synthetic KnowledgeBase whose\n// `email` carries the worker's service-principal identity.\n\nfunction parseBackendUrl(url: string): { protocol: 'http' | 'https'; host: string; port: number } {\n const parsed = new URL(url);\n const protocol = (parsed.protocol.replace(':', '') === 'https' ? 'https' : 'http') as 'http' | 'https';\n const host = parsed.hostname;\n const port = parsed.port\n ? Number(parsed.port)\n : protocol === 'https' ? 
443 : 80;\n return { protocol, host, port };\n}\n\n// ── Authenticate: exchange shared secret for a JWT ────────────────────\n\nasync function authenticate(): Promise<string> {\n if (!workerSecret) {\n logger.warn('No SEMIONT_WORKER_SECRET set — using empty token');\n return '';\n }\n\n const response = await fetch(`${backendBaseUrl}/api/tokens/worker`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({ secret: workerSecret }),\n });\n\n if (!response.ok) {\n throw new Error(`Authentication failed: ${response.status} ${response.statusText}`);\n }\n\n const { token } = await response.json() as { token: string };\n return token;\n}\n\nasync function main() {\n logger.info('Authenticating', { baseUrl: backendBaseUrl });\n const initialToken = await authenticate();\n logger.info('Authenticated');\n\n // Construct a synthetic KB + pre-seed an in-memory storage with the\n // initial token so SemiontSession starts with a ready-to-use token$.\n // The `refresh` callback re-exchanges the shared secret on expiry.\n const { protocol, host, port } = parseBackendUrl(backendBaseUrl);\n const kbId = `worker-${hostname()}`;\n const kb: KnowledgeBase = {\n id: kbId,\n label: `Worker pool @ ${host}`,\n host,\n port,\n protocol,\n email: `worker-pool@${host}`,\n };\n const storage = new InMemorySessionStorage();\n setStoredSession(storage, kbId, { access: initialToken, refresh: '' });\n\n const session = new SemiontSession({\n kb,\n storage,\n refresh: async () => {\n try {\n return await authenticate();\n } catch (err) {\n logger.error('Worker token refresh failed', {\n error: err instanceof Error ? err.message : String(err),\n });\n return null;\n }\n },\n // No validate callback — workers are service principals with no\n // user record to fetch. `session.user$` stays null.\n onError: (err) => {\n logger.error('Session error', { code: err.code, message: err.message });\n },\n });\n await session.ready;\n\n const workerVm = startWorkerProcess({\n session,\n jobTypes: ALL_JOB_TYPES,\n engines,\n logger,\n });\n\n logger.info('Connected', {\n baseUrl: backendBaseUrl,\n engines: Object.fromEntries(\n Object.entries(engines).map(([jt, e]) => [jt, `${e.generator.inferenceProvider} / ${e.generator.model}`]),\n ),\n });\n\n const health = createServer((req, res) => {\n if (req.url === '/health') {\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ status: 'ok' }));\n } else {\n res.writeHead(404);\n res.end();\n }\n });\n health.listen(healthPort, () => {\n logger.info('Health endpoint ready', { port: healthPort });\n });\n\n const shutdown = async () => {\n logger.info('Shutting down');\n workerVm.dispose();\n await session.dispose();\n health.close();\n process.exit(0);\n };\n\n process.on('SIGTERM', shutdown);\n process.on('SIGINT', shutdown);\n}\n\nmain().catch((error) => {\n logger.error('Fatal', { error: error instanceof Error ? error.message : String(error), stack: error instanceof Error ? error.stack : undefined });\n process.exit(1);\n});\n"]}
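The worker-process.ts source embedded in the map above describes one unified job lifecycle event family (job:start, job:report-progress, job:complete, job:fail) whose payloads all extend the same lifecycleBase object, and notes that UI consumers filter on jobType and/or annotationId. A minimal sketch of that payload shape, reconstructed only from the fields visible in the embedded source; the package does not export such a type, so every name below is an assumption:

// Hypothetical shape of the unified job lifecycle payloads, inferred from
// `lifecycleBase` and the emit sites in the embedded worker-process.ts.
// Not an exported type of @semiont/jobs.
type JobLifecyclePayload = {
  resourceId: string;
  userId: string;
  jobId: string;
  jobType: string;
  annotationId?: string;   // only for annotation-scoped jobs (e.g. generation from a reference)
  percentage?: number;     // job:report-progress only; progress is ephemeral and not persisted
  progress?: { stage: string; percentage: number; message: string; annotationId?: string };
  result?: unknown;        // job:complete only
  error?: string;          // job:fail only
};

// A consumer interested in feedback for one annotation filters on annotationId:
const isForAnnotation = (p: JobLifecyclePayload, annotationId: string): boolean =>
  p.annotationId === annotationId;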
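The worker-main.ts entry point, also embedded above, reads everything except two environment variables (SEMIONT_WORKER_SECRET, plus ANTHROPIC_API_KEY when Anthropic inference is used) from ~/.semiontconfig, and refuses to start unless it finds at least a workers.default.inference block and services.backend.publicURL. A minimal TOML sketch of such a file, assuming the [environments.<env>...] nesting named in the loader's error message; the provider, model, and URL values are placeholders, not defaults shipped by the package:

# ~/.semiontconfig (illustrative sketch; only the keys named in worker-main's
# error messages are certain, everything else here is a placeholder)

[environments.local.services.backend]
publicURL = "http://localhost:3001"    # required; worker-main throws without it

# Fallback engine used by every job type that has no specific entry.
[environments.local.workers.default.inference]
type = "anthropic"                     # 'anthropic' | 'ollama'
model = "claude-example-model"         # placeholder model id

# Optional per-job-type override, e.g. run generation on a local Ollama model.
[environments.local.workers.generation.inference]
type = "ollama"
model = "example-local-model"          # placeholder model id
endpoint = "http://localhost:11434"

One inference client is built per distinct (type, model, apiKey, endpoint) combination, so job types that share an entry also share a client; the process additionally serves a health check at /health on port 9090.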
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@semiont/jobs",
-  "version": "0.4.20",
+  "version": "0.4.21",
   "type": "module",
   "description": "Filesystem-based job queue and worker infrastructure",
   "main": "./dist/index.js",
@@ -9,6 +9,14 @@
     ".": {
       "types": "./dist/index.d.ts",
       "import": "./dist/index.js"
+    },
+    "./worker-main": {
+      "types": "./dist/worker-main.d.ts",
+      "import": "./dist/worker-main.js"
+    },
+    "./smelter-main": {
+      "types": "./dist/smelter-main.d.ts",
+      "import": "./dist/smelter-main.js"
     }
   },
   "files": [
@@ -26,7 +34,9 @@
     "@semiont/content": "*",
     "@semiont/core": "*",
     "@semiont/event-sourcing": "*",
-    "@semiont/inference": "*"
+    "@semiont/inference": "*",
+    "@semiont/vectors": "*",
+    "winston": "^3.17.0"
   },
   "devDependencies": {
     "@vitest/coverage-v8": "^4.1.0",
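With the two new exports entries, downstream code can resolve the standalone entry points by subpath instead of reaching into dist/. A hedged launcher sketch; per the embedded source, worker-main runs its top-level main() on import, so importing it is equivalent to starting the worker:

// Hypothetical launcher using the subpath export added in this version.
// worker-main reads ~/.semiontconfig and SEMIONT_WORKER_SECRET at load time
// and starts claiming jobs immediately; './smelter-main' resolves the same way.
import '@semiont/jobs/worker-main';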