learning-agent 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +21 -2
- package/README.md +34 -5
- package/dist/cli.js +758 -16
- package/dist/cli.js.map +1 -1
- package/dist/index.d.ts +2 -2
- package/package.json +10 -19
package/dist/cli.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/types.ts","../src/storage/jsonl.ts","../src/storage/sqlite.ts","../src/capture/quality.ts","../src/capture/triggers.ts","../src/capture/integration.ts","../src/cli-utils.ts","../src/embeddings/model.ts","../src/index.ts","../src/storage/compact.ts","../src/cli.ts"],"names":["createHash","join","dirname","mtime","readFile","mkdir","lines","appendFile","statSync"],"mappings":";;;;;;;;;;;;;AAQO,IAAM,YAAA,GAAe,EAAE,IAAA,CAAK;AAAA,EACjC,iBAAA;AAAA,EACA,iBAAA;AAAA,EACA,cAAA;AAAA,EACA;AACF,CAAC,CAAA;AAGM,IAAM,aAAA,GAAgB,EAAE,MAAA,CAAO;AAAA,EACpC,IAAA,EAAM,EAAE,MAAA,EAAO;AAAA,EACf,MAAA,EAAQ,EAAE,MAAA;AACZ,CAAC,CAAA;AAGM,IAAM,aAAA,GAAgB,EAAE,MAAA,CAAO;AAAA,EACpC,GAAA,EAAK,EAAE,MAAA,EAAO;AAAA,EACd,IAAA,EAAM,EAAE,MAAA;AACV,CAAC,CAAA;AAGM,IAAM,iBAAiB,CAAA,CAAE,IAAA,CAAK,CAAC,MAAA,EAAQ,QAAA,EAAU,KAAK,CAAC,CAAA;AAGvD,IAAM,mBAAmB,CAAA,CAAE,IAAA,CAAK,CAAC,OAAA,EAAS,MAAM,CAAC,CAAA;AAYjD,IAAM,YAAA,GAAe,EAAE,MAAA,CAAO;AAAA;AAAA,EAEnC,EAAA,EAAI,EAAE,MAAA,EAAO;AAAA,EACb,IAAA,EAAM,gBAAA;AAAA,EACN,OAAA,EAAS,EAAE,MAAA,EAAO;AAAA,EAClB,OAAA,EAAS,EAAE,MAAA,EAAO;AAAA;AAAA,EAGlB,IAAA,EAAM,CAAA,CAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA;AAAA,EACxB,MAAA,EAAQ,YAAA;AAAA,EACR,OAAA,EAAS,aAAA;AAAA,EACT,OAAA,EAAS,EAAE,MAAA,EAAO;AAAA;AAAA,EAClB,SAAA,EAAW,EAAE,OAAA,EAAQ;AAAA;AAAA,EAGrB,UAAA,EAAY,CAAA,CAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA;AAAA,EAC9B,OAAA,EAAS,CAAA,CAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA;AAAA;AAAA,EAG3B,QAAA,EAAU,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA,EAC9B,QAAA,EAAU,eAAe,QAAA,EAAS;AAAA,EAClC,OAAA,EAAS,cAAc,QAAA,EAAS;AAAA;AAAA,EAGhC,OAAA,EAAS,CAAA,CAAE,OAAA,EAAQ,CAAE,QAAA,EAAS;AAAA,EAC9B,cAAA,EAAgB,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAC7B,CAAC,CAAA;AAG8B,EAAE,MAAA,CAAO;AAAA,EACtC,EAAA,EAAI,EAAE,MAAA,EAAO;AAAA,EACb,OAAA,EAAS,CAAA,CAAE,OAAA,CAAQ,IAAI,CAAA;AAAA,EACvB,SAAA,EAAW,EAAE,MAAA;AAAO;AACtB,CAAC;AAeM,SAAS,WAAW,OAAA,EAAyB;AAClD,EAAA,MAAM,IAAA,GAAO,WAAW,QAAQ,CAAA,CAAE,OAAO,OAAO,CAAA,CAAE,OAAO,KAAK,CAAA;AAC9D,EAAA,OAAO,CAAA,CAAA,EAAI,IAAA,CAAK,KAAA,CAAM,CAAA,EAAG,CAAC,CAAC,CAAA,CAAA;AAC7B;;;AClFO,IAAM,YAAA,GAAe,6BAAA;AAgC5B,eAAsB,YAAA,CAAa,UAAkB,MAAA,EAA+B;AAClF,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC5C,EAAA,MAAM,MAAM,OAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AAElD,EAAA,MAAM,IAAA,GAAO,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA,GAAI,IAAA;AACtC,EAAA,MAAM,UAAA,CAAW,QAAA,EAAU,IAAA,EAAM,OAAO,CAAA;AAC1C;AAMA,SAAS,aAAA,CACP,IAAA,EACA,UAAA,EACA,MAAA,EACA,YAAA,EACe;AAEf,EAAA,IAAI,MAAA;AACJ,EAAA,IAAI;AACF,IAAA,MAAA,GAAS,IAAA,CAAK,MAAM,IAAI,CAAA;AAAA,EAC1B,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,UAAA,GAAyB;AAAA,MAC7B,IAAA,EAAM,UAAA;AAAA,MACN,OAAA,EAAS,CAAA,cAAA,EAAkB,GAAA,CAAc,OAAO,CAAA,CAAA;AAAA,MAChD,KAAA,EAAO;AAAA,KACT;AACA,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,oBAAA,EAAuB,UAAU,CAAA,EAAA,EAAK,UAAA,CAAW,OAAO,CAAA,CAAE,CAAA;AAAA,IAC5E;AACA,IAAA,YAAA,GAAe,UAAU,CAAA;AACzB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,MAAM,MAAA,GAAS,YAAA,CAAa,SAAA,CAAU,MAAM,CAAA;AAC5C,EAAA,IAAI,CAAC,OAAO,OAAA,EAAS;AACnB,IAAA,MAAM,UAAA,GAAyB;AAAA,MAC7B,IAAA,EAAM,UAAA;AAAA,MACN,OAAA,EAAS,CAAA,0BAAA,EAA6B,MAAA,CAAO,KAAA,CAAM,OAAO,CAAA,CAAA;AAAA,MAC1D,OAAO,MAAA,CAAO;AAAA,KAChB;AACA,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,oBAAA,EAAuB,UAAU,CAAA,EAAA,EAAK,UAAA,CAAW,OAAO,CAAA,CAAE,CAAA;AAAA,IAC5E;AACA,IAAA,YAAA,GAAe,UAAU,CAAA;AACzB,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,MAAA,CAAO,IAAA;AAChB;AAWA,eAAsB,WAAA,CACpB,QAAA,EACA,OAAA,GAA8B,EAAC,EACH;AAC5B,EAAA,MAAM,EAAE,MAAA,GAAS,KAAA,EAAO,YAAA,EAAa,GAAI,OAAA;AACzC,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAE5C,EAAA,IAAI,OAAA;AACJ,EAAA,IAAI;AACF,IAAA,OAAA,GAAU,MAAM,QAAA,
CAAS,QAAA,EAAU,OAAO,CAAA;AAAA,EAC5C,SAAS,GAAA,EAAK;AACZ,IAAA,IAAK,GAAA,CAA8B,SAAS,QAAA,EAAU;AACpD,MAAA,OAAO,EAAE,OAAA,EAAS,EAAC,EAAG,cAAc,CAAA,EAAE;AAAA,IACxC;AACA,IAAA,MAAM,GAAA;AAAA,EACR;AAEA,EAAA,MAAM,OAAA,uBAAc,GAAA,EAAoB;AACxC,EAAA,IAAI,YAAA,GAAe,CAAA;AAEnB,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA;AAChC,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,KAAA,CAAM,QAAQ,CAAA,EAAA,EAAK;AACrC,IAAA,MAAM,OAAA,GAAU,KAAA,CAAM,CAAC,CAAA,CAAG,IAAA,EAAK;AAC/B,IAAA,IAAI,CAAC,OAAA,EAAS;AAEd,IAAA,MAAM,SAAS,aAAA,CAAc,OAAA,EAAS,CAAA,GAAI,CAAA,EAAG,QAAQ,YAAY,CAAA;AACjE,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,YAAA,EAAA;AACA,MAAA;AAAA,IACF;AAEA,IAAA,IAAI,OAAO,OAAA,EAAS;AAClB,MAAA,OAAA,CAAQ,MAAA,CAAO,OAAO,EAAE,CAAA;AAAA,IAC1B,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,GAAA,CAAI,MAAA,CAAO,EAAA,EAAI,MAAM,CAAA;AAAA,IAC/B;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,SAAS,KAAA,CAAM,IAAA,CAAK,QAAQ,MAAA,EAAQ,GAAG,YAAA,EAAa;AAC/D;;;AC/HO,IAAM,OAAA,GAAU,+BAAA;AAGvB,IAAM,UAAA,GAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,CAAA;AAoEnB,SAAS,aAAa,QAAA,EAA8B;AAClD,EAAA,QAAA,CAAS,KAAK,UAAU,CAAA;AAC1B;AAEA,IAAI,EAAA,GAA0B,IAAA;AAMvB,SAAS,WAAA,CAAY,SAAiB,OAAA,EAAyB;AACpE,EAAA,OAAOA,UAAAA,CAAW,QAAQ,CAAA,CAAE,MAAA,CAAO,CAAA,EAAG,OAAO,CAAA,CAAA,EAAI,OAAO,CAAA,CAAE,CAAA,CAAE,MAAA,CAAO,KAAK,CAAA;AAC1E;AAqBO,SAAS,OAAO,QAAA,EAAgC;AACrD,EAAA,IAAI,IAAI,OAAO,EAAA;AAEf,EAAA,MAAM,MAAA,GAASC,IAAAA,CAAK,QAAA,EAAU,OAAO,CAAA;AAGrC,EAAA,MAAM,GAAA,GAAMC,QAAQ,MAAM,CAAA;AAC1B,EAAA,SAAA,CAAU,GAAA,EAAK,EAAE,SAAA,EAAW,IAAA,EAAM,CAAA;AAElC,EAAA,EAAA,GAAK,IAAI,SAAS,MAAM,CAAA;AAGxB,EAAA,EAAA,CAAG,OAAO,oBAAoB,CAAA;AAE9B,EAAA,YAAA,CAAa,EAAE,CAAA;AAEf,EAAA,OAAO,EAAA;AACT;AA0HA,SAAS,YAAY,GAAA,EAAwB;AAC3C,EAAA,MAAM,MAAA,GAAiB;AAAA,IACrB,IAAI,GAAA,CAAI,EAAA;AAAA,IACR,MAAM,GAAA,CAAI,IAAA;AAAA,IACV,SAAS,GAAA,CAAI,OAAA;AAAA,IACb,SAAS,GAAA,CAAI,OAAA;AAAA,IACb,IAAA,EAAM,GAAA,CAAI,IAAA,GAAO,GAAA,CAAI,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,MAAA,CAAO,OAAO,CAAA,GAAI,EAAC;AAAA,IACxD,QAAQ,GAAA,CAAI,MAAA;AAAA,IACZ,OAAA,EAAS,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,OAAO,CAAA;AAAA,IAC/B,UAAA,EAAY,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,UAAU,CAAA;AAAA,IACrC,OAAA,EAAS,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,OAAO,CAAA;AAAA,IAC/B,SAAS,GAAA,CAAI,OAAA;AAAA,IACb,SAAA,EAAW,IAAI,SAAA,KAAc;AAAA,GAC/B;AAGA,EAAA,IAAI,GAAA,CAAI,aAAa,IAAA,EAAM;AACzB,IAAA,MAAA,CAAO,WAAW,GAAA,CAAI,QAAA;AAAA,EACxB;AACA,EAAA,IAAI,GAAA,CAAI,aAAa,IAAA,EAAM;AACzB,IAAA,MAAA,CAAO,WAAW,GAAA,CAAI,QAAA;AAAA,EACxB;AACA,EAAA,IAAI,GAAA,CAAI,YAAY,CAAA,EAAG;AACrB,IAAA,MAAA,CAAO,OAAA,GAAU,IAAA;AAAA,EACnB;AACA,EAAA,IAAI,GAAA,CAAI,kBAAkB,CAAA,EAAG;AAC3B,IAAA,MAAA,CAAO,iBAAiB,GAAA,CAAI,eAAA;AAAA,EAC9B;AAEA,EAAA,OAAO,MAAA;AACT;AAWA,SAAS,wBAAwB,QAAA,EAA0D;AACzF,EAAA,MAAM,KAAA,uBAAY,GAAA,EAAiC;AACnD,EAAA,MAAM,IAAA,GAAO,QAAA,CACV,OAAA,CAAQ,6EAA6E,EACrF,GAAA,EAAI;AAEP,EAAA,KAAA,MAAW,OAAO,IAAA,EAAM;AACtB,IAAA,IAAI,GAAA,CAAI,SAAA,IAAa,GAAA,CAAI,YAAA,EAAc;AACrC,MAAA,KAAA,CAAM,GAAA,CAAI,GAAA,CAAI,EAAA,EAAI,EAAE,SAAA,EAAW,IAAI,SAAA,EAAW,WAAA,EAAa,GAAA,CAAI,YAAA,EAAc,CAAA;AAAA,IAC/E;AAAA,EACF;AACA,EAAA,OAAO,KAAA;AACT;AAGA,IAAM,iBAAA,GAAoB;AAAA;AAAA;AAAA,CAAA;AAQ1B,SAAS,cAAc,QAAA,EAAiC;AACtD,EAAA,MAAM,SAAA,GAAYD,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC7C,EAAA,IAAI;AACF,IAAA,MAAM,IAAA,GAAO,SAAS,SAAS,CAAA;AAC/B,IAAA,OAAO,IAAA,CAAK,OAAA;AAAA,EACd,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF;AAKA,SAAS,iBAAiB,QAAA,EAAuC;AAC/D,EAAA,MAAM,MAAM,QAAA,CACT,OAAA
,CAAQ,0CAA0C,CAAA,CAClD,IAAI,iBAAiB,CAAA;AACxB,EAAA,OAAO,GAAA,GAAM,UAAA,CAAW,GAAA,CAAI,KAAK,CAAA,GAAI,IAAA;AACvC;AAKA,SAAS,gBAAA,CAAiB,UAAwB,KAAA,EAAqB;AACrE,EAAA,QAAA,CACG,QAAQ,4DAA4D,CAAA,CACpE,IAAI,iBAAA,EAAmB,KAAA,CAAM,UAAU,CAAA;AAC5C;AAOA,eAAsB,aAAa,QAAA,EAAiC;AAClE,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAChC,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAE9C,EAAA,MAAM,gBAAA,GAAmB,wBAAwB,QAAQ,CAAA;AACzD,EAAA,QAAA,CAAS,KAAK,qBAAqB,CAAA;AAEnC,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AAExB,IAAA,MAAME,MAAAA,GAAQ,cAAc,QAAQ,CAAA;AACpC,IAAA,IAAIA,WAAU,IAAA,EAAM;AAClB,MAAA,gBAAA,CAAiB,UAAUA,MAAK,CAAA;AAAA,IAClC;AACA,IAAA;AAAA,EACF;AAEA,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,OAAA,CAAQ,iBAAiB,CAAA;AACjD,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,WAAA,CAAY,CAAC,KAAA,KAAoB;AAC3D,IAAA,KAAA,MAAW,UAAU,KAAA,EAAO;AAC1B,MAAA,MAAM,OAAA,GAAU,WAAA,CAAY,MAAA,CAAO,OAAA,EAAS,OAAO,OAAO,CAAA;AAC1D,MAAA,MAAM,MAAA,GAAS,gBAAA,CAAiB,GAAA,CAAI,MAAA,CAAO,EAAE,CAAA;AAC7C,MAAA,MAAM,aAAA,GAAgB,MAAA,IAAU,MAAA,CAAO,WAAA,KAAgB,OAAA;AAEvD,MAAA,MAAA,CAAO,GAAA,CAAI;AAAA,QACT,IAAI,MAAA,CAAO,EAAA;AAAA,QACX,MAAM,MAAA,CAAO,IAAA;AAAA,QACb,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,QAAA,EAAU,OAAO,QAAA,IAAY,IAAA;AAAA,QAC7B,QAAA,EAAU,OAAO,QAAA,IAAY,IAAA;AAAA,QAC7B,IAAA,EAAM,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,GAAG,CAAA;AAAA,QAC1B,QAAQ,MAAA,CAAO,MAAA;AAAA,QACf,OAAA,EAAS,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,OAAO,CAAA;AAAA,QACtC,UAAA,EAAY,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,UAAU,CAAA;AAAA,QAC5C,OAAA,EAAS,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,OAAO,CAAA;AAAA,QACtC,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,SAAA,EAAW,MAAA,CAAO,SAAA,GAAY,CAAA,GAAI,CAAA;AAAA,QAClC,OAAA,EAAS,MAAA,CAAO,OAAA,GAAU,CAAA,GAAI,CAAA;AAAA,QAC9B,eAAA,EAAiB,OAAO,cAAA,IAAkB,CAAA;AAAA,QAC1C,cAAA,EAAgB,IAAA;AAAA;AAAA,QAChB,SAAA,EAAW,aAAA,GAAgB,MAAA,CAAO,SAAA,GAAY,IAAA;AAAA,QAC9C,YAAA,EAAc,aAAA,GAAgB,MAAA,CAAO,WAAA,GAAc;AAAA,OACpD,CAAA;AAAA,IACH;AAAA,EACF,CAAC,CAAA;AAED,EAAA,UAAA,CAAW,OAAO,CAAA;AAGlB,EAAA,MAAM,KAAA,GAAQ,cAAc,QAAQ,CAAA;AACpC,EAAA,IAAI,UAAU,IAAA,EAAM;AAClB,IAAA,gBAAA,CAAiB,UAAU,KAAK,CAAA;AAAA,EAClC;AACF;AAYA,eAAsB,YAAA,CACpB,QAAA,EACA,OAAA,GAAuB,EAAC,EACN;AAClB,EAAA,MAAM,EAAE,KAAA,GAAQ,KAAA,EAAM,GAAI,OAAA;AAG1B,EAAA,MAAM,UAAA,GAAa,cAAc,QAAQ,CAAA;AACzC,EAAA,IAAI,UAAA,KAAe,IAAA,IAAQ,CAAC,KAAA,EAAO;AAEjC,IAAA,OAAO,KAAA;AAAA,EACT;AAEA,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAChC,EAAA,MAAM,aAAA,GAAgB,iBAAiB,QAAQ,CAAA;AAG/C,EAAA,MAAM,eAAe,KAAA,IAAS,aAAA,KAAkB,IAAA,IAAS,UAAA,KAAe,QAAQ,UAAA,GAAa,aAAA;AAE7F,EAAA,IAAI,YAAA,EAAc;AAChB,IAAA,MAAM,aAAa,QAAQ,CAAA;AAC3B,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,KAAA;AACT;AAOA,eAAsB,aAAA,CACpB,QAAA,EACA,KAAA,EACA,KAAA,EACmB;AACnB,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAGhC,EAAA,MAAM,WAAA,GAAc,QAAA,CAAS,OAAA,CAAQ,qCAAqC,EAAE,GAAA,EAAI;AAGhF,EAAA,IAAI,WAAA,CAAY,GAAA,KAAQ,CAAA,EAAG,OAAO,EAAC;AAGnC,EAAA,MAAM,OAAO,QAAA,CACV,OAAA;AAAA,IACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAA;AAAA,GAOF,CACC,GAAA,CAAI,KAAA,EAAO,KAAK,CAAA;AAGnB,EAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,IAAA,uBAAA,CAAwB,UAAU,IAAA,CAAK,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,CAAE,EAAE,CAAC,CAAA;AAAA,EACzD;AAEA,EAAA,OAAO,IAAA,CAAK,IAAI,WAAW,CAAA;AAC7B;AAcO,SAAS,uBAAA,CAAwB,UAAkB,SAAA,EAA2B;AACnF,EAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAE5B,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAChC,EAAA,MAAM,GAAA,GAAA,iBAAM,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAEnC,EAAA,MAAM,MAAA,GAAS,SAAS,OAAA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAAA,CAK/B,CAAA;AAED,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,WAAA,CAAY,CAAC,GAAA,KAAkB;AACzD,IAAA,KAAA,MAAW,MAAM,GAAA,EAAK;AACpB,MAAA,MAAA,CAAO,GAAA,CAAI,KAAK,EAAE,CAAA;AAAA,IACpB;AAAA,EACF,CAAC,CAAA;AAED,EAAA,UAAA,CAAW,SAAS,CAAA;AACtB;AAMO,SAAS,kBAAkB,Q
AAA,EAAmC;AACnE,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAEhC,EAAA,MAAM,IAAA,GAAO,QAAA,CACV,OAAA,CAAQ,yDAAyD,EACjE,GAAA,EAAI;AAEP,EAAA,OAAO,IAAA,CAAK,GAAA,CAAI,CAAC,GAAA,MAAS;AAAA,IACxB,IAAI,GAAA,CAAI,EAAA;AAAA,IACR,OAAO,GAAA,CAAI,eAAA;AAAA,IACX,eAAe,GAAA,CAAI;AAAA,GACrB,CAAE,CAAA;AACJ;;;AChhBA,IAAM,4BAAA,GAA+B,GAAA;AAkBrC,eAAsB,OAAA,CACpB,QAAA,EACA,OAAA,EACA,OAAA,GAA0B,EAAC,EACH;AACxB,EAAA,MAAM,SAAA,GAAY,QAAQ,SAAA,IAAa,4BAAA;AAGvC,EAAA,MAAM,aAAa,QAAQ,CAAA;AAG3B,EAAA,MAAM,KAAA,GAAQ,QACX,WAAA,EAAY,CACZ,QAAQ,cAAA,EAAgB,EAAE,EAC1B,KAAA,CAAM,KAAK,EACX,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,CAC1B,KAAA,CAAM,GAAG,CAAC,CAAA;AAEb,EAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG;AACtB,IAAA,OAAO,EAAE,OAAO,IAAA,EAAK;AAAA,EACvB;AAGA,EAAA,MAAM,WAAA,GAAc,KAAA,CAAM,IAAA,CAAK,MAAM,CAAA;AACrC,EAAA,MAAM,OAAA,GAAU,MAAM,aAAA,CAAc,QAAA,EAAU,aAAa,EAAE,CAAA;AAE7D,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxB,IAAA,OAAO,EAAE,OAAO,IAAA,EAAK;AAAA,EACvB;AAGA,EAAA,MAAM,YAAA,GAAe,IAAI,GAAA,CAAI,OAAA,CAAQ,aAAY,CAAE,KAAA,CAAM,KAAK,CAAC,CAAA;AAE/D,EAAA,KAAA,MAAW,UAAU,OAAA,EAAS;AAC5B,IAAA,MAAM,WAAA,GAAc,IAAI,GAAA,CAAI,MAAA,CAAO,QAAQ,WAAA,EAAY,CAAE,KAAA,CAAM,KAAK,CAAC,CAAA;AAGrE,IAAA,MAAM,YAAA,GAAe,CAAC,GAAG,YAAY,CAAA,CAAE,MAAA,CAAO,CAAC,CAAA,KAAM,WAAA,CAAY,GAAA,CAAI,CAAC,CAAC,CAAA,CAAE,MAAA;AACzE,IAAA,MAAM,KAAA,GAAA,qBAAY,GAAA,CAAI,CAAC,GAAG,YAAA,EAAc,GAAG,WAAW,CAAC,CAAA,EAAE,IAAA;AACzD,IAAA,MAAM,UAAA,GAAa,KAAA,GAAQ,CAAA,GAAI,YAAA,GAAe,KAAA,GAAQ,CAAA;AAEtD,IAAA,IAAI,cAAc,SAAA,EAAW;AAC3B,MAAA,OAAO;AAAA,QACL,KAAA,EAAO,KAAA;AAAA,QACP,QAAQ,CAAA,gCAAA,EAAmC,MAAA,CAAO,QAAQ,KAAA,CAAM,CAAA,EAAG,EAAE,CAAC,CAAA,IAAA,CAAA;AAAA,QACtE,YAAY,MAAA,CAAO;AAAA,OACrB;AAAA,IACF;AAGA,IAAA,IAAI,OAAO,OAAA,CAAQ,WAAA,EAAY,KAAM,OAAA,CAAQ,aAAY,EAAG;AAC1D,MAAA,OAAO;AAAA,QACL,KAAA,EAAO,KAAA;AAAA,QACP,MAAA,EAAQ,CAAA,qBAAA,CAAA;AAAA,QACR,YAAY,MAAA,CAAO;AAAA,OACrB;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,OAAO,IAAA,EAAK;AACvB;AAGA,IAAM,cAAA,GAAiB,CAAA;AAGvB,IAAM,cAAA,GAAiB;AAAA,EACrB,mBAAA;AAAA,EACA,iBAAA;AAAA,EACA,kBAAA;AAAA,EACA,gBAAA;AAAA,EACA,aAAA;AAAA,EACA;AACF,CAAA;AAGA,IAAM,0BAAA,GAA6B,sCAAA;AAY5B,SAAS,WAAW,OAAA,EAAoC;AAE7D,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,IAAA,EAAK,CAAE,KAAA,CAAM,KAAK,CAAA,CAAE,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA;AACpE,EAAA,IAAI,KAAA,CAAM,SAAS,cAAA,EAAgB;AACjC,IAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,uCAAA,EAAwC;AAAA,EAC5E;AAGA,EAAA,KAAA,MAAW,WAAW,cAAA,EAAgB;AACpC,IAAA,IAAI,OAAA,CAAQ,IAAA,CAAK,OAAO,CAAA,EAAG;AACzB,MAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,iCAAA,EAAkC;AAAA,IACtE;AAAA,EACF;AAGA,EAAA,IAAI,0BAAA,CAA2B,IAAA,CAAK,OAAO,CAAA,EAAG;AAC5C,IAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,iCAAA,EAAkC;AAAA,EACtE;AAEA,EAAA,OAAO,EAAE,UAAU,IAAA,EAAK;AAC1B;AAGA,IAAM,eAAA,GAAkB;AAAA,EACtB,8BAAA;AAAA;AAAA,EACA,8BAAA;AAAA;AAAA,EACA,yBAAA;AAAA;AAAA,EACA,2BAAA;AAAA;AAAA,EACA,2BAAA;AAAA;AAAA,EACA,0BAAA;AAAA;AAAA,EACA;AAAA;AACF,CAAA;AAYO,SAAS,aAAa,OAAA,EAAsC;AAEjE,EAAA,KAAA,MAAW,WAAW,eAAA,EAAiB;AACrC,IAAA,IAAI,OAAA,CAAQ,IAAA,CAAK,OAAO,CAAA,EAAG;AACzB,MAAA,OAAO,EAAE,YAAY,IAAA,EAAK;AAAA,IAC5B;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,UAAA,EAAY,KAAA,EAAO,MAAA,EAAQ,qCAAA,EAAsC;AAC5E;AAYA,eAAsB,aAAA,CACpB,UACA,OAAA,EACwB;AAExB,EAAA,MAAM,cAAA,GAAiB,WAAW,OAAO,CAAA;AACzC,EAAA,IAAI,CAAC,eAAe,QAAA,EAAU;AAC5B,IAAA,OAAO,EAAE,aAAA,EAAe,KAAA,EAAO,MAAA,EAAQ,eAAe,MAAA,EAAO;AAAA,EAC/D;AAGA,EAAA,MAAM,gBAAA,GAAmB,aAAa,OAAO,CAAA;AAC7C,EAAA,IAAI,CAAC,iBAAiB,UAAA,EAAY;AAChC,IAAA,OAAO,EAAE,aAAA,EAAe,KAAA,EAAO,MAAA,EAAQ,iBAAiB,MAAA,EAAO;AAAA,EACjE;AAGA,EAAA,MAAM,aAAA,GAAgB,MAAM,OAAA,CAAQ,QAAA,EAAU,OAAO,CAAA;AACrD,EAAA,IAAI,CAAC,cAAc,KAAA,EAAO;AACxB,IAAA,OAAO,EAAE,aAAA,EAA
e,KAAA,EAAO,MAAA,EAAQ,cAAc,MAAA,EAAO;AAAA,EAC9D;AAEA,EAAA,OAAO,EAAE,eAAe,IAAA,EAAK;AAC/B;;;ACpLA,IAAM,wBAAA,GAA2B;AAAA,EAC/B,gBAAA;AAAA;AAAA,EACA,YAAA;AAAA;AAAA,EACA,eAAA;AAAA;AAAA,EACA,eAAA;AAAA;AAAA,EACA;AAAA;AACF,CAAA;AAWO,SAAS,qBAAqB,OAAA,EAAsD;AACzF,EAAA,MAAM,EAAE,QAAA,EAAU,OAAA,EAAQ,GAAI,OAAA;AAE9B,EAAA,IAAI,QAAA,CAAS,SAAS,CAAA,EAAG;AACvB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,CAAS,QAAQ,CAAA,EAAA,EAAK;AACxC,IAAA,MAAM,OAAA,GAAU,SAAS,CAAC,CAAA;AAC1B,IAAA,IAAI,CAAC,OAAA,EAAS;AAEd,IAAA,KAAA,MAAW,WAAW,wBAAA,EAA0B;AAC9C,MAAA,IAAI,OAAA,CAAQ,IAAA,CAAK,OAAO,CAAA,EAAG;AACzB,QAAA,OAAO;AAAA,UACL,OAAA,EAAS,CAAA,uBAAA,EAA0B,OAAA,CAAQ,MAAM,CAAA,CAAA;AAAA,UACjD,iBAAA,EAAmB,OAAA;AAAA,UACnB;AAAA,SACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,IAAA;AACT;AA6BO,SAAS,qBAAqB,OAAA,EAAqD;AACxF,EAAA,MAAM,EAAE,OAAM,GAAI,OAAA;AAElB,EAAA,IAAI,KAAA,CAAM,SAAS,CAAA,EAAG;AACpB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,IAAK,KAAA,CAAM,MAAA,GAAS,GAAG,CAAA,EAAA,EAAK;AAC1C,IAAA,MAAM,KAAA,GAAQ,MAAM,CAAC,CAAA;AACrB,IAAA,MAAM,MAAA,GAAS,KAAA,CAAM,CAAA,GAAI,CAAC,CAAA;AAC1B,IAAA,MAAM,KAAA,GAAQ,KAAA,CAAM,CAAA,GAAI,CAAC,CAAA;AAEzB,IAAA,IAAI,CAAC,KAAA,IAAS,CAAC,MAAA,IAAU,CAAC,KAAA,EAAO;AAGjC,IAAA,IACE,KAAA,CAAM,IAAA,KAAS,MAAA,CAAO,IAAA,IACtB,OAAO,IAAA,KAAS,KAAA,CAAM,IAAA,IACtB,KAAA,CAAM,OAAA,IACN,CAAC,MAAA,CAAO,OAAA,IACR,MAAM,OAAA,EACN;AACA,MAAA,OAAO;AAAA,QACL,MAAM,KAAA,CAAM,IAAA;AAAA,QACZ,OAAA,EAAS,CAAA,mBAAA,EAAsB,KAAA,CAAM,IAAI,CAAA;AAAA,OAC3C;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,IAAA;AACT;AAyBO,SAAS,kBAAkB,UAAA,EAAoD;AACpF,EAAA,IAAI,WAAW,MAAA,EAAQ;AACrB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,MAAM,KAAA,GAAQ,UAAA,CAAW,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA,CAAE,MAAA,CAAO,CAAC,IAAA,KAAS,IAAA,CAAK,IAAA,EAAK,CAAE,SAAS,CAAC,CAAA;AACnF,EAAA,MAAM,SAAA,GAAY,KAAA,CAAM,IAAA,CAAK,CAAC,IAAA,KAAS,oBAAA,CAAqB,IAAA,CAAK,IAAI,CAAC,CAAA,IAAK,KAAA,CAAM,CAAC,CAAA,IAAK,EAAA;AAEvF,EAAA,OAAO;AAAA,IACL,UAAU,UAAA,CAAW,QAAA;AAAA,IACrB,aAAa,UAAA,CAAW,MAAA;AAAA,IACxB,OAAA,EAAS,mBAAmB,UAAA,CAAW,QAAQ,KAAK,SAAA,CAAU,KAAA,CAAM,CAAA,EAAG,GAAG,CAAC,CAAA;AAAA,GAC7E;AACF;;;ACrGA,eAAsB,gBAAA,CACpB,UACA,KAAA,EACiC;AACjC,EAAA,MAAM,QAAA,GAAW,YAAY,KAAK,CAAA;AAClC,EAAA,IAAI,CAAC,QAAA,EAAU;AACb,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,MAAM,EAAE,OAAA,EAAS,MAAA,EAAQ,eAAA,EAAgB,GAAI,QAAA;AAG7C,EAAA,MAAM,OAAA,GAAU,MAAM,aAAA,CAAc,QAAA,EAAU,eAAe,CAAA;AAC7D,EAAA,IAAI,CAAC,QAAQ,aAAA,EAAe;AAC1B,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,EAAE,OAAA,EAAS,MAAA,EAAQ,eAAA,EAAgB;AAC5C;AAYA,SAAS,YAAY,KAAA,EAA4C;AAC/D,EAAA,QAAQ,MAAM,IAAA;AAAM,IAClB,KAAK,MAAA;AACH,MAAA,OAAO,wBAAA,CAAyB,MAAM,IAAI,CAAA;AAAA,IAC5C,KAAK,MAAA;AACH,MAAA,OAAO,wBAAA,CAAyB,MAAM,IAAI,CAAA;AAAA,IAC5C,KAAK,MAAA;AACH,MAAA,OAAO,qBAAA,CAAsB,MAAM,IAAI,CAAA;AAAA;AAE7C;AAKA,SAAS,yBAAyB,IAAA,EAA6C;AAC7E,EAAA,MAAM,MAAA,GAAS,qBAAqB,IAAI,CAAA;AACxC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO;AAAA,IACL,SAAS,MAAA,CAAO,OAAA;AAAA,IAChB,MAAA,EAAQ,iBAAA;AAAA,IACR,iBAAiB,MAAA,CAAO;AAAA,GAC1B;AACF;AAKA,SAAS,yBAAyB,IAAA,EAAwC;AACxE,EAAA,MAAM,MAAA,GAAS,qBAAqB,IAAI,CAAA;AACxC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO;AAAA,IACL,SAAS,MAAA,CAAO,OAAA;AAAA,IAChB,MAAA,EAAQ,iBAAA;AAAA;AAAA,IAER,eAAA,EAAiB,CAAA,MAAA,EAAS,MAAA,CAAO,IAAI,CAAA,iCAAA;AAAA,GACvC;AACF;AAKA,SAAS,sBAAsB,IAAA,EAAuC;AACpE,EAAA,MAAM,MAAA,GAAS,kBAAkB,IAAI,CAAA;AACrC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO;AAAA,IACL,SAAS,MAAA,CAAO,OAAA;AAAA,IAChB,MAAA,EAAQ,cAAA;AAAA,IACR,iBAAiB,MAAA,CAAO;AAAA,GAC1B;AACF;AAGA,IAAM,8BAAc,IAAI,GAAA,CAAY,CAAC,MAAA,EAAQ,M
AAA,EAAQ,MAAM,CAAC,CAAA;AAS5D,eAAsB,eAAe,QAAA,EAA2C;AAC9E,EAAA,MAAM,OAAA,GAAU,MAAS,EAAA,CAAA,QAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AACnD,EAAA,MAAM,IAAA,GAAO,IAAA,CAAK,KAAA,CAAM,OAAO,CAAA;AAE/B,EAAA,IAAI,CAAC,WAAA,CAAY,GAAA,CAAI,IAAA,CAAK,IAAI,CAAA,EAAG;AAC/B,IAAA,MAAM,IAAI,KAAA,CAAM,CAAA,wBAAA,EAA2B,IAAA,CAAK,IAAI,CAAA,kCAAA,CAAoC,CAAA;AAAA,EAC1F;AAEA,EAAA,OAAO,IAAA;AACT;;;ACjKO,SAAS,YAAY,KAAA,EAAuB;AACjD,EAAA,IAAI,KAAA,KAAU,GAAG,OAAO,KAAA;AACxB,EAAA,IAAI,KAAA,GAAQ,IAAA,EAAM,OAAO,CAAA,EAAG,KAAK,CAAA,EAAA,CAAA;AACjC,EAAA,MAAM,KAAK,KAAA,GAAQ,IAAA;AACnB,EAAA,IAAI,KAAK,IAAA,EAAM,OAAO,GAAG,EAAA,CAAG,OAAA,CAAQ,CAAC,CAAC,CAAA,GAAA,CAAA;AACtC,EAAA,MAAM,KAAK,EAAA,GAAK,IAAA;AAChB,EAAA,OAAO,CAAA,EAAG,EAAA,CAAG,OAAA,CAAQ,CAAC,CAAC,CAAA,GAAA,CAAA;AACzB;AAUO,SAAS,UAAA,CAAW,OAAe,IAAA,EAAsB;AAC9D,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,KAAA,EAAO,EAAE,CAAA;AACjC,EAAA,IAAI,MAAA,CAAO,KAAA,CAAM,MAAM,CAAA,IAAK,UAAU,CAAA,EAAG;AACvC,IAAA,MAAM,IAAI,KAAA,CAAM,CAAA,QAAA,EAAW,IAAI,CAAA,4BAAA,CAA8B,CAAA;AAAA,EAC/D;AACA,EAAA,OAAO,MAAA;AACT;AAOO,SAAS,WAAA,GAAsB;AACpC,EAAA,OAAO,OAAA,CAAQ,GAAA,CAAI,qBAAqB,CAAA,IAAK,QAAQ,GAAA,EAAI;AAC3D;AChB0BF,IAAAA,CAAK,OAAA,EAAQ,EAAG,mBAAmB,QAAQ;;;AC0D9D,IAAM,OAAA,GAAU,OAAA;ACrEhB,IAAM,WAAA,GAAc,yBAAA;AAGpB,IAAM,mBAAA,GAAsB,GAAA;AAG5B,IAAM,gBAAA,GAAmB,EAAA;AAGhC,IAAM,UAAA,GAAa,GAAA,GAAO,EAAA,GAAK,EAAA,GAAK,EAAA;AAGpC,IAAM,kBAAA,GAAqB,CAAA;AAG3B,IAAM,gBAAA,GAAmB,CAAA;AAkBlB,SAAS,cAAA,CAAe,UAAkB,IAAA,EAAoB;AACnE,EAAA,MAAM,IAAA,GAAO,KAAK,WAAA,EAAY;AAC9B,EAAA,MAAM,KAAA,GAAQ,OAAO,IAAA,CAAK,QAAA,KAAa,kBAAkB,CAAA,CAAE,QAAA,CAAS,gBAAA,EAAkB,GAAG,CAAA;AACzF,EAAA,OAAOA,KAAK,QAAA,EAAU,WAAA,EAAa,GAAG,IAAI,CAAA,CAAA,EAAI,KAAK,CAAA,MAAA,CAAQ,CAAA;AAC7D;AAMA,eAAe,mBACb,QAAA,EAC0E;AAC1E,EAAA,MAAM,QAAA,GAAWA,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC5C,EAAA,IAAI,OAAA;AACJ,EAAA,IAAI;AACF,IAAA,OAAA,GAAU,MAAMG,QAAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AAAA,EAC5C,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,EAAC;AAAA,EACV;AAEA,EAAA,MAAM,UAA2E,EAAC;AAClF,EAAA,KAAA,MAAW,IAAA,IAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA,EAAG;AACtC,IAAA,MAAM,OAAA,GAAU,KAAK,IAAA,EAAK;AAC1B,IAAA,IAAI,CAAC,OAAA,EAAS;AAEd,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,OAAO,CAAA;AACjC,MAAA,OAAA,CAAQ,IAAA,CAAK,EAAE,IAAA,EAAM,OAAA,EAAS,QAAQ,CAAA;AAAA,IACxC,CAAA,CAAA,MAAQ;AACN,MAAA,OAAA,CAAQ,KAAK,EAAE,IAAA,EAAM,OAAA,EAAS,MAAA,EAAQ,MAAM,CAAA;AAAA,IAC9C;AAAA,EACF;AACA,EAAA,OAAO,OAAA;AACT;AAKA,eAAsB,gBAAgB,QAAA,EAAmC;AACvE,EAAA,MAAM,KAAA,GAAQ,MAAM,kBAAA,CAAmB,QAAQ,CAAA;AAC/C,EAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,EAAA,KAAA,MAAW,EAAE,MAAA,EAAO,IAAK,KAAA,EAAO;AAC9B,IAAA,IAAI,MAAA,IAAU,MAAA,CAAO,SAAS,CAAA,KAAM,IAAA,EAAM;AACxC,MAAA,KAAA,EAAA;AAAA,IACF;AAAA,EACF;AACA,EAAA,OAAO,KAAA;AACT;AAKA,eAAsB,gBAAgB,QAAA,EAAoC;AACxE,EAAA,MAAM,KAAA,GAAQ,MAAM,eAAA,CAAgB,QAAQ,CAAA;AAC5C,EAAA,OAAO,KAAA,IAAS,mBAAA;AAClB;AAMA,eAAsB,yBAAyB,QAAA,EAAmC;AAChF,EAAA,MAAM,QAAA,GAAWH,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC5C,EAAA,MAAM,WAAW,QAAA,GAAW,MAAA;AAG5B,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAG9C,EAAA,MAAM,cAAA,GAAiB,MAAM,eAAA,CAAgB,QAAQ,CAAA;AAGrD,EAAA,MAAMI,MAAMH,OAAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AAGlD,EAAA,MAAM,KAAA,GAAQ,QAAQ,GAAA,CAAI,CAAC,WAAW,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA,GAAI,IAAI,CAAA;AACnE,EAAA,MAAM,UAAU,QAAA,EAAU,KAAA,CAAM,IAAA,CAAK,EAAE,GAAG,OAAO,CAAA;AAGjD,EAAA,MAAM,MAAA,CAAO,UAAU,QAAQ,CAAA;AAE/B,EAAA,OAAO,cAAA;AACT;AAUA,SAAS,aAAA,CAAc,QAAgB,GAAA,EAAoB;AACzD,EAAA,MAAM,OAAA,GAAU,IAAI,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA;AACvC,EAAA,MAAM,KAAA,GAAQ,GAAA,CAAI,OAAA,EAAQ,GAAI,QAAQ,OAAA,EAAQ;AAC9C,EAAA,MAAM,UAAU,KAAA,GAAQ,UAAA;AAGxB,EAAA,OAAO,UAAU,gBAAA,KAAqB,MAAA,CAAO,cAAA,KAAmB,MAAA,IAAa,OAAO,cAAA,KAAmB,CA
AA,CAAA;AACzG;AAOA,eAAsB,kBAAkB,QAAA,EAAmC;AACzE,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAC9C,EAAA,MAAM,GAAA,uBAAU,IAAA,EAAK;AAErB,EAAA,MAAM,YAAsB,EAAC;AAC7B,EAAA,MAAM,SAAmB,EAAC;AAE1B,EAAA,KAAA,MAAW,UAAU,OAAA,EAAS;AAC5B,IAAA,IAAI,aAAA,CAAc,MAAA,EAAQ,GAAG,CAAA,EAAG;AAC9B,MAAA,SAAA,CAAU,KAAK,MAAM,CAAA;AAAA,IACvB,CAAA,MAAO;AACL,MAAA,MAAA,CAAO,KAAK,MAAM,CAAA;AAAA,IACpB;AAAA,EACF;AAEA,EAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAC1B,IAAA,OAAO,CAAA;AAAA,EACT;AAGA,EAAA,MAAM,aAAA,uBAAoB,GAAA,EAAsB;AAChD,EAAA,KAAA,MAAW,UAAU,SAAA,EAAW;AAC9B,IAAA,MAAM,OAAA,GAAU,IAAI,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA;AACvC,IAAA,MAAM,WAAA,GAAc,cAAA,CAAe,QAAA,EAAU,OAAO,CAAA;AACpD,IAAA,MAAM,KAAA,GAAQ,aAAA,CAAc,GAAA,CAAI,WAAW,KAAK,EAAC;AACjD,IAAA,KAAA,CAAM,KAAK,MAAM,CAAA;AACjB,IAAA,aAAA,CAAc,GAAA,CAAI,aAAa,KAAK,CAAA;AAAA,EACtC;AAGA,EAAA,MAAM,UAAA,GAAaD,IAAAA,CAAK,QAAA,EAAU,WAAW,CAAA;AAC7C,EAAA,MAAMI,KAAAA,CAAM,UAAA,EAAY,EAAE,SAAA,EAAW,MAAM,CAAA;AAG3C,EAAA,KAAA,MAAW,CAAC,WAAA,EAAa,cAAc,CAAA,IAAK,aAAA,EAAe;AACzD,IAAA,MAAMC,MAAAA,GAAQ,cAAA,CAAe,GAAA,CAAI,CAAC,CAAA,KAAM,IAAA,CAAK,SAAA,CAAU,CAAC,CAAA,GAAI,IAAI,CAAA,CAAE,IAAA,CAAK,EAAE,CAAA;AACzE,IAAA,MAAMC,UAAAA,CAAW,WAAA,EAAaD,MAAAA,EAAO,OAAO,CAAA;AAAA,EAC9C;AAGA,EAAA,MAAM,QAAA,GAAWL,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC5C,EAAA,MAAM,WAAW,QAAA,GAAW,MAAA;AAC5B,EAAA,MAAMI,MAAMH,OAAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AAElD,EAAA,MAAM,KAAA,GAAQ,OAAO,GAAA,CAAI,CAAC,WAAW,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA,GAAI,IAAI,CAAA;AAClE,EAAA,MAAM,UAAU,QAAA,EAAU,KAAA,CAAM,IAAA,CAAK,EAAE,GAAG,OAAO,CAAA;AACjD,EAAA,MAAM,MAAA,CAAO,UAAU,QAAQ,CAAA;AAE/B,EAAA,OAAO,SAAA,CAAU,MAAA;AACnB;AAKA,eAAsB,QAAQ,QAAA,EAA0C;AAEtE,EAAA,MAAM,gBAAA,GAAmB,MAAM,eAAA,CAAgB,QAAQ,CAAA;AAGvD,EAAA,MAAM,QAAA,GAAW,MAAM,iBAAA,CAAkB,QAAQ,CAAA;AAGjD,EAAA,MAAM,sBAAA,GAAyB,MAAM,eAAA,CAAgB,QAAQ,CAAA;AAC7D,EAAA,MAAM,yBAAyB,QAAQ,CAAA;AAIvC,EAAA,MAAM,iBAAA,GAAoB,QAAA,GAAW,CAAA,GAAI,gBAAA,GAAmB,sBAAA;AAG5D,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAE9C,EAAA,OAAO;AAAA,IACL,QAAA;AAAA,IACA,iBAAA;AAAA,IACA,kBAAkB,OAAA,CAAQ;AAAA,GAC5B;AACF;;;AC1MA,IAAM,GAAA,GAAM;AAAA,EACV,OAAA,EAAS,CAAC,GAAA,KAAsB,OAAA,CAAQ,IAAI,KAAA,CAAM,KAAA,CAAM,MAAM,CAAA,EAAG,GAAG,CAAA;AAAA,EACpE,KAAA,EAAO,CAAC,GAAA,KAAsB,OAAA,CAAQ,MAAM,KAAA,CAAM,GAAA,CAAI,SAAS,CAAA,EAAG,GAAG,CAAA;AAAA,EACrE,IAAA,EAAM,CAAC,GAAA,KAAsB,OAAA,CAAQ,IAAI,KAAA,CAAM,IAAA,CAAK,QAAQ,CAAA,EAAG,GAAG,CAAA;AAAA,EAClE,IAAA,EAAM,CAAC,GAAA,KAAsB,OAAA,CAAQ,IAAI,KAAA,CAAM,MAAA,CAAO,QAAQ,CAAA,EAAG,GAAG;AACtE,CAAA;AAWA,SAAS,cAAc,GAAA,EAA0B;AAC/C,EAAA,MAAM,IAAA,GAAO,IAAI,eAAA,EAAgB;AACjC,EAAA,OAAO;AAAA,IACL,OAAA,EAAS,KAAK,OAAA,IAAW,KAAA;AAAA,IACzB,KAAA,EAAO,KAAK,KAAA,IAAS;AAAA,GACvB;AACF;AAGA,IAAM,oBAAA,GAAuB,IAAA;AAG7B,IAAM,kBAAA,GAAqB,IAAA;AAE3B,IAAM,OAAA,GAAU,IAAI,OAAA,EAAQ;AAG5B,OAAA,CACG,OAAO,eAAA,EAAiB,sBAAsB,CAAA,CAC9C,MAAA,CAAO,eAAe,+BAA+B,CAAA;AAExD,OAAA,CACG,KAAK,gBAAgB,CAAA,CACrB,YAAY,mDAAmD,CAAA,CAC/D,QAAQ,OAAO,CAAA;AAElB,OAAA,CACG,OAAA,CAAQ,iBAAiB,CAAA,CACzB,WAAA,CAAY,sBAAsB,CAAA,CAClC,MAAA,CAAO,sBAAA,EAAwB,4BAA4B,CAAA,CAC3D,MAAA,CAAO,iBAAiB,sBAAA,EAAwB,EAAE,EAClD,MAAA,CAAO,WAAA,EAAa,mBAAmB,CAAA,CACvC,MAAA,CAAO,eAA+B,OAAA,EAAiB,OAAA,EAA4D;AAClH,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,aAAA,CAAc,IAAI,CAAA;AAEpC,EAAA,MAAM,MAAA,GAAiB;AAAA,IACrB,EAAA,EAAI,WAAW,OAAO,CAAA;AAAA,IACtB,IAAA,EAAM,OAAA;AAAA,IACN,OAAA,EAAS,QAAQ,OAAA,IAAW,gBAAA;AAAA,IAC5B,OAAA;AAAA,IACA,IAAA,EAAM,OAAA,CAAQ,IAAA,GAAO,OAAA,CAAQ,KAAK,KAAA,CAAM,GAAG,CAAA,CAAE,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,CAAE,IAAA,EAAM,IAAI,EAAC;AAAA,IACrE,MAAA,EAAQ,QAAA;AAAA,IACR,OAAA,EAAS;AAAA,MACP,IAAA,EAAM,KAAA;AAAA,MACN,MAAA,EAAQ;
AAAA,KACV;AAAA,IACA,OAAA,EAAA,iBAAS,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAAA,IAChC,SAAA,EAAW,QAAQ,GAAA,IAAO,KAAA;AAAA,IAC1B,YAAY,EAAC;AAAA,IACb,SAAS;AAAC,GACZ;AAEA,EAAA,MAAM,YAAA,CAAa,UAAU,MAAM,CAAA;AACnC,EAAA,GAAA,CAAI,OAAA,CAAQ,CAAA,SAAA,EAAY,OAAO,CAAA,CAAE,CAAA;AACjC,EAAA,IAAI,CAAC,KAAA,EAAO;AACV,IAAA,OAAA,CAAQ,IAAI,CAAA,IAAA,EAAO,KAAA,CAAM,IAAI,MAAA,CAAO,EAAE,CAAC,CAAA,CAAE,CAAA;AAAA,EAC3C;AACF,CAAC,CAAA;AAEH,OAAA,CACG,OAAA,CAAQ,gBAAgB,CAAA,CACxB,WAAA,CAAY,2BAA2B,CAAA,CACvC,MAAA,CAAO,sBAAA,EAAwB,iBAAA,EAAmB,oBAAoB,CAAA,CACtE,MAAA,CAAO,eAA+B,OAAe,OAAA,EAA4B;AAChF,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,MAAM,KAAA,GAAQ,UAAA,CAAW,OAAA,CAAQ,KAAA,EAAO,OAAO,CAAA;AAC/C,EAAA,MAAM,EAAE,OAAA,EAAS,KAAA,EAAM,GAAI,cAAc,IAAI,CAAA;AAG7C,EAAA,MAAM,aAAa,QAAQ,CAAA;AAE3B,EAAA,MAAM,OAAA,GAAU,MAAM,aAAA,CAAc,QAAA,EAAU,OAAO,KAAK,CAAA;AAE1D,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxB,IAAA,OAAA,CAAQ,IAAI,uFAAuF,CAAA;AACnG,IAAA;AAAA,EACF;AAEA,EAAA,IAAI,CAAC,KAAA,EAAO;AACV,IAAA,GAAA,CAAI,IAAA,CAAK,CAAA,MAAA,EAAS,OAAA,CAAQ,MAAM,CAAA;AAAA,CAAe,CAAA;AAAA,EACjD;AACA,EAAA,KAAA,MAAW,UAAU,OAAA,EAAS;AAC5B,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,EAAI,KAAA,CAAM,IAAA,CAAK,MAAA,CAAO,EAAE,CAAC,CAAA,EAAA,EAAK,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1D,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,WAAA,EAAc,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1C,IAAA,IAAI,OAAA,IAAW,OAAO,OAAA,EAAS;AAC7B,MAAA,OAAA,CAAQ,GAAA,CAAI,cAAc,MAAA,CAAO,OAAA,CAAQ,IAAI,CAAA,GAAA,EAAM,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,CAAE,CAAA;AAC1E,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,WAAA,EAAc,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAAA,IAC5C;AACA,IAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAA,GAAS,CAAA,EAAG;AAC1B,MAAA,OAAA,CAAQ,IAAI,CAAA,QAAA,EAAW,MAAA,CAAO,KAAK,IAAA,CAAK,IAAI,CAAC,CAAA,CAAE,CAAA;AAAA,IACjD;AACA,IAAA,OAAA,CAAQ,GAAA,EAAI;AAAA,EACd;AACF,CAAC,CAAA;AAEH,OAAA,CACG,OAAA,CAAQ,MAAM,CAAA,CACd,WAAA,CAAY,kBAAkB,CAAA,CAC9B,MAAA,CAAO,sBAAA,EAAwB,iBAAA,EAAmB,kBAAkB,CAAA,CACpE,MAAA,CAAO,eAA+B,OAAA,EAA4B;AACjE,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,MAAM,KAAA,GAAQ,UAAA,CAAW,OAAA,CAAQ,KAAA,EAAO,OAAO,CAAA;AAC/C,EAAA,MAAM,EAAE,OAAA,EAAS,KAAA,EAAM,GAAI,cAAc,IAAI,CAAA;AAE7C,EAAA,MAAM,EAAE,OAAA,EAAS,YAAA,EAAa,GAAI,MAAM,YAAY,QAAQ,CAAA;AAE5D,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxB,IAAA,OAAA,CAAQ,IAAI,+DAA+D,CAAA;AAC3E,IAAA,IAAI,eAAe,CAAA,EAAG;AACpB,MAAA,GAAA,CAAI,IAAA,CAAK,CAAA,EAAG,YAAY,CAAA,6BAAA,CAA+B,CAAA;AAAA,IACzD;AACA,IAAA;AAAA,EACF;AAEA,EAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AAGrC,EAAA,IAAI,CAAC,KAAA,EAAO;AACV,IAAA,GAAA,CAAI,KAAK,CAAA,QAAA,EAAW,MAAA,CAAO,MAAM,CAAA,IAAA,EAAO,QAAQ,MAAM,CAAA;AAAA,CAAe,CAAA;AAAA,EACvE;AAEA,EAAA,KAAA,MAAW,UAAU,MAAA,EAAQ;AAC3B,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,EAAI,KAAA,CAAM,IAAA,CAAK,MAAA,CAAO,EAAE,CAAC,CAAA,EAAA,EAAK,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1D,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,OAAA,CAAQ,IAAI,CAAA,QAAA,EAAW,MAAA,CAAO,IAAI,CAAA,WAAA,EAAc,MAAA,CAAO,MAAM,CAAA,CAAE,CAAA;AAC/D,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,WAAA,EAAc,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1C,MAAA,IAAI,OAAO,OAAA,EAAS;AAClB,QAAA,OAAA,CAAQ,GAAA,CAAI,cAAc,MAAA,CAAO,OAAA,CAAQ,IAAI,CAAA,GAAA,EAAM,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,CAAE,CAAA;AAAA,MAC5E;AAAA,IACF,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,IAAI,CAAA,QAAA,EAAW,MAAA,CAAO,IAAI,CAAA,WAAA,EAAc,MAAA,CAAO,MAAM,CAAA,CAAE,CAAA;AAAA,IACjE;AACA,IAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAA,GAAS,CAAA,EAAG;AAC1B,MAAA,OAAA,CAAQ,IAAI,CAAA,QAAA,EAAW,MAAA,CAAO,KAAK,IAAA,CAAK,IAAI,CAAC,CAAA,CAAE,CAAA;AAAA,IACjD;AACA,IAAA,OAAA,CAAQ,GAAA,EAAI;AAAA,EACd;AAEA,EAAA,IAAI,eAAe,CAAA,EAAG;AACpB,IAAA,GAAA,CAAI,IAAA,CAAK,CAAA,EAAG,YAAY,CAAA,6BAAA,CAA+B,CAAA;AAAA,EACzD;AACF,CAAC,CAAA;AAEH,OAAA,CACG,OAAA,CAAQ,SAAS,CAAA,CACjB,WAAA,CAAY,iCAA
iC,CAAA,CAC7C,MAAA,CAAO,aAAA,EAAe,iCAAiC,CAAA,CACvD,MAAA,CAAO,OAAO,OAAA,KAAiC;AAC9C,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,IAAI,QAAQ,KAAA,EAAO;AACjB,IAAA,OAAA,CAAQ,IAAI,0BAA0B,CAAA;AACtC,IAAA,MAAM,aAAa,QAAQ,CAAA;AAC3B,IAAA,OAAA,CAAQ,IAAI,gBAAgB,CAAA;AAAA,EAC9B,CAAA,MAAO;AACL,IAAA,MAAM,OAAA,GAAU,MAAM,YAAA,CAAa,QAAQ,CAAA;AAC3C,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,OAAA,CAAQ,IAAI,gCAAgC,CAAA;AAAA,IAC9C,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,IAAI,sBAAsB,CAAA;AAAA,IACpC;AAAA,EACF;AACF,CAAC,CAAA;AAEH,OAAA,CACG,QAAQ,QAAQ,CAAA,CAChB,WAAA,CAAY,qCAAqC,EACjD,cAAA,CAAe,gBAAA,EAAkB,yBAAyB,CAAA,CAC1D,OAAO,QAAA,EAAU,oCAAoC,EACrD,MAAA,CAAO,QAAA,EAAU,uBAAuB,CAAA,CACxC,MAAA;AAAA,EACC,OAAO,OAAA,KAA+D;AACpE,IAAA,MAAM,WAAW,WAAA,EAAY;AAE7B,IAAA,MAAM,KAAA,GAAQ,MAAM,cAAA,CAAe,OAAA,CAAQ,KAAK,CAAA;AAChD,IAAA,MAAM,MAAA,GAAS,MAAM,gBAAA,CAAiB,QAAA,EAAU,KAAK,CAAA;AAErD,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,QAAA,OAAA,CAAQ,IAAI,IAAA,CAAK,SAAA,CAAU,EAAE,QAAA,EAAU,KAAA,EAAO,CAAC,CAAA;AAAA,MACjD,CAAA,MAAO;AACL,QAAA,OAAA,CAAQ,IAAI,+BAA+B,CAAA;AAAA,MAC7C;AACA,MAAA;AAAA,IACF;AAEA,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU,EAAE,UAAU,IAAA,EAAM,GAAG,MAAA,EAAQ,CAAC,CAAA;AACzD,MAAA;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,IAAI,4BAA4B,CAAA;AACxC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,WAAA,EAAc,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1C,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAa,MAAA,CAAO,MAAM,CAAA,CAAE,CAAA;AACxC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,YAAA,EAAe,MAAA,CAAO,eAAe,CAAA,CAAE,CAAA;AAEnD,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,MAAM,MAAA,GAAiB;AAAA,QACrB,EAAA,EAAI,UAAA,CAAW,MAAA,CAAO,eAAe,CAAA;AAAA,QACrC,IAAA,EAAM,OAAA;AAAA,QACN,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,SAAS,MAAA,CAAO,eAAA;AAAA,QAChB,MAAM,EAAC;AAAA,QACP,QAAQ,MAAA,CAAO,MAAA;AAAA,QACf,OAAA,EAAS,EAAE,IAAA,EAAM,QAAA,EAAU,QAAQ,cAAA,EAAe;AAAA,QAClD,OAAA,EAAA,iBAAS,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAAA,QAChC,SAAA,EAAW,KAAA;AAAA,QACX,YAAY,EAAC;AAAA,QACb,SAAS;AAAC,OACZ;AAEA,MAAA,MAAM,YAAA,CAAa,UAAU,MAAM,CAAA;AACnC,MAAA,OAAA,CAAQ,GAAA,CAAI;AAAA,iBAAA,EAAsB,MAAA,CAAO,EAAE,CAAA,CAAE,CAAA;AAAA,IAC/C;AAAA,EACF;AACF,CAAA;AAEF,OAAA,CACG,QAAQ,SAAS,CAAA,CACjB,WAAA,CAAY,4DAA4D,EACxE,MAAA,CAAO,aAAA,EAAe,wCAAwC,CAAA,CAC9D,OAAO,WAAA,EAAa,gDAAgD,CAAA,CACpE,MAAA,CAAO,OAAO,OAAA,KAAmD;AAChE,EAAA,MAAM,WAAW,WAAA,EAAY;AAE7B,EAAA,MAAM,UAAA,GAAa,MAAM,eAAA,CAAgB,QAAQ,CAAA;AACjD,EAAA,MAAM,KAAA,GAAQ,MAAM,eAAA,CAAgB,QAAQ,CAAA;AAE5C,EAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,IAAA,OAAA,CAAQ,IAAI,sCAAsC,CAAA;AAClD,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,kBAAA,EAAqB,UAAU,CAAA,CAAE,CAAA;AAC7C,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,mBAAA,EAAsB,KAAA,GAAQ,KAAA,GAAQ,IAAI,CAAA,CAAE,CAAA;AACxD,IAAA;AAAA,EACF;AAEA,EAAA,IAAI,CAAC,KAAA,IAAS,CAAC,OAAA,CAAQ,KAAA,EAAO;AAC5B,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,uBAAA,EAA0B,UAAU,CAAA,0BAAA,EAA6B,mBAAmB,CAAA,EAAA,CAAI,CAAA;AACpG,IAAA,OAAA,CAAQ,IAAI,gCAAgC,CAAA;AAC5C,IAAA;AAAA,EACF;AAEA,EAAA,OAAA,CAAQ,IAAI,uBAAuB,CAAA;AACnC,EAAA,MAAM,MAAA,GAAS,MAAM,OAAA,CAAQ,QAAQ,CAAA;AAErC,EAAA,OAAA,CAAQ,IAAI,wBAAwB,CAAA;AACpC,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,YAAA,EAAe,MAAA,CAAO,QAAQ,CAAA,UAAA,CAAY,CAAA;AACtD,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,sBAAA,EAAyB,MAAA,CAAO,iBAAiB,CAAA,CAAE,CAAA;AAC/D,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,qBAAA,EAAwB,MAAA,CAAO,gBAAgB,CAAA,CAAE,CAAA;AAG7D,EAAA,MAAM,aAAa,QAAQ,CAAA;AAC3B,EAAA,OAAA,CAAQ,IAAI,kBAAkB,CAAA;AAChC,CAAC,CAAA;AAEH,OAAA,CACG,QAAQ,QAAQ,CAAA,CAChB,WAAA,CAAY,kCAAkC,EAC9C,MAAA,CAAO,gBAAA,EAAkB,wDAAwD,CAAA,CACjF,OAAO,eAAA,EAAiB,4CAA4C,CAAA,CACpE,MAAA,CAAO,OAAO,OAAA,KAA+C;AAC5D,EAAA,MAAM,WAAW,WAAA,EAAY;AAE7B,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAE9C,EAAA,IAAI,QAAA,GAAW,OAAA;AAGf,EAAA,IAAI,QAAQ,KAAA,EAAO;
AACjB,IAAA,MAAM,SAAA,GAAY,IAAI,IAAA,CAAK,OAAA,CAAQ,KAAK,CAAA;AACxC,IAAA,IAAI,MAAA,CAAO,KAAA,CAAM,SAAA,CAAU,OAAA,EAAS,CAAA,EAAG;AACrC,MAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,qBAAA,EAAwB,OAAA,CAAQ,KAAK,CAAA,wCAAA,CAA0C,CAAA;AAC7F,MAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,IAChB;AACA,IAAA,QAAA,GAAW,QAAA,CAAS,OAAO,CAAC,MAAA,KAAW,IAAI,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA,IAAK,SAAS,CAAA;AAAA,EAC9E;AAGA,EAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,IAAA,MAAM,UAAA,GAAa,OAAA,CAAQ,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,CAAE,IAAA,EAAM,CAAA;AAC9D,IAAA,QAAA,GAAW,QAAA,CAAS,MAAA,CAAO,CAAC,MAAA,KAAW,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,CAAC,GAAA,KAAQ,UAAA,CAAW,QAAA,CAAS,GAAG,CAAC,CAAC,CAAA;AAAA,EAC5F;AAGA,EAAA,OAAA,CAAQ,IAAI,IAAA,CAAK,SAAA,CAAU,QAAA,EAAU,IAAA,EAAM,CAAC,CAAC,CAAA;AAC/C,CAAC,CAAA;AAEH,OAAA,CACG,OAAA,CAAQ,eAAe,CAAA,CACvB,WAAA,CAAY,kCAAkC,CAAA,CAC9C,MAAA,CAAO,OAAO,IAAA,KAAiB;AAC9B,EAAA,MAAM,WAAW,WAAA,EAAY;AAG7B,EAAA,IAAI,OAAA;AACJ,EAAA,IAAI;AACF,IAAA,MAAM,EAAE,QAAA,EAAAE,SAAAA,EAAS,GAAI,MAAM,OAAO,aAAkB,CAAA;AACpD,IAAA,OAAA,GAAU,MAAMA,SAAAA,CAAS,IAAA,EAAM,OAAO,CAAA;AAAA,EACxC,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,OAAQ,GAAA,CAA8B,IAAA;AAC5C,IAAA,IAAI,SAAS,QAAA,EAAU;AACrB,MAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,uBAAA,EAA0B,IAAI,CAAA,CAAE,CAAA;AAAA,IAChD,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,oBAAA,EAAwB,GAAA,CAAc,OAAO,CAAA,CAAE,CAAA;AAAA,IAC/D;AACA,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAGA,EAAA,MAAM,EAAE,OAAA,EAAS,eAAA,EAAgB,GAAI,MAAM,YAAY,QAAQ,CAAA;AAC/D,EAAA,MAAM,WAAA,GAAc,IAAI,GAAA,CAAI,eAAA,CAAgB,IAAI,CAAC,CAAA,KAAM,CAAA,CAAE,EAAE,CAAC,CAAA;AAG5D,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA;AAChC,EAAA,IAAI,QAAA,GAAW,CAAA;AACf,EAAA,IAAI,OAAA,GAAU,CAAA;AACd,EAAA,IAAI,OAAA,GAAU,CAAA;AAEd,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,MAAM,OAAA,GAAU,KAAK,IAAA,EAAK;AAC1B,IAAA,IAAI,CAAC,OAAA,EAAS;AAGd,IAAA,IAAI,MAAA;AACJ,IAAA,IAAI;AACF,MAAA,MAAA,GAAS,IAAA,CAAK,MAAM,OAAO,CAAA;AAAA,IAC7B,CAAA,CAAA,MAAQ;AACN,MAAA,OAAA,EAAA;AACA,MAAA;AAAA,IACF;AAGA,IAAA,MAAM,MAAA,GAAS,YAAA,CAAa,SAAA,CAAU,MAAM,CAAA;AAC5C,IAAA,IAAI,CAAC,OAAO,OAAA,EAAS;AACnB,MAAA,OAAA,EAAA;AACA,MAAA;AAAA,IACF;AAEA,IAAA,MAAM,SAAiB,MAAA,CAAO,IAAA;AAG9B,IAAA,IAAI,WAAA,CAAY,GAAA,CAAI,MAAA,CAAO,EAAE,CAAA,EAAG;AAC9B,MAAA,OAAA,EAAA;AACA,MAAA;AAAA,IACF;AAGA,IAAA,MAAM,YAAA,CAAa,UAAU,MAAM,CAAA;AACnC,IAAA,WAAA,CAAY,GAAA,CAAI,OAAO,EAAE,CAAA;AACzB,IAAA,QAAA,EAAA;AAAA,EACF;AAGA,EAAA,MAAM,UAAA,GAAa,QAAA,KAAa,CAAA,GAAI,QAAA,GAAW,SAAA;AAC/C,EAAA,MAAM,QAAkB,EAAC;AACzB,EAAA,IAAI,UAAU,CAAA,EAAG,KAAA,CAAM,IAAA,CAAK,CAAA,EAAG,OAAO,CAAA,QAAA,CAAU,CAAA;AAChD,EAAA,IAAI,UAAU,CAAA,EAAG,KAAA,CAAM,IAAA,CAAK,CAAA,EAAG,OAAO,CAAA,QAAA,CAAU,CAAA;AAEhD,EAAA,IAAI,KAAA,CAAM,SAAS,CAAA,EAAG;AACpB,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,SAAA,EAAY,QAAQ,CAAA,CAAA,EAAI,UAAU,KAAK,KAAA,CAAM,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA,CAAG,CAAA;AAAA,EACxE,CAAA,MAAO;AACL,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,SAAA,EAAY,QAAQ,CAAA,CAAA,EAAI,UAAU,CAAA,CAAE,CAAA;AAAA,EAClD;AACF,CAAC,CAAA;AAEH,OAAA,CACG,QAAQ,OAAO,CAAA,CACf,YAAY,qCAAqC,CAAA,CACjD,OAAO,YAAY;AAClB,EAAA,MAAM,WAAW,WAAA,EAAY;AAG7B,EAAA,MAAM,aAAa,QAAQ,CAAA;AAG3B,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAC9C,EAAA,MAAM,YAAA,GAAe,MAAM,eAAA,CAAgB,QAAQ,CAAA;AACnD,EAAA,MAAM,eAAe,OAAA,CAAQ,MAAA;AAG7B,EAAA,MAAM,cAAA,GAAiB,kBAAkB,QAAQ,CAAA;AACjD,EAAA,MAAM,eAAA,GAAkB,eAAe,MAAA,CAAO,CAAC,KAAK,CAAA,KAAM,GAAA,GAAM,CAAA,CAAE,KAAA,EAAO,CAAC,CAAA;AAC1E,EAAA,MAAM,gBAAgB,YAAA,GAAe,CAAA,GAAA,CAAK,kBAAkB,YAAA,EAAc,OAAA,CAAQ,CAAC,CAAA,GAAI,KAAA;AAGvF,EAAA,MAAM,SAAA,GAAYH,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC7C,EAAA,MAAM,MAAA,GAASA,IAAAA,CAAK,QAAA,EAAU,OAAO,CAAA;AAErC,EAAA,IAAI,QAAA,GAAW,CAAA;AACf,EAAA,IA
AI,SAAA,GAAY,CAAA;AAEhB,EAAA,IAAI;AACF,IAAA,QAAA,GAAWO,QAAAA,CAAS,SAAS,CAAA,CAAE,IAAA;AAAA,EACjC,CAAA,CAAA,MAAQ;AAAA,EAER;AAEA,EAAA,IAAI;AACF,IAAA,SAAA,GAAYA,QAAAA,CAAS,MAAM,CAAA,CAAE,IAAA;AAAA,EAC/B,CAAA,CAAA,MAAQ;AAAA,EAER;AAEA,EAAA,MAAM,YAAY,QAAA,GAAW,SAAA;AAG7B,EAAA,MAAM,WAAA,GAAc,YAAA,GAAe,CAAA,GAAI,CAAA,EAAA,EAAK,YAAY,CAAA,SAAA,CAAA,GAAc,EAAA;AACtE,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,SAAA,EAAY,YAAY,CAAA,MAAA,EAAS,WAAW,CAAA,CAAE,CAAA;AAC1D,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,YAAA,EAAe,eAAe,CAAA,QAAA,EAAW,aAAa,CAAA,eAAA,CAAiB,CAAA;AACnF,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,SAAA,EAAY,WAAA,CAAY,SAAS,CAAC,CAAA,SAAA,EAAY,WAAA,CAAY,SAAS,CAAC,CAAA,QAAA,EAAW,WAAA,CAAY,QAAQ,CAAC,CAAA,CAAA,CAAG,CAAA;AACrH,CAAC,CAAA;AAEH,OAAA,CAAQ,KAAA,EAAM","file":"cli.js","sourcesContent":["/**\n * Lesson type definitions using Zod schemas\n */\n\nimport { createHash } from 'node:crypto';\nimport { z } from 'zod';\n\n// Source of lesson capture\nexport const SourceSchema = z.enum([\n 'user_correction',\n 'self_correction',\n 'test_failure',\n 'manual',\n]);\n\n// Context about when lesson was learned\nexport const ContextSchema = z.object({\n tool: z.string(),\n intent: z.string(),\n});\n\n// Code pattern (bad -> good)\nexport const PatternSchema = z.object({\n bad: z.string(),\n good: z.string(),\n});\n\n// Severity levels for lessons\nexport const SeveritySchema = z.enum(['high', 'medium', 'low']);\n\n// Lesson type - semantic marker for lesson quality tier\nexport const LessonTypeSchema = z.enum(['quick', 'full']);\n\n/**\n * Unified Lesson schema.\n *\n * The `type` field is a semantic marker:\n * - 'quick': Minimal lesson for fast capture\n * - 'full': Important lesson (typically has evidence/severity)\n *\n * All fields except core identity are optional for flexibility.\n * Semantic meaning is preserved through convention, not schema enforcement.\n */\nexport const LessonSchema = z.object({\n // Core identity (required)\n id: z.string(),\n type: LessonTypeSchema,\n trigger: z.string(),\n insight: z.string(),\n\n // Metadata (required)\n tags: z.array(z.string()),\n source: SourceSchema,\n context: ContextSchema,\n created: z.string(), // ISO8601\n confirmed: z.boolean(),\n\n // Relationships (required, can be empty arrays)\n supersedes: z.array(z.string()),\n related: z.array(z.string()),\n\n // Extended fields (optional - typically present for 'full' type)\n evidence: z.string().optional(),\n severity: SeveritySchema.optional(),\n pattern: PatternSchema.optional(),\n\n // Lifecycle fields (optional)\n deleted: z.boolean().optional(),\n retrievalCount: z.number().optional(),\n});\n\n// Tombstone for deletions (append-only delete marker)\nexport const TombstoneSchema = z.object({\n id: z.string(),\n deleted: z.literal(true),\n deletedAt: z.string(), // ISO8601\n});\n\n// Type exports\nexport type Lesson = z.infer<typeof LessonSchema>;\nexport type LessonType = z.infer<typeof LessonTypeSchema>;\nexport type Tombstone = z.infer<typeof TombstoneSchema>;\nexport type Source = z.infer<typeof SourceSchema>;\nexport type Severity = z.infer<typeof SeveritySchema>;\nexport type Context = z.infer<typeof ContextSchema>;\nexport type Pattern = z.infer<typeof PatternSchema>;\n\n/**\n * Generate deterministic lesson ID from insight text.\n * Format: L + 8 hex characters from SHA-256 hash\n */\nexport function generateId(insight: string): string {\n const hash = createHash('sha256').update(insight).digest('hex');\n return `L${hash.slice(0, 8)}`;\n}\n","/**\n * JSONL storage layer for lessons\n *\n * Append-only storage with 
last-write-wins deduplication.\n * Source of truth - git trackable.\n */\n\nimport { appendFile, mkdir, readFile } from 'node:fs/promises';\nimport { dirname, join } from 'node:path';\nimport { LessonSchema, type Lesson } from '../types.js';\n\n/** Relative path to lessons file from repo root */\nexport const LESSONS_PATH = '.claude/lessons/index.jsonl';\n\n/** Options for reading lessons */\nexport interface ReadLessonsOptions {\n /** If true, throw on first parse error. Default: false (skip errors) */\n strict?: boolean;\n /** Callback for each parse error in non-strict mode */\n onParseError?: (error: ParseError) => void;\n}\n\n/** Parse error details */\nexport interface ParseError {\n /** 1-based line number */\n line: number;\n /** Error message */\n message: string;\n /** Original error */\n cause: unknown;\n}\n\n/** Result of reading lessons */\nexport interface ReadLessonsResult {\n /** Successfully parsed lessons */\n lessons: Lesson[];\n /** Number of lines skipped due to errors */\n skippedCount: number;\n}\n\n/**\n * Append a lesson to the JSONL file.\n * Creates directory structure if missing.\n */\nexport async function appendLesson(repoRoot: string, lesson: Lesson): Promise<void> {\n const filePath = join(repoRoot, LESSONS_PATH);\n await mkdir(dirname(filePath), { recursive: true });\n\n const line = JSON.stringify(lesson) + '\\n';\n await appendFile(filePath, line, 'utf-8');\n}\n\n/**\n * Parse and validate a single JSON line.\n * @returns Parsed lesson or null if invalid\n */\nfunction parseJsonLine(\n line: string,\n lineNumber: number,\n strict: boolean,\n onParseError?: (error: ParseError) => void\n): Lesson | null {\n // Try to parse JSON\n let parsed: unknown;\n try {\n parsed = JSON.parse(line);\n } catch (err) {\n const parseError: ParseError = {\n line: lineNumber,\n message: `Invalid JSON: ${(err as Error).message}`,\n cause: err,\n };\n if (strict) {\n throw new Error(`Parse error on line ${lineNumber}: ${parseError.message}`);\n }\n onParseError?.(parseError);\n return null;\n }\n\n // Validate against schema\n const result = LessonSchema.safeParse(parsed);\n if (!result.success) {\n const parseError: ParseError = {\n line: lineNumber,\n message: `Schema validation failed: ${result.error.message}`,\n cause: result.error,\n };\n if (strict) {\n throw new Error(`Parse error on line ${lineNumber}: ${parseError.message}`);\n }\n onParseError?.(parseError);\n return null;\n }\n\n return result.data;\n}\n\n/**\n * Read all non-deleted lessons from the JSONL file.\n * Applies last-write-wins deduplication by ID.\n * Returns result object with lessons and skippedCount.\n *\n * @param repoRoot - Repository root directory\n * @param options - Optional settings for error handling\n * @returns Result with lessons array and count of skipped lines\n */\nexport async function readLessons(\n repoRoot: string,\n options: ReadLessonsOptions = {}\n): Promise<ReadLessonsResult> {\n const { strict = false, onParseError } = options;\n const filePath = join(repoRoot, LESSONS_PATH);\n\n let content: string;\n try {\n content = await readFile(filePath, 'utf-8');\n } catch (err) {\n if ((err as NodeJS.ErrnoException).code === 'ENOENT') {\n return { lessons: [], skippedCount: 0 };\n }\n throw err;\n }\n\n const lessons = new Map<string, Lesson>();\n let skippedCount = 0;\n\n const lines = content.split('\\n');\n for (let i = 0; i < lines.length; i++) {\n const trimmed = lines[i]!.trim();\n if (!trimmed) continue;\n\n const lesson = parseJsonLine(trimmed, i + 1, strict, onParseError);\n 
if (!lesson) {\n skippedCount++;\n continue;\n }\n\n if (lesson.deleted) {\n lessons.delete(lesson.id);\n } else {\n lessons.set(lesson.id, lesson);\n }\n }\n\n return { lessons: Array.from(lessons.values()), skippedCount };\n}\n","/**\n * SQLite storage layer with FTS5 for full-text search\n *\n * Rebuildable index - not the source of truth.\n * Stored in .claude/.cache (gitignored).\n */\n\nimport { createHash } from 'node:crypto';\nimport { mkdirSync, statSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport Database from 'better-sqlite3';\nimport type { Database as DatabaseType } from 'better-sqlite3';\n\nimport type { Lesson } from '../types.js';\n\nimport { LESSONS_PATH, readLessons } from './jsonl.js';\n\n/** Relative path to database file from repo root */\nexport const DB_PATH = '.claude/.cache/lessons.sqlite';\n\n/** SQL schema for lessons database */\nconst SCHEMA_SQL = `\n -- Main lessons table\n CREATE TABLE IF NOT EXISTS lessons (\n id TEXT PRIMARY KEY,\n type TEXT NOT NULL,\n trigger TEXT NOT NULL,\n insight TEXT NOT NULL,\n evidence TEXT,\n severity TEXT,\n tags TEXT NOT NULL DEFAULT '',\n source TEXT NOT NULL,\n context TEXT NOT NULL DEFAULT '{}',\n supersedes TEXT NOT NULL DEFAULT '[]',\n related TEXT NOT NULL DEFAULT '[]',\n created TEXT NOT NULL,\n confirmed INTEGER NOT NULL DEFAULT 0,\n deleted INTEGER NOT NULL DEFAULT 0,\n retrieval_count INTEGER NOT NULL DEFAULT 0,\n last_retrieved TEXT,\n embedding BLOB,\n content_hash TEXT\n );\n\n -- FTS5 virtual table for full-text search\n CREATE VIRTUAL TABLE IF NOT EXISTS lessons_fts USING fts5(\n id,\n trigger,\n insight,\n tags,\n content='lessons',\n content_rowid='rowid'\n );\n\n -- Trigger to sync FTS on INSERT\n CREATE TRIGGER IF NOT EXISTS lessons_ai AFTER INSERT ON lessons BEGIN\n INSERT INTO lessons_fts(rowid, id, trigger, insight, tags)\n VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags);\n END;\n\n -- Trigger to sync FTS on DELETE\n CREATE TRIGGER IF NOT EXISTS lessons_ad AFTER DELETE ON lessons BEGIN\n INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags)\n VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags);\n END;\n\n -- Trigger to sync FTS on UPDATE\n CREATE TRIGGER IF NOT EXISTS lessons_au AFTER UPDATE ON lessons BEGIN\n INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags)\n VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags);\n INSERT INTO lessons_fts(rowid, id, trigger, insight, tags)\n VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags);\n END;\n\n -- Index for common queries\n CREATE INDEX IF NOT EXISTS idx_lessons_created ON lessons(created);\n CREATE INDEX IF NOT EXISTS idx_lessons_confirmed ON lessons(confirmed);\n CREATE INDEX IF NOT EXISTS idx_lessons_severity ON lessons(severity);\n\n -- Metadata table for sync tracking\n CREATE TABLE IF NOT EXISTS metadata (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL\n );\n`;\n\n/**\n * Create database schema for lessons storage.\n */\nfunction createSchema(database: DatabaseType): void {\n database.exec(SCHEMA_SQL);\n}\n\nlet db: DatabaseType | null = null;\n\n/**\n * Compute deterministic content hash for embedding cache validation.\n * Format: SHA-256 hex of \"trigger insight\"\n */\nexport function contentHash(trigger: string, insight: string): string {\n return createHash('sha256').update(`${trigger} ${insight}`).digest('hex');\n}\n\n/**\n * Open or create the SQLite database.\n *\n * Creates directory structure and schema if needed.\n * 
Returns a singleton instance - subsequent calls return the same connection.\n *\n * **Resource lifecycle:**\n * - First call creates the database file (if needed) and opens a connection\n * - Connection uses WAL mode for better concurrent access\n * - Connection remains open until `closeDb()` is called\n *\n * **Note:** Most code should not call this directly. Higher-level functions\n * like `searchKeyword` and `rebuildIndex` call it internally.\n *\n * @param repoRoot - Path to repository root (database stored at `.claude/.cache/lessons.sqlite`)\n * @returns The singleton database connection\n *\n * @see {@link closeDb} for releasing resources\n */\nexport function openDb(repoRoot: string): DatabaseType {\n if (db) return db;\n\n const dbPath = join(repoRoot, DB_PATH);\n\n // Create directory synchronously (better-sqlite3 is sync)\n const dir = dirname(dbPath);\n mkdirSync(dir, { recursive: true });\n\n db = new Database(dbPath);\n\n // Enable WAL mode for better concurrent access\n db.pragma('journal_mode = WAL');\n\n createSchema(db);\n\n return db;\n}\n\n/**\n * Close the database connection and release resources.\n *\n * **Resource lifecycle:**\n * - The database is opened lazily on first call to `openDb()` or any function that uses it\n * (e.g., `searchKeyword`, `rebuildIndex`, `syncIfNeeded`, `getCachedEmbedding`)\n * - Once opened, the connection remains active until `closeDb()` is called\n * - After closing, subsequent database operations will reopen the connection\n *\n * **When to call:**\n * - At the end of CLI commands to ensure clean process exit\n * - When transitioning between repositories in long-running processes\n * - Before process exit in graceful shutdown handlers\n *\n * **Best practices for long-running processes:**\n * - In single-operation scripts: call before exit\n * - In daemon/server processes: call in shutdown handler\n * - Not necessary to call between operations in the same repository\n *\n * @example\n * ```typescript\n * // CLI command pattern\n * try {\n * await searchKeyword(repoRoot, 'typescript', 10);\n * // ... 
process results\n * } finally {\n * closeDb();\n * }\n *\n * // Graceful shutdown pattern\n * process.on('SIGTERM', () => {\n * closeDb();\n * process.exit(0);\n * });\n * ```\n */\nexport function closeDb(): void {\n if (db) {\n db.close();\n db = null;\n }\n}\n\n/**\n * Get cached embedding for a lesson if content hash matches.\n * Returns null if no cache exists or hash mismatches.\n */\nexport function getCachedEmbedding(\n repoRoot: string,\n lessonId: string,\n expectedHash?: string\n): number[] | null {\n const database = openDb(repoRoot);\n const row = database\n .prepare('SELECT embedding, content_hash FROM lessons WHERE id = ?')\n .get(lessonId) as { embedding: Buffer | null; content_hash: string | null } | undefined;\n\n if (!row || !row.embedding || !row.content_hash) {\n return null;\n }\n\n // If expected hash provided, validate it matches\n if (expectedHash && row.content_hash !== expectedHash) {\n return null;\n }\n\n // Convert Buffer to Float32Array then to number[]\n const float32 = new Float32Array(\n row.embedding.buffer,\n row.embedding.byteOffset,\n row.embedding.byteLength / 4\n );\n return Array.from(float32);\n}\n\n/**\n * Cache an embedding for a lesson with content hash.\n */\nexport function setCachedEmbedding(\n repoRoot: string,\n lessonId: string,\n embedding: Float32Array | number[],\n hash: string\n): void {\n const database = openDb(repoRoot);\n\n // Convert to Buffer for storage\n const float32 = embedding instanceof Float32Array ? embedding : new Float32Array(embedding);\n const buffer = Buffer.from(float32.buffer, float32.byteOffset, float32.byteLength);\n\n database\n .prepare('UPDATE lessons SET embedding = ?, content_hash = ? WHERE id = ?')\n .run(buffer, hash, lessonId);\n}\n\n/** DB row type for lessons table */\ninterface LessonRow {\n id: string;\n type: string;\n trigger: string;\n insight: string;\n evidence: string | null;\n severity: string | null;\n tags: string;\n source: string;\n context: string;\n supersedes: string;\n related: string;\n created: string;\n confirmed: number;\n deleted: number;\n retrieval_count: number;\n last_retrieved: string | null;\n embedding: Buffer | null;\n}\n\n/**\n * Convert a database row to a typed Lesson object.\n * Maps NULL to undefined for optional fields (lossless roundtrip).\n */\nfunction rowToLesson(row: LessonRow): Lesson {\n const lesson: Lesson = {\n id: row.id,\n type: row.type as 'quick' | 'full',\n trigger: row.trigger,\n insight: row.insight,\n tags: row.tags ? 
row.tags.split(',').filter(Boolean) : [],\n source: row.source as Lesson['source'],\n context: JSON.parse(row.context) as Lesson['context'],\n supersedes: JSON.parse(row.supersedes) as string[],\n related: JSON.parse(row.related) as string[],\n created: row.created,\n confirmed: row.confirmed === 1,\n };\n\n // Optional fields: map NULL -> undefined (lossless roundtrip)\n if (row.evidence !== null) {\n lesson.evidence = row.evidence;\n }\n if (row.severity !== null) {\n lesson.severity = row.severity as 'high' | 'medium' | 'low';\n }\n if (row.deleted === 1) {\n lesson.deleted = true;\n }\n if (row.retrieval_count > 0) {\n lesson.retrievalCount = row.retrieval_count;\n }\n\n return lesson;\n}\n\n/** Cached embedding with its content hash */\ninterface CachedEmbeddingData {\n embedding: Buffer;\n contentHash: string;\n}\n\n/**\n * Collect cached embeddings from existing lessons for preservation.\n */\nfunction collectCachedEmbeddings(database: DatabaseType): Map<string, CachedEmbeddingData> {\n const cache = new Map<string, CachedEmbeddingData>();\n const rows = database\n .prepare('SELECT id, embedding, content_hash FROM lessons WHERE embedding IS NOT NULL')\n .all() as Array<{ id: string; embedding: Buffer; content_hash: string | null }>;\n\n for (const row of rows) {\n if (row.embedding && row.content_hash) {\n cache.set(row.id, { embedding: row.embedding, contentHash: row.content_hash });\n }\n }\n return cache;\n}\n\n/** SQL for inserting a lesson row */\nconst INSERT_LESSON_SQL = `\n INSERT INTO lessons (id, type, trigger, insight, evidence, severity, tags, source, context, supersedes, related, created, confirmed, deleted, retrieval_count, last_retrieved, embedding, content_hash)\n VALUES (@id, @type, @trigger, @insight, @evidence, @severity, @tags, @source, @context, @supersedes, @related, @created, @confirmed, @deleted, @retrieval_count, @last_retrieved, @embedding, @content_hash)\n`;\n\n/**\n * Get the mtime of the JSONL file, or null if it doesn't exist.\n */\nfunction getJsonlMtime(repoRoot: string): number | null {\n const jsonlPath = join(repoRoot, LESSONS_PATH);\n try {\n const stat = statSync(jsonlPath);\n return stat.mtimeMs;\n } catch {\n return null;\n }\n}\n\n/**\n * Get the last synced mtime from metadata table.\n */\nfunction getLastSyncMtime(database: DatabaseType): number | null {\n const row = database\n .prepare('SELECT value FROM metadata WHERE key = ?')\n .get('last_sync_mtime') as { value: string } | undefined;\n return row ? 
parseFloat(row.value) : null;\n}\n\n/**\n * Store the last synced mtime in metadata table.\n */\nfunction setLastSyncMtime(database: DatabaseType, mtime: number): void {\n database\n .prepare('INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)')\n .run('last_sync_mtime', mtime.toString());\n}\n\n/**\n * Rebuild the SQLite index from the JSONL source of truth.\n * Preserves embeddings where content hash is unchanged.\n * Updates the last sync mtime after successful rebuild.\n */\nexport async function rebuildIndex(repoRoot: string): Promise<void> {\n const database = openDb(repoRoot);\n const { lessons } = await readLessons(repoRoot);\n\n const cachedEmbeddings = collectCachedEmbeddings(database);\n database.exec('DELETE FROM lessons');\n\n if (lessons.length === 0) {\n // Still update mtime even for empty file\n const mtime = getJsonlMtime(repoRoot);\n if (mtime !== null) {\n setLastSyncMtime(database, mtime);\n }\n return;\n }\n\n const insert = database.prepare(INSERT_LESSON_SQL);\n const insertMany = database.transaction((items: Lesson[]) => {\n for (const lesson of items) {\n const newHash = contentHash(lesson.trigger, lesson.insight);\n const cached = cachedEmbeddings.get(lesson.id);\n const hasValidCache = cached && cached.contentHash === newHash;\n\n insert.run({\n id: lesson.id,\n type: lesson.type,\n trigger: lesson.trigger,\n insight: lesson.insight,\n evidence: lesson.evidence ?? null,\n severity: lesson.severity ?? null,\n tags: lesson.tags.join(','),\n source: lesson.source,\n context: JSON.stringify(lesson.context),\n supersedes: JSON.stringify(lesson.supersedes),\n related: JSON.stringify(lesson.related),\n created: lesson.created,\n confirmed: lesson.confirmed ? 1 : 0,\n deleted: lesson.deleted ? 1 : 0,\n retrieval_count: lesson.retrievalCount ?? 0,\n last_retrieved: null, // Reset on rebuild since we're rebuilding from source\n embedding: hasValidCache ? cached.embedding : null,\n content_hash: hasValidCache ? 
cached.contentHash : null,\n });\n }\n });\n\n insertMany(lessons);\n\n // Update last sync mtime\n const mtime = getJsonlMtime(repoRoot);\n if (mtime !== null) {\n setLastSyncMtime(database, mtime);\n }\n}\n\n/** Options for syncIfNeeded */\nexport interface SyncOptions {\n /** Force rebuild even if JSONL unchanged */\n force?: boolean;\n}\n\n/**\n * Sync the index if JSONL has changed since last sync.\n * Returns true if a rebuild was performed, false if skipped.\n */\nexport async function syncIfNeeded(\n repoRoot: string,\n options: SyncOptions = {}\n): Promise<boolean> {\n const { force = false } = options;\n\n // Check JSONL mtime\n const jsonlMtime = getJsonlMtime(repoRoot);\n if (jsonlMtime === null && !force) {\n // No JSONL file exists\n return false;\n }\n\n const database = openDb(repoRoot);\n const lastSyncMtime = getLastSyncMtime(database);\n\n // Rebuild if forced, no previous sync, or JSONL is newer\n const needsRebuild = force || lastSyncMtime === null || (jsonlMtime !== null && jsonlMtime > lastSyncMtime);\n\n if (needsRebuild) {\n await rebuildIndex(repoRoot);\n return true;\n }\n\n return false;\n}\n\n/**\n * Search lessons using FTS5 keyword search.\n * Returns matching lessons up to the specified limit.\n * Increments retrieval count for all returned lessons.\n */\nexport async function searchKeyword(\n repoRoot: string,\n query: string,\n limit: number\n): Promise<Lesson[]> {\n const database = openDb(repoRoot);\n\n // Check if there are any lessons\n const countResult = database.prepare('SELECT COUNT(*) as cnt FROM lessons').get() as {\n cnt: number;\n };\n if (countResult.cnt === 0) return [];\n\n // Use FTS5 MATCH for search\n const rows = database\n .prepare(\n `\n SELECT l.*\n FROM lessons l\n JOIN lessons_fts fts ON l.rowid = fts.rowid\n WHERE lessons_fts MATCH ?\n LIMIT ?\n `\n )\n .all(query, limit) as LessonRow[];\n\n // Increment retrieval count for matched lessons\n if (rows.length > 0) {\n incrementRetrievalCount(repoRoot, rows.map((r) => r.id));\n }\n\n return rows.map(rowToLesson);\n}\n\n/** Retrieval statistics for a lesson */\nexport interface RetrievalStat {\n id: string;\n count: number;\n lastRetrieved: string | null;\n}\n\n/**\n * Increment retrieval count for a list of lesson IDs.\n * Updates both count and last_retrieved timestamp.\n * Non-existent IDs are silently ignored.\n */\nexport function incrementRetrievalCount(repoRoot: string, lessonIds: string[]): void {\n if (lessonIds.length === 0) return;\n\n const database = openDb(repoRoot);\n const now = new Date().toISOString();\n\n const update = database.prepare(`\n UPDATE lessons\n SET retrieval_count = retrieval_count + 1,\n last_retrieved = ?\n WHERE id = ?\n `);\n\n const updateMany = database.transaction((ids: string[]) => {\n for (const id of ids) {\n update.run(now, id);\n }\n });\n\n updateMany(lessonIds);\n}\n\n/**\n * Get retrieval statistics for all lessons.\n * Returns id, retrieval count, and last retrieved timestamp for each lesson.\n */\nexport function getRetrievalStats(repoRoot: string): RetrievalStat[] {\n const database = openDb(repoRoot);\n\n const rows = database\n .prepare('SELECT id, retrieval_count, last_retrieved FROM lessons')\n .all() as Array<{ id: string; retrieval_count: number; last_retrieved: string | null }>;\n\n return rows.map((row) => ({\n id: row.id,\n count: row.retrieval_count,\n lastRetrieved: row.last_retrieved,\n }));\n}\n","/**\n * Quality filters for lesson capture\n *\n * Filters to ensure lessons are:\n * - Novel (not duplicate)\n * - Specific 
(not vague)\n * - Actionable (contains action words)\n */\n\nimport { searchKeyword, syncIfNeeded } from '../storage/sqlite.js';\n\n/** Default similarity threshold for duplicate detection */\nconst DEFAULT_SIMILARITY_THRESHOLD = 0.8;\n\n/** Result of novelty check */\nexport interface NoveltyResult {\n novel: boolean;\n reason?: string;\n existingId?: string;\n}\n\n/** Options for novelty check */\nexport interface NoveltyOptions {\n threshold?: number;\n}\n\n/**\n * Check if an insight is novel (not a duplicate of existing lessons).\n * Uses keyword search to find potentially similar lessons.\n */\nexport async function isNovel(\n repoRoot: string,\n insight: string,\n options: NoveltyOptions = {}\n): Promise<NoveltyResult> {\n const threshold = options.threshold ?? DEFAULT_SIMILARITY_THRESHOLD;\n\n // Sync index if JSONL has changed\n await syncIfNeeded(repoRoot);\n\n // Extract key words for search (take first 3 significant words)\n const words = insight\n .toLowerCase()\n .replace(/[^a-z0-9\\s]/g, '')\n .split(/\\s+/)\n .filter((w) => w.length > 3)\n .slice(0, 3);\n\n if (words.length === 0) {\n return { novel: true };\n }\n\n // Search for each word and collect results\n const searchQuery = words.join(' OR ');\n const results = await searchKeyword(repoRoot, searchQuery, 10);\n\n if (results.length === 0) {\n return { novel: true };\n }\n\n // Check similarity using simple word overlap (since we may not have embeddings)\n const insightWords = new Set(insight.toLowerCase().split(/\\s+/));\n\n for (const lesson of results) {\n const lessonWords = new Set(lesson.insight.toLowerCase().split(/\\s+/));\n\n // Calculate Jaccard similarity\n const intersection = [...insightWords].filter((w) => lessonWords.has(w)).length;\n const union = new Set([...insightWords, ...lessonWords]).size;\n const similarity = union > 0 ? 
intersection / union : 0;\n\n if (similarity >= threshold) {\n return {\n novel: false,\n reason: `Found similar existing lesson: \"${lesson.insight.slice(0, 50)}...\"`,\n existingId: lesson.id,\n };\n }\n\n // Also check exact match\n if (lesson.insight.toLowerCase() === insight.toLowerCase()) {\n return {\n novel: false,\n reason: `Exact duplicate found`,\n existingId: lesson.id,\n };\n }\n }\n\n return { novel: true };\n}\n\n/** Minimum word count for a specific insight */\nconst MIN_WORD_COUNT = 4;\n\n/** Vague patterns that indicate non-specific advice */\nconst VAGUE_PATTERNS = [\n /\\bwrite better\\b/i,\n /\\bbe careful\\b/i,\n /\\bremember to\\b/i,\n /\\bmake sure\\b/i,\n /\\btry to\\b/i,\n /\\bdouble check\\b/i,\n];\n\n/** Generic \"always/never\" phrases (short, lacking specificity) */\nconst GENERIC_IMPERATIVE_PATTERN = /^(always|never)\\s+\\w+(\\s+\\w+){0,2}$/i;\n\n/** Result of specificity check */\nexport interface SpecificityResult {\n specific: boolean;\n reason?: string;\n}\n\n/**\n * Check if an insight is specific enough to be useful.\n * Rejects vague, generic advice that doesn't provide actionable guidance.\n */\nexport function isSpecific(insight: string): SpecificityResult {\n // Check minimum length first\n const words = insight.trim().split(/\\s+/).filter((w) => w.length > 0);\n if (words.length < MIN_WORD_COUNT) {\n return { specific: false, reason: 'Insight is too short to be actionable' };\n }\n\n // Check for vague patterns\n for (const pattern of VAGUE_PATTERNS) {\n if (pattern.test(insight)) {\n return { specific: false, reason: 'Insight matches a vague pattern' };\n }\n }\n\n // Check for generic \"Always X\" or \"Never X\" phrases\n if (GENERIC_IMPERATIVE_PATTERN.test(insight)) {\n return { specific: false, reason: 'Insight matches a vague pattern' };\n }\n\n return { specific: true };\n}\n\n/** Action word patterns that indicate actionable guidance */\nconst ACTION_PATTERNS = [\n /\\buse\\s+.+\\s+instead\\s+of\\b/i, // \"use X instead of Y\"\n /\\bprefer\\s+.+\\s+(over|to)\\b/i, // \"prefer X over Y\" or \"prefer X to Y\"\n /\\balways\\s+.+\\s+when\\b/i, // \"always X when Y\"\n /\\bnever\\s+.+\\s+without\\b/i, // \"never X without Y\"\n /\\bavoid\\s+(using\\s+)?\\w+/i, // \"avoid X\" or \"avoid using X\"\n /\\bcheck\\s+.+\\s+before\\b/i, // \"check X before Y\"\n /^(run|use|add|remove|install|update|configure|set|enable|disable)\\s+/i, // Imperative commands at start\n];\n\n/** Result of actionability check */\nexport interface ActionabilityResult {\n actionable: boolean;\n reason?: string;\n}\n\n/**\n * Check if an insight contains actionable guidance.\n * Returns false for pure observations or questions.\n */\nexport function isActionable(insight: string): ActionabilityResult {\n // Check for action patterns\n for (const pattern of ACTION_PATTERNS) {\n if (pattern.test(insight)) {\n return { actionable: true };\n }\n }\n\n return { actionable: false, reason: 'Insight lacks clear action guidance' };\n}\n\n/** Result of combined quality check */\nexport interface ProposeResult {\n shouldPropose: boolean;\n reason?: string;\n}\n\n/**\n * Combined quality check for lesson proposals.\n * Returns true only if insight is novel, specific, AND actionable.\n */\nexport async function shouldPropose(\n repoRoot: string,\n insight: string\n): Promise<ProposeResult> {\n // Check specificity first (fast, no DB)\n const specificResult = isSpecific(insight);\n if (!specificResult.specific) {\n return { shouldPropose: false, reason: specificResult.reason };\n }\n\n // 
Check actionability (fast, no DB)\n const actionableResult = isActionable(insight);\n if (!actionableResult.actionable) {\n return { shouldPropose: false, reason: actionableResult.reason };\n }\n\n // Check novelty (requires DB lookup)\n const noveltyResult = await isNovel(repoRoot, insight);\n if (!noveltyResult.novel) {\n return { shouldPropose: false, reason: noveltyResult.reason };\n }\n\n return { shouldPropose: true };\n}\n","/**\n * Trigger detection for automatic lesson capture\n *\n * Detects patterns that indicate potential learning opportunities:\n * - User corrections\n * - Self-corrections\n * - Test failures\n */\n\nimport type { Context } from '../types.js';\n\n/** Signal data for correction detection */\nexport interface CorrectionSignal {\n messages: string[];\n context: Context;\n}\n\n/** Detected correction result */\nexport interface DetectedCorrection {\n trigger: string;\n correctionMessage: string;\n context: Context;\n}\n\n/** User correction patterns */\nconst USER_CORRECTION_PATTERNS = [\n /\\bno\\b[,.]?\\s/i, // \"no, ...\" or \"no ...\"\n /\\bwrong\\b/i, // \"wrong\"\n /\\bactually\\b/i, // \"actually...\"\n /\\bnot that\\b/i, // \"not that\"\n /\\bi meant\\b/i, // \"I meant\"\n];\n\n/**\n * Detect user correction signals in conversation.\n *\n * Looks for patterns that indicate the user is correcting Claude's\n * understanding or actions.\n *\n * @param signals - Messages and context to analyze\n * @returns Detected correction or null if none found\n */\nexport function detectUserCorrection(signals: CorrectionSignal): DetectedCorrection | null {\n const { messages, context } = signals;\n\n if (messages.length < 2) {\n return null;\n }\n\n // Check later messages for correction patterns\n for (let i = 1; i < messages.length; i++) {\n const message = messages[i];\n if (!message) continue;\n\n for (const pattern of USER_CORRECTION_PATTERNS) {\n if (pattern.test(message)) {\n return {\n trigger: `User correction during ${context.intent}`,\n correctionMessage: message,\n context,\n };\n }\n }\n }\n\n return null;\n}\n\n/** Edit history entry */\nexport interface EditEntry {\n file: string;\n success: boolean;\n timestamp: number;\n}\n\n/** Edit history for self-correction detection */\nexport interface EditHistory {\n edits: EditEntry[];\n}\n\n/** Detected self-correction */\nexport interface DetectedSelfCorrection {\n file: string;\n trigger: string;\n}\n\n/**\n * Detect self-correction patterns in edit history.\n *\n * Looks for edit→fail→re-edit patterns on the same file,\n * which indicate Claude had to correct its own work.\n *\n * @param history - Edit history to analyze\n * @returns Detected self-correction or null if none found\n */\nexport function detectSelfCorrection(history: EditHistory): DetectedSelfCorrection | null {\n const { edits } = history;\n\n if (edits.length < 3) {\n return null;\n }\n\n // Look for edit→fail→re-edit pattern on same file\n for (let i = 0; i <= edits.length - 3; i++) {\n const first = edits[i];\n const second = edits[i + 1];\n const third = edits[i + 2];\n\n if (!first || !second || !third) continue;\n\n // Pattern: success → fail → success on same file\n if (\n first.file === second.file &&\n second.file === third.file &&\n first.success &&\n !second.success &&\n third.success\n ) {\n return {\n file: first.file,\n trigger: `Self-correction on ${first.file}`,\n };\n }\n }\n\n return null;\n}\n\n/** Test result for failure detection */\nexport interface TestResult {\n passed: boolean;\n output: string;\n testFile: 
string;\n}\n\n/** Detected test failure */\nexport interface DetectedTestFailure {\n testFile: string;\n errorOutput: string;\n trigger: string;\n}\n\n/**\n * Detect test failure patterns.\n *\n * When tests fail, this creates a potential learning opportunity\n * if the failure is later fixed.\n *\n * @param testResult - Test result to analyze\n * @returns Detected test failure or null if tests passed\n */\nexport function detectTestFailure(testResult: TestResult): DetectedTestFailure | null {\n if (testResult.passed) {\n return null;\n }\n\n // Extract first meaningful error line for trigger\n const lines = testResult.output.split('\\n').filter((line) => line.trim().length > 0);\n const errorLine = lines.find((line) => /error|fail|assert/i.test(line)) ?? lines[0] ?? '';\n\n return {\n testFile: testResult.testFile,\n errorOutput: testResult.output,\n trigger: `Test failure in ${testResult.testFile}: ${errorLine.slice(0, 100)}`,\n };\n}\n","/**\n * Trigger detection integration\n *\n * Orchestrates detection -> quality filter -> lesson proposal flow.\n * Provides a high-level API for CLI and hooks.\n */\n\nimport * as fs from 'node:fs/promises';\n\nimport type { Source } from '../types.js';\nimport { shouldPropose } from './quality.js';\nimport {\n detectUserCorrection,\n detectSelfCorrection,\n detectTestFailure,\n} from './triggers.js';\nimport type {\n CorrectionSignal,\n EditHistory,\n TestResult,\n} from './triggers.js';\n\n/** Detection input types */\nexport type DetectionType = 'user' | 'self' | 'test';\n\n/** Input for user correction detection */\nexport interface UserDetectionInput {\n type: 'user';\n data: CorrectionSignal;\n}\n\n/** Input for self correction detection */\nexport interface SelfDetectionInput {\n type: 'self';\n data: EditHistory;\n}\n\n/** Input for test failure detection */\nexport interface TestDetectionInput {\n type: 'test';\n data: TestResult;\n}\n\n/** Union type for all detection inputs */\nexport type DetectionInput = UserDetectionInput | SelfDetectionInput | TestDetectionInput;\n\n/** Result of successful detection */\nexport interface DetectionResult {\n trigger: string;\n source: Source;\n proposedInsight: string;\n}\n\n/**\n * Detect triggers and propose lessons.\n *\n * Runs the appropriate detector based on input type, then filters\n * through quality checks. 
Returns a proposal if detection passes\n * all quality filters.\n *\n * @param repoRoot - Repository root path\n * @param input - Detection input with type and data\n * @returns Detection result with proposed insight, or null\n */\nexport async function detectAndPropose(\n repoRoot: string,\n input: DetectionInput\n): Promise<DetectionResult | null> {\n const detected = runDetector(input);\n if (!detected) {\n return null;\n }\n\n const { trigger, source, proposedInsight } = detected;\n\n // Run quality filters on proposed insight\n const quality = await shouldPropose(repoRoot, proposedInsight);\n if (!quality.shouldPropose) {\n return null;\n }\n\n return { trigger, source, proposedInsight };\n}\n\n/** Internal detection result before quality filtering */\ninterface RawDetection {\n trigger: string;\n source: Source;\n proposedInsight: string;\n}\n\n/**\n * Run the appropriate detector based on input type.\n */\nfunction runDetector(input: DetectionInput): RawDetection | null {\n switch (input.type) {\n case 'user':\n return detectUserCorrectionFlow(input.data);\n case 'self':\n return detectSelfCorrectionFlow(input.data);\n case 'test':\n return detectTestFailureFlow(input.data);\n }\n}\n\n/**\n * Detect user correction and extract insight.\n */\nfunction detectUserCorrectionFlow(data: CorrectionSignal): RawDetection | null {\n const result = detectUserCorrection(data);\n if (!result) {\n return null;\n }\n\n return {\n trigger: result.trigger,\n source: 'user_correction',\n proposedInsight: result.correctionMessage,\n };\n}\n\n/**\n * Detect self correction and extract insight.\n */\nfunction detectSelfCorrectionFlow(data: EditHistory): RawDetection | null {\n const result = detectSelfCorrection(data);\n if (!result) {\n return null;\n }\n\n return {\n trigger: result.trigger,\n source: 'self_correction',\n // Self-corrections need context to form useful insights\n proposedInsight: `Check ${result.file} for common errors before editing`,\n };\n}\n\n/**\n * Detect test failure and extract insight.\n */\nfunction detectTestFailureFlow(data: TestResult): RawDetection | null {\n const result = detectTestFailure(data);\n if (!result) {\n return null;\n }\n\n return {\n trigger: result.trigger,\n source: 'test_failure',\n proposedInsight: result.errorOutput,\n };\n}\n\n/** Valid detection types for validation */\nconst VALID_TYPES = new Set<string>(['user', 'self', 'test']);\n\n/**\n * Parse detection input from a JSON file.\n *\n * @param filePath - Path to JSON input file\n * @returns Parsed detection input\n * @throws Error if file is invalid or type is unknown\n */\nexport async function parseInputFile(filePath: string): Promise<DetectionInput> {\n const content = await fs.readFile(filePath, 'utf-8');\n const data = JSON.parse(content) as { type: string; data: unknown };\n\n if (!VALID_TYPES.has(data.type)) {\n throw new Error(`Invalid detection type: ${data.type}. 
Must be one of: user, self, test`);\n }\n\n return data as DetectionInput;\n}\n","/**\n * CLI utility functions.\n *\n * Pure functions extracted from cli.ts for testability.\n */\n\n/**\n * Format bytes to human-readable string.\n *\n * @param bytes - Number of bytes\n * @returns Formatted string (e.g., \"1.5 KB\", \"2.0 MB\")\n */\nexport function formatBytes(bytes: number): string {\n if (bytes === 0) return '0 B';\n if (bytes < 1024) return `${bytes} B`;\n const kb = bytes / 1024;\n if (kb < 1024) return `${kb.toFixed(1)} KB`;\n const mb = kb / 1024;\n return `${mb.toFixed(1)} MB`;\n}\n\n/**\n * Parse limit option and validate it's a positive integer.\n *\n * @param value - String value from command option\n * @param name - Option name for error message\n * @returns Parsed integer\n * @throws Error if value is not a valid positive integer\n */\nexport function parseLimit(value: string, name: string): number {\n const parsed = parseInt(value, 10);\n if (Number.isNaN(parsed) || parsed <= 0) {\n throw new Error(`Invalid ${name}: must be a positive integer`);\n }\n return parsed;\n}\n\n/**\n * Get repository root from environment variable or current directory.\n *\n * @returns Repository root path for lesson storage\n */\nexport function getRepoRoot(): string {\n return process.env['LEARNING_AGENT_ROOT'] ?? process.cwd();\n}\n","/**\n * Embedding model resolution using node-llama-cpp's built-in resolver.\n *\n * Uses resolveModelFile for automatic download and caching.\n * Model is stored in ~/.node-llama-cpp/models/ by default.\n */\n\nimport { existsSync } from 'node:fs';\nimport { homedir } from 'node:os';\nimport { join } from 'node:path';\nimport { resolveModelFile } from 'node-llama-cpp';\n\n/**\n * HuggingFace URI for EmbeddingGemma-300M (Q4_0 quantization).\n *\n * - Size: ~278MB\n * - Dimensions: 768 (default), supports MRL truncation to 512/256/128\n * - Context: 2048 tokens\n */\nexport const MODEL_URI = 'hf:ggml-org/embeddinggemma-300M-qat-q4_0-GGUF/embeddinggemma-300M-qat-Q4_0.gguf';\n\n/**\n * Expected model filename after download.\n * node-llama-cpp uses format: hf_{org}_{filename}\n */\nexport const MODEL_FILENAME = 'hf_ggml-org_embeddinggemma-300M-qat-Q4_0.gguf';\n\n/** Default model directory used by node-llama-cpp */\nconst DEFAULT_MODEL_DIR = join(homedir(), '.node-llama-cpp', 'models');\n\n/**\n * Check if the embedding model is available locally.\n *\n * @returns true if model file exists\n */\nexport function isModelAvailable(): boolean {\n return existsSync(join(DEFAULT_MODEL_DIR, MODEL_FILENAME));\n}\n\n/**\n * Resolve the embedding model path, downloading if necessary.\n *\n * Uses node-llama-cpp's resolveModelFile for automatic download with progress.\n *\n * @param options - Optional configuration\n * @param options.cli - Show download progress in console (default: true)\n * @returns Path to the resolved model file\n *\n * @example\n * ```typescript\n * const modelPath = await resolveModel();\n * const llama = await getLlama();\n * const model = await llama.loadModel({ modelPath });\n * ```\n */\nexport async function resolveModel(options: { cli?: boolean } = {}): Promise<string> {\n const { cli = true } = options;\n return resolveModelFile(MODEL_URI, { cli });\n}\n","/**\n * Learning Agent - Repository-scoped learning system for Claude Code\n *\n * This package helps Claude Code learn from mistakes and avoid repeating them.\n * It captures lessons during coding sessions and retrieves relevant lessons\n * when planning new work.\n *\n * ## Quick Start\n *\n * 
```typescript\n * import { appendLesson, retrieveForPlan, loadSessionLessons } from 'learning-agent';\n *\n * // At session start, load high-severity lessons\n * const criticalLessons = await loadSessionLessons(repoRoot);\n *\n * // When planning, retrieve relevant lessons\n * const { lessons, message } = await retrieveForPlan(repoRoot, planText);\n *\n * // When capturing a lesson\n * await appendLesson(repoRoot, lesson);\n * ```\n *\n * ## Hook Integration\n *\n * Add to your `.claude/settings.json`:\n *\n * ```json\n * {\n * \"hooks\": {\n * \"session_start\": \"npx learning-agent load-session\",\n * \"pre_tool\": \"npx learning-agent check-plan\"\n * }\n * }\n * ```\n *\n * ## Resource Management\n *\n * This library manages two heavyweight resources that require cleanup:\n *\n * ### SQLite Database\n * - **Acquired:** Lazily on first database operation (search, rebuild, etc.)\n * - **Memory:** Minimal (~few KB for connection, index cached by OS)\n * - **Cleanup:** Call `closeDb()` before process exit\n *\n * ### Embedding Model\n * - **Acquired:** Lazily on first embedding call (embedText, embedTexts, searchVector)\n * - **Memory:** ~150MB RAM for the EmbeddingGemma model\n * - **Cleanup:** Call `unloadEmbedding()` before process exit\n *\n * ### Recommended Cleanup Pattern\n *\n * ```typescript\n * import { closeDb, unloadEmbedding } from 'learning-agent';\n *\n * // For CLI commands - use try/finally\n * async function main() {\n * try {\n * // ... your code that uses learning-agent\n * } finally {\n * unloadEmbedding();\n * closeDb();\n * }\n * }\n *\n * // For long-running processes - use shutdown handlers\n * process.on('SIGTERM', () => {\n * unloadEmbedding();\n * closeDb();\n * process.exit(0);\n * });\n * process.on('SIGINT', () => {\n * unloadEmbedding();\n * closeDb();\n * process.exit(0);\n * });\n * ```\n *\n * **Note:** Failing to clean up will not corrupt data, but may cause:\n * - Memory leaks in long-running processes\n * - Unclean process exits (warnings in some environments)\n *\n * @see {@link closeDb} for database cleanup\n * @see {@link unloadEmbedding} for embedding model cleanup\n * @module learning-agent\n */\n\nexport const VERSION = '0.1.0';\n\n// Storage API\nexport { appendLesson, readLessons, LESSONS_PATH } from './storage/jsonl.js';\nexport type { ReadLessonsOptions, ReadLessonsResult, ParseError } from './storage/jsonl.js';\nexport { rebuildIndex, searchKeyword, closeDb, DB_PATH } from './storage/sqlite.js';\n\n// Embeddings API\nexport { embedText, embedTexts, getEmbedding, isModelAvailable, unloadEmbedding } from './embeddings/nomic.js';\nexport { MODEL_FILENAME, MODEL_URI, resolveModel } from './embeddings/model.js';\n\n// Search API\nexport { searchVector, cosineSimilarity } from './search/vector.js';\nexport type { ScoredLesson, SearchVectorOptions } from './search/vector.js';\nexport { rankLessons, calculateScore, severityBoost, recencyBoost, confirmationBoost } from './search/ranking.js';\nexport type { RankedLesson } from './search/ranking.js';\n\n// Capture API - Quality filters\nexport { shouldPropose, isNovel, isSpecific, isActionable } from './capture/quality.js';\nexport type { NoveltyResult, NoveltyOptions, SpecificityResult, ActionabilityResult, ProposeResult } from './capture/quality.js';\n\n// Capture API - Triggers\nexport { detectUserCorrection, detectSelfCorrection, detectTestFailure } from './capture/triggers.js';\nexport type {\n CorrectionSignal,\n DetectedCorrection,\n EditHistory,\n EditEntry,\n DetectedSelfCorrection,\n 
TestResult,\n DetectedTestFailure,\n} from './capture/triggers.js';\n\n// Retrieval API\nexport { loadSessionLessons } from './retrieval/session.js';\nexport { retrieveForPlan, formatLessonsCheck } from './retrieval/plan.js';\nexport type { PlanRetrievalResult } from './retrieval/plan.js';\n\n// Types and schemas\nexport {\n generateId,\n LessonSchema,\n LessonTypeSchema,\n TombstoneSchema,\n} from './types.js';\nexport type {\n Lesson,\n LessonType,\n Tombstone,\n Source,\n Severity,\n Context,\n} from './types.js';\n","/**\n * Compaction and auto-archive for lessons\n *\n * Handles:\n * - Archiving old lessons (>90 days with 0 retrievals)\n * - Removing tombstones through JSONL rewrite\n * - Tracking compaction thresholds\n */\n\nimport { appendFile, mkdir, readFile, rename, writeFile } from 'node:fs/promises';\nimport { dirname, join } from 'node:path';\n\nimport type { Lesson } from '../types.js';\n\nimport { LESSONS_PATH, readLessons } from './jsonl.js';\n\n/** Relative path to archive directory from repo root */\nexport const ARCHIVE_DIR = '.claude/lessons/archive';\n\n/** Number of tombstones that triggers automatic compaction */\nexport const TOMBSTONE_THRESHOLD = 100;\n\n/** Age threshold for archiving (in days) */\nexport const ARCHIVE_AGE_DAYS = 90;\n\n/** Milliseconds per day for time calculations */\nconst MS_PER_DAY = 1000 * 60 * 60 * 24;\n\n/** Month offset for JavaScript's 0-indexed months */\nconst MONTH_INDEX_OFFSET = 1;\n\n/** Padding length for month in archive filename (e.g., \"01\" not \"1\") */\nconst MONTH_PAD_LENGTH = 2;\n\n/**\n * Result of a compaction operation\n */\nexport interface CompactResult {\n /** Number of lessons moved to archive */\n archived: number;\n /** Number of tombstones removed */\n tombstonesRemoved: number;\n /** Number of lessons remaining in index.jsonl */\n lessonsRemaining: number;\n}\n\n/**\n * Generate archive file path for a given date.\n * Format: .claude/lessons/archive/YYYY-MM.jsonl\n */\nexport function getArchivePath(repoRoot: string, date: Date): string {\n const year = date.getFullYear();\n const month = String(date.getMonth() + MONTH_INDEX_OFFSET).padStart(MONTH_PAD_LENGTH, '0');\n return join(repoRoot, ARCHIVE_DIR, `${year}-${month}.jsonl`);\n}\n\n/**\n * Parse raw JSONL lines from the lessons file.\n * Returns all lines (including invalid ones) as parsed objects or null.\n */\nasync function parseRawJsonlLines(\n repoRoot: string\n): Promise<Array<{ line: string; parsed: Record<string, unknown> | null }>> {\n const filePath = join(repoRoot, LESSONS_PATH);\n let content: string;\n try {\n content = await readFile(filePath, 'utf-8');\n } catch {\n return [];\n }\n\n const results: Array<{ line: string; parsed: Record<string, unknown> | null }> = [];\n for (const line of content.split('\\n')) {\n const trimmed = line.trim();\n if (!trimmed) continue;\n\n try {\n const parsed = JSON.parse(trimmed) as Record<string, unknown>;\n results.push({ line: trimmed, parsed });\n } catch {\n results.push({ line: trimmed, parsed: null });\n }\n }\n return results;\n}\n\n/**\n * Count the number of tombstones (deleted: true records) in the JSONL file.\n */\nexport async function countTombstones(repoRoot: string): Promise<number> {\n const lines = await parseRawJsonlLines(repoRoot);\n let count = 0;\n for (const { parsed } of lines) {\n if (parsed && parsed['deleted'] === true) {\n count++;\n }\n }\n return count;\n}\n\n/**\n * Check if compaction is needed based on tombstone count.\n */\nexport async function needsCompaction(repoRoot: 
string): Promise<boolean> {\n const count = await countTombstones(repoRoot);\n return count >= TOMBSTONE_THRESHOLD;\n}\n\n/**\n * Rewrite the JSONL file without tombstones.\n * Applies last-write-wins deduplication.\n */\nexport async function rewriteWithoutTombstones(repoRoot: string): Promise<number> {\n const filePath = join(repoRoot, LESSONS_PATH);\n const tempPath = filePath + '.tmp';\n\n // Read deduplicated lessons (already handles last-write-wins)\n const { lessons } = await readLessons(repoRoot);\n\n // Count tombstones before rewrite\n const tombstoneCount = await countTombstones(repoRoot);\n\n // Ensure directory exists\n await mkdir(dirname(filePath), { recursive: true });\n\n // Write clean lessons to temp file\n const lines = lessons.map((lesson) => JSON.stringify(lesson) + '\\n');\n await writeFile(tempPath, lines.join(''), 'utf-8');\n\n // Atomic rename\n await rename(tempPath, filePath);\n\n return tombstoneCount;\n}\n\n/**\n * Determine if a lesson should be archived based on age and retrieval count.\n * Lessons are archived if older than ARCHIVE_AGE_DAYS and never retrieved.\n *\n * @param lesson - The lesson to evaluate\n * @param now - Current date for age calculation\n * @returns true if lesson should be archived\n */\nfunction shouldArchive(lesson: Lesson, now: Date): boolean {\n const created = new Date(lesson.created);\n const ageMs = now.getTime() - created.getTime();\n const ageDays = ageMs / MS_PER_DAY;\n\n // Archive if: older than threshold AND never retrieved\n return ageDays > ARCHIVE_AGE_DAYS && (lesson.retrievalCount === undefined || lesson.retrievalCount === 0);\n}\n\n/**\n * Archive old lessons that haven't been retrieved.\n * Moves lessons >90 days old with 0 retrievals to archive files.\n * Returns the number of lessons archived.\n */\nexport async function archiveOldLessons(repoRoot: string): Promise<number> {\n const { lessons } = await readLessons(repoRoot);\n const now = new Date();\n\n const toArchive: Lesson[] = [];\n const toKeep: Lesson[] = [];\n\n for (const lesson of lessons) {\n if (shouldArchive(lesson, now)) {\n toArchive.push(lesson);\n } else {\n toKeep.push(lesson);\n }\n }\n\n if (toArchive.length === 0) {\n return 0;\n }\n\n // Group lessons by archive file (YYYY-MM)\n const archiveGroups = new Map<string, Lesson[]>();\n for (const lesson of toArchive) {\n const created = new Date(lesson.created);\n const archivePath = getArchivePath(repoRoot, created);\n const group = archiveGroups.get(archivePath) ?? 
[];\n group.push(lesson);\n archiveGroups.set(archivePath, group);\n }\n\n // Create archive directory\n const archiveDir = join(repoRoot, ARCHIVE_DIR);\n await mkdir(archiveDir, { recursive: true });\n\n // Append to archive files\n for (const [archivePath, archiveLessons] of archiveGroups) {\n const lines = archiveLessons.map((l) => JSON.stringify(l) + '\\n').join('');\n await appendFile(archivePath, lines, 'utf-8');\n }\n\n // Rewrite main file without archived lessons\n const filePath = join(repoRoot, LESSONS_PATH);\n const tempPath = filePath + '.tmp';\n await mkdir(dirname(filePath), { recursive: true });\n\n const lines = toKeep.map((lesson) => JSON.stringify(lesson) + '\\n');\n await writeFile(tempPath, lines.join(''), 'utf-8');\n await rename(tempPath, filePath);\n\n return toArchive.length;\n}\n\n/**\n * Run full compaction: archive old lessons and remove tombstones.\n */\nexport async function compact(repoRoot: string): Promise<CompactResult> {\n // Count tombstones BEFORE any operations (archiving also rewrites the file)\n const tombstonesBefore = await countTombstones(repoRoot);\n\n // First, archive old lessons\n const archived = await archiveOldLessons(repoRoot);\n\n // Then, remove tombstones (may be fewer now if archiving removed some)\n const tombstonesAfterArchive = await countTombstones(repoRoot);\n await rewriteWithoutTombstones(repoRoot);\n\n // Total tombstones removed = before - after rewrite (which is 0 after rewrite)\n // But we want to report what was actually in the file before compaction\n const tombstonesRemoved = archived > 0 ? tombstonesBefore : tombstonesAfterArchive;\n\n // Get final count\n const { lessons } = await readLessons(repoRoot);\n\n return {\n archived,\n tombstonesRemoved,\n lessonsRemaining: lessons.length,\n };\n}\n","#!/usr/bin/env node\n/**\n * Learning Agent CLI\n *\n * Commands:\n * learn <insight> - Capture a new lesson\n * search <query> - Search lessons by keyword\n * list - List all lessons\n * detect --input - Detect learning triggers from input\n * compact - Archive old lessons and remove tombstones\n */\n\nimport chalk from 'chalk';\nimport { Command } from 'commander';\n\nimport { statSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport { detectAndPropose, parseInputFile } from './capture/integration.js';\nimport { formatBytes, getRepoRoot, parseLimit } from './cli-utils.js';\nimport { VERSION } from './index.js';\nimport { compact, countTombstones, needsCompaction, TOMBSTONE_THRESHOLD } from './storage/compact.js';\nimport { appendLesson, LESSONS_PATH, readLessons } from './storage/jsonl.js';\nimport { DB_PATH, getRetrievalStats, rebuildIndex, searchKeyword, syncIfNeeded } from './storage/sqlite.js';\nimport { generateId, LessonSchema } from './types.js';\nimport type { Lesson } from './types.js';\n\n// ============================================================================\n// Output Formatting Helpers\n// ============================================================================\n\n/** Output helper functions for consistent formatting */\nconst out = {\n success: (msg: string): void => console.log(chalk.green('[ok]'), msg),\n error: (msg: string): void => console.error(chalk.red('[error]'), msg),\n info: (msg: string): void => console.log(chalk.blue('[info]'), msg),\n warn: (msg: string): void => console.log(chalk.yellow('[warn]'), msg),\n};\n\n/** Global options interface */\ninterface GlobalOpts {\n verbose: boolean;\n quiet: boolean;\n}\n\n/**\n * Get global options from command.\n */\nfunction 
getGlobalOpts(cmd: Command): GlobalOpts {\n const opts = cmd.optsWithGlobals() as { verbose?: boolean; quiet?: boolean };\n return {\n verbose: opts.verbose ?? false,\n quiet: opts.quiet ?? false,\n };\n}\n\n/** Default limit for search results */\nconst DEFAULT_SEARCH_LIMIT = '10';\n\n/** Default limit for list results */\nconst DEFAULT_LIST_LIMIT = '20';\n\nconst program = new Command();\n\n// Add global options\nprogram\n .option('-v, --verbose', 'Show detailed output')\n .option('-q, --quiet', 'Suppress non-essential output');\n\nprogram\n .name('learning-agent')\n .description('Repository-scoped learning system for Claude Code')\n .version(VERSION);\n\nprogram\n .command('learn <insight>')\n .description('Capture a new lesson')\n .option('-t, --trigger <text>', 'What triggered this lesson')\n .option('--tags <tags>', 'Comma-separated tags', '')\n .option('-y, --yes', 'Skip confirmation')\n .action(async function (this: Command, insight: string, options: { trigger?: string; tags: string; yes?: boolean }) {\n const repoRoot = getRepoRoot();\n const { quiet } = getGlobalOpts(this);\n\n const lesson: Lesson = {\n id: generateId(insight),\n type: 'quick',\n trigger: options.trigger ?? 'Manual capture',\n insight,\n tags: options.tags ? options.tags.split(',').map((t) => t.trim()) : [],\n source: 'manual',\n context: {\n tool: 'cli',\n intent: 'manual learning',\n },\n created: new Date().toISOString(),\n confirmed: options.yes ?? false,\n supersedes: [],\n related: [],\n };\n\n await appendLesson(repoRoot, lesson);\n out.success(`Learned: ${insight}`);\n if (!quiet) {\n console.log(`ID: ${chalk.dim(lesson.id)}`);\n }\n });\n\nprogram\n .command('search <query>')\n .description('Search lessons by keyword')\n .option('-n, --limit <number>', 'Maximum results', DEFAULT_SEARCH_LIMIT)\n .action(async function (this: Command, query: string, options: { limit: string }) {\n const repoRoot = getRepoRoot();\n const limit = parseLimit(options.limit, 'limit');\n const { verbose, quiet } = getGlobalOpts(this);\n\n // Sync index if JSONL has changed\n await syncIfNeeded(repoRoot);\n\n const results = await searchKeyword(repoRoot, query, limit);\n\n if (results.length === 0) {\n console.log('No lessons match your search. Try a different query or use \"list\" to see all lessons.');\n return;\n }\n\n if (!quiet) {\n out.info(`Found ${results.length} lesson(s):\\n`);\n }\n for (const lesson of results) {\n console.log(`[${chalk.cyan(lesson.id)}] ${lesson.insight}`);\n console.log(` Trigger: ${lesson.trigger}`);\n if (verbose && lesson.context) {\n console.log(` Context: ${lesson.context.tool} - ${lesson.context.intent}`);\n console.log(` Created: ${lesson.created}`);\n }\n if (lesson.tags.length > 0) {\n console.log(` Tags: ${lesson.tags.join(', ')}`);\n }\n console.log();\n }\n });\n\nprogram\n .command('list')\n .description('List all lessons')\n .option('-n, --limit <number>', 'Maximum results', DEFAULT_LIST_LIMIT)\n .action(async function (this: Command, options: { limit: string }) {\n const repoRoot = getRepoRoot();\n const limit = parseLimit(options.limit, 'limit');\n const { verbose, quiet } = getGlobalOpts(this);\n\n const { lessons, skippedCount } = await readLessons(repoRoot);\n\n if (lessons.length === 0) {\n console.log('No lessons found. 
Get started with: learn \"Your first lesson\"');\n if (skippedCount > 0) {\n out.warn(`${skippedCount} corrupted lesson(s) skipped.`);\n }\n return;\n }\n\n const toShow = lessons.slice(0, limit);\n\n // Show summary unless quiet mode\n if (!quiet) {\n out.info(`Showing ${toShow.length} of ${lessons.length} lesson(s):\\n`);\n }\n\n for (const lesson of toShow) {\n console.log(`[${chalk.cyan(lesson.id)}] ${lesson.insight}`);\n if (verbose) {\n console.log(` Type: ${lesson.type} | Source: ${lesson.source}`);\n console.log(` Created: ${lesson.created}`);\n if (lesson.context) {\n console.log(` Context: ${lesson.context.tool} - ${lesson.context.intent}`);\n }\n } else {\n console.log(` Type: ${lesson.type} | Source: ${lesson.source}`);\n }\n if (lesson.tags.length > 0) {\n console.log(` Tags: ${lesson.tags.join(', ')}`);\n }\n console.log();\n }\n\n if (skippedCount > 0) {\n out.warn(`${skippedCount} corrupted lesson(s) skipped.`);\n }\n });\n\nprogram\n .command('rebuild')\n .description('Rebuild SQLite index from JSONL')\n .option('-f, --force', 'Force rebuild even if unchanged')\n .action(async (options: { force?: boolean }) => {\n const repoRoot = getRepoRoot();\n if (options.force) {\n console.log('Forcing index rebuild...');\n await rebuildIndex(repoRoot);\n console.log('Index rebuilt.');\n } else {\n const rebuilt = await syncIfNeeded(repoRoot);\n if (rebuilt) {\n console.log('Index rebuilt (JSONL changed).');\n } else {\n console.log('Index is up to date.');\n }\n }\n });\n\nprogram\n .command('detect')\n .description('Detect learning triggers from input')\n .requiredOption('--input <file>', 'Path to JSON input file')\n .option('--save', 'Automatically save proposed lesson')\n .option('--json', 'Output result as JSON')\n .action(\n async (options: { input: string; save?: boolean; json?: boolean }) => {\n const repoRoot = getRepoRoot();\n\n const input = await parseInputFile(options.input);\n const result = await detectAndPropose(repoRoot, input);\n\n if (!result) {\n if (options.json) {\n console.log(JSON.stringify({ detected: false }));\n } else {\n console.log('No learning trigger detected.');\n }\n return;\n }\n\n if (options.json) {\n console.log(JSON.stringify({ detected: true, ...result }));\n return;\n }\n\n console.log('Learning trigger detected!');\n console.log(` Trigger: ${result.trigger}`);\n console.log(` Source: ${result.source}`);\n console.log(` Proposed: ${result.proposedInsight}`);\n\n if (options.save) {\n const lesson: Lesson = {\n id: generateId(result.proposedInsight),\n type: 'quick',\n trigger: result.trigger,\n insight: result.proposedInsight,\n tags: [],\n source: result.source,\n context: { tool: 'detect', intent: 'auto-capture' },\n created: new Date().toISOString(),\n confirmed: false,\n supersedes: [],\n related: [],\n };\n\n await appendLesson(repoRoot, lesson);\n console.log(`\\nSaved as lesson: ${lesson.id}`);\n }\n }\n );\n\nprogram\n .command('compact')\n .description('Compact lessons: archive old lessons and remove tombstones')\n .option('-f, --force', 'Run compaction even if below threshold')\n .option('--dry-run', 'Show what would be done without making changes')\n .action(async (options: { force?: boolean; dryRun?: boolean }) => {\n const repoRoot = getRepoRoot();\n\n const tombstones = await countTombstones(repoRoot);\n const needs = await needsCompaction(repoRoot);\n\n if (options.dryRun) {\n console.log('Dry run - no changes will be made.\\n');\n console.log(`Tombstones found: ${tombstones}`);\n console.log(`Compaction needed: ${needs ? 
'yes' : 'no'}`);\n return;\n }\n\n if (!needs && !options.force) {\n console.log(`Compaction not needed (${tombstones} tombstones, threshold is ${TOMBSTONE_THRESHOLD}).`);\n console.log('Use --force to compact anyway.');\n return;\n }\n\n console.log('Running compaction...');\n const result = await compact(repoRoot);\n\n console.log('\\nCompaction complete:');\n console.log(` Archived: ${result.archived} lesson(s)`);\n console.log(` Tombstones removed: ${result.tombstonesRemoved}`);\n console.log(` Lessons remaining: ${result.lessonsRemaining}`);\n\n // Rebuild SQLite index after compaction\n await rebuildIndex(repoRoot);\n console.log(' Index rebuilt.');\n });\n\nprogram\n .command('export')\n .description('Export lessons as JSON to stdout')\n .option('--since <date>', 'Only include lessons created after this date (ISO8601)')\n .option('--tags <tags>', 'Filter by tags (comma-separated, OR logic)')\n .action(async (options: { since?: string; tags?: string }) => {\n const repoRoot = getRepoRoot();\n\n const { lessons } = await readLessons(repoRoot);\n\n let filtered = lessons;\n\n // Filter by date if --since provided\n if (options.since) {\n const sinceDate = new Date(options.since);\n if (Number.isNaN(sinceDate.getTime())) {\n console.error(`Invalid date format: ${options.since}. Use ISO8601 format (e.g., 2024-01-15).`);\n process.exit(1);\n }\n filtered = filtered.filter((lesson) => new Date(lesson.created) >= sinceDate);\n }\n\n // Filter by tags if --tags provided (OR logic)\n if (options.tags) {\n const filterTags = options.tags.split(',').map((t) => t.trim());\n filtered = filtered.filter((lesson) => lesson.tags.some((tag) => filterTags.includes(tag)));\n }\n\n // Output JSON to stdout (portable format for sharing)\n console.log(JSON.stringify(filtered, null, 2));\n });\n\nprogram\n .command('import <file>')\n .description('Import lessons from a JSONL file')\n .action(async (file: string) => {\n const repoRoot = getRepoRoot();\n\n // Read input file\n let content: string;\n try {\n const { readFile } = await import('node:fs/promises');\n content = await readFile(file, 'utf-8');\n } catch (err) {\n const code = (err as NodeJS.ErrnoException).code;\n if (code === 'ENOENT') {\n console.error(`Error: File not found: ${file}`);\n } else {\n console.error(`Error reading file: ${(err as Error).message}`);\n }\n process.exit(1);\n }\n\n // Get existing lesson IDs\n const { lessons: existingLessons } = await readLessons(repoRoot);\n const existingIds = new Set(existingLessons.map((l) => l.id));\n\n // Parse and validate each line\n const lines = content.split('\\n');\n let imported = 0;\n let skipped = 0;\n let invalid = 0;\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed) continue;\n\n // Parse JSON\n let parsed: unknown;\n try {\n parsed = JSON.parse(trimmed);\n } catch {\n invalid++;\n continue;\n }\n\n // Validate schema\n const result = LessonSchema.safeParse(parsed);\n if (!result.success) {\n invalid++;\n continue;\n }\n\n const lesson: Lesson = result.data;\n\n // Skip if ID already exists\n if (existingIds.has(lesson.id)) {\n skipped++;\n continue;\n }\n\n // Append lesson\n await appendLesson(repoRoot, lesson);\n existingIds.add(lesson.id);\n imported++;\n }\n\n // Format summary\n const lessonWord = imported === 1 ? 
'lesson' : 'lessons';\n const parts: string[] = [];\n if (skipped > 0) parts.push(`${skipped} skipped`);\n if (invalid > 0) parts.push(`${invalid} invalid`);\n\n if (parts.length > 0) {\n console.log(`Imported ${imported} ${lessonWord} (${parts.join(', ')})`);\n } else {\n console.log(`Imported ${imported} ${lessonWord}`);\n }\n });\n\nprogram\n .command('stats')\n .description('Show database health and statistics')\n .action(async () => {\n const repoRoot = getRepoRoot();\n\n // Sync index to ensure accurate stats\n await syncIfNeeded(repoRoot);\n\n // Read lessons from JSONL to get accurate counts\n const { lessons } = await readLessons(repoRoot);\n const deletedCount = await countTombstones(repoRoot);\n const totalLessons = lessons.length;\n\n // Get retrieval stats from SQLite\n const retrievalStats = getRetrievalStats(repoRoot);\n const totalRetrievals = retrievalStats.reduce((sum, s) => sum + s.count, 0);\n const avgRetrievals = totalLessons > 0 ? (totalRetrievals / totalLessons).toFixed(1) : '0.0';\n\n // Get storage sizes\n const jsonlPath = join(repoRoot, LESSONS_PATH);\n const dbPath = join(repoRoot, DB_PATH);\n\n let dataSize = 0;\n let indexSize = 0;\n\n try {\n dataSize = statSync(jsonlPath).size;\n } catch {\n // File doesn't exist\n }\n\n try {\n indexSize = statSync(dbPath).size;\n } catch {\n // File doesn't exist\n }\n\n const totalSize = dataSize + indexSize;\n\n // Format output\n const deletedInfo = deletedCount > 0 ? ` (${deletedCount} deleted)` : '';\n console.log(`Lessons: ${totalLessons} total${deletedInfo}`);\n console.log(`Retrievals: ${totalRetrievals} total, ${avgRetrievals} avg per lesson`);\n console.log(`Storage: ${formatBytes(totalSize)} (index: ${formatBytes(indexSize)}, data: ${formatBytes(dataSize)})`);\n });\n\nprogram.parse();\n"]}
|
|
1
|
+
{"version":3,"sources":["../src/types.ts","../src/storage/jsonl.ts","../src/storage/sqlite.ts","../src/capture/quality.ts","../src/capture/triggers.ts","../src/capture/integration.ts","../src/cli-utils.ts","../src/embeddings/model.ts","../src/embeddings/nomic.ts","../src/search/vector.ts","../src/search/ranking.ts","../src/retrieval/session.ts","../src/retrieval/plan.ts","../src/index.ts","../src/storage/compact.ts","../src/cli.ts"],"names":["createHash","join","dirname","mtime","DEFAULT_LIMIT","readFile","mkdir","lines","appendFile","existsSync","writeFile","homedir","rename","statSync"],"mappings":";;;;;;;;;;;;;AAQO,IAAM,YAAA,GAAe,EAAE,IAAA,CAAK;AAAA,EACjC,iBAAA;AAAA,EACA,iBAAA;AAAA,EACA,cAAA;AAAA,EACA;AACF,CAAC,CAAA;AAGM,IAAM,aAAA,GAAgB,EAAE,MAAA,CAAO;AAAA,EACpC,IAAA,EAAM,EAAE,MAAA,EAAO;AAAA,EACf,MAAA,EAAQ,EAAE,MAAA;AACZ,CAAC,CAAA;AAGM,IAAM,aAAA,GAAgB,EAAE,MAAA,CAAO;AAAA,EACpC,GAAA,EAAK,EAAE,MAAA,EAAO;AAAA,EACd,IAAA,EAAM,EAAE,MAAA;AACV,CAAC,CAAA;AAGM,IAAM,iBAAiB,CAAA,CAAE,IAAA,CAAK,CAAC,MAAA,EAAQ,QAAA,EAAU,KAAK,CAAC,CAAA;AAGvD,IAAM,mBAAmB,CAAA,CAAE,IAAA,CAAK,CAAC,OAAA,EAAS,MAAM,CAAC,CAAA;AAYjD,IAAM,YAAA,GAAe,EAAE,MAAA,CAAO;AAAA;AAAA,EAEnC,EAAA,EAAI,EAAE,MAAA,EAAO;AAAA,EACb,IAAA,EAAM,gBAAA;AAAA,EACN,OAAA,EAAS,EAAE,MAAA,EAAO;AAAA,EAClB,OAAA,EAAS,EAAE,MAAA,EAAO;AAAA;AAAA,EAGlB,IAAA,EAAM,CAAA,CAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA;AAAA,EACxB,MAAA,EAAQ,YAAA;AAAA,EACR,OAAA,EAAS,aAAA;AAAA,EACT,OAAA,EAAS,EAAE,MAAA,EAAO;AAAA;AAAA,EAClB,SAAA,EAAW,EAAE,OAAA,EAAQ;AAAA;AAAA,EAGrB,UAAA,EAAY,CAAA,CAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA;AAAA,EAC9B,OAAA,EAAS,CAAA,CAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA;AAAA;AAAA,EAG3B,QAAA,EAAU,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA,EAC9B,QAAA,EAAU,eAAe,QAAA,EAAS;AAAA,EAClC,OAAA,EAAS,cAAc,QAAA,EAAS;AAAA;AAAA,EAGhC,OAAA,EAAS,CAAA,CAAE,OAAA,EAAQ,CAAE,QAAA,EAAS;AAAA,EAC9B,cAAA,EAAgB,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAC7B,CAAC,CAAA;AAG8B,EAAE,MAAA,CAAO;AAAA,EACtC,EAAA,EAAI,EAAE,MAAA,EAAO;AAAA,EACb,OAAA,EAAS,CAAA,CAAE,OAAA,CAAQ,IAAI,CAAA;AAAA,EACvB,SAAA,EAAW,EAAE,MAAA;AAAO;AACtB,CAAC;AAeM,SAAS,WAAW,OAAA,EAAyB;AAClD,EAAA,MAAM,IAAA,GAAO,WAAW,QAAQ,CAAA,CAAE,OAAO,OAAO,CAAA,CAAE,OAAO,KAAK,CAAA;AAC9D,EAAA,OAAO,CAAA,CAAA,EAAI,IAAA,CAAK,KAAA,CAAM,CAAA,EAAG,CAAC,CAAC,CAAA,CAAA;AAC7B;;;AClFO,IAAM,YAAA,GAAe,6BAAA;AAgC5B,eAAsB,YAAA,CAAa,UAAkB,MAAA,EAA+B;AAClF,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC5C,EAAA,MAAM,MAAM,OAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AAElD,EAAA,MAAM,IAAA,GAAO,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA,GAAI,IAAA;AACtC,EAAA,MAAM,UAAA,CAAW,QAAA,EAAU,IAAA,EAAM,OAAO,CAAA;AAC1C;AAMA,SAAS,aAAA,CACP,IAAA,EACA,UAAA,EACA,MAAA,EACA,YAAA,EACe;AAEf,EAAA,IAAI,MAAA;AACJ,EAAA,IAAI;AACF,IAAA,MAAA,GAAS,IAAA,CAAK,MAAM,IAAI,CAAA;AAAA,EAC1B,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,UAAA,GAAyB;AAAA,MAC7B,IAAA,EAAM,UAAA;AAAA,MACN,OAAA,EAAS,CAAA,cAAA,EAAkB,GAAA,CAAc,OAAO,CAAA,CAAA;AAAA,MAChD,KAAA,EAAO;AAAA,KACT;AACA,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,oBAAA,EAAuB,UAAU,CAAA,EAAA,EAAK,UAAA,CAAW,OAAO,CAAA,CAAE,CAAA;AAAA,IAC5E;AACA,IAAA,YAAA,GAAe,UAAU,CAAA;AACzB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,MAAM,MAAA,GAAS,YAAA,CAAa,SAAA,CAAU,MAAM,CAAA;AAC5C,EAAA,IAAI,CAAC,OAAO,OAAA,EAAS;AACnB,IAAA,MAAM,UAAA,GAAyB;AAAA,MAC7B,IAAA,EAAM,UAAA;AAAA,MACN,OAAA,EAAS,CAAA,0BAAA,EAA6B,MAAA,CAAO,KAAA,CAAM,OAAO,CAAA,CAAA;AAAA,MAC1D,OAAO,MAAA,CAAO;AAAA,KAChB;AACA,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,oBAAA,EAAuB,UAAU,CAAA,EAAA,EAAK,UAAA,CAAW,OAAO,CAAA,CAAE,CAAA;AAAA,IAC5E;AACA,IAAA,YAAA,GAAe,UAAU,CAAA;AACzB,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,MAAA,CAAO,IAAA;AAChB;AAWA,eAAsB,WAAA,CACpB,QAAA,EACA,OAAA,
GAA8B,EAAC,EACH;AAC5B,EAAA,MAAM,EAAE,MAAA,GAAS,KAAA,EAAO,YAAA,EAAa,GAAI,OAAA;AACzC,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAE5C,EAAA,IAAI,OAAA;AACJ,EAAA,IAAI;AACF,IAAA,OAAA,GAAU,MAAM,QAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AAAA,EAC5C,SAAS,GAAA,EAAK;AACZ,IAAA,IAAK,GAAA,CAA8B,SAAS,QAAA,EAAU;AACpD,MAAA,OAAO,EAAE,OAAA,EAAS,EAAC,EAAG,cAAc,CAAA,EAAE;AAAA,IACxC;AACA,IAAA,MAAM,GAAA;AAAA,EACR;AAEA,EAAA,MAAM,OAAA,uBAAc,GAAA,EAAoB;AACxC,EAAA,IAAI,YAAA,GAAe,CAAA;AAEnB,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA;AAChC,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,KAAA,CAAM,QAAQ,CAAA,EAAA,EAAK;AACrC,IAAA,MAAM,OAAA,GAAU,KAAA,CAAM,CAAC,CAAA,CAAG,IAAA,EAAK;AAC/B,IAAA,IAAI,CAAC,OAAA,EAAS;AAEd,IAAA,MAAM,SAAS,aAAA,CAAc,OAAA,EAAS,CAAA,GAAI,CAAA,EAAG,QAAQ,YAAY,CAAA;AACjE,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,YAAA,EAAA;AACA,MAAA;AAAA,IACF;AAEA,IAAA,IAAI,OAAO,OAAA,EAAS;AAClB,MAAA,OAAA,CAAQ,MAAA,CAAO,OAAO,EAAE,CAAA;AAAA,IAC1B,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,GAAA,CAAI,MAAA,CAAO,EAAA,EAAI,MAAM,CAAA;AAAA,IAC/B;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,SAAS,KAAA,CAAM,IAAA,CAAK,QAAQ,MAAA,EAAQ,GAAG,YAAA,EAAa;AAC/D;;;AC/HO,IAAM,OAAA,GAAU,+BAAA;AAGvB,IAAM,UAAA,GAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,CAAA;AAoEnB,SAAS,aAAa,QAAA,EAA8B;AAClD,EAAA,QAAA,CAAS,KAAK,UAAU,CAAA;AAC1B;AAEA,IAAI,EAAA,GAA0B,IAAA;AAMvB,SAAS,WAAA,CAAY,SAAiB,OAAA,EAAyB;AACpE,EAAA,OAAOA,UAAAA,CAAW,QAAQ,CAAA,CAAE,MAAA,CAAO,CAAA,EAAG,OAAO,CAAA,CAAA,EAAI,OAAO,CAAA,CAAE,CAAA,CAAE,MAAA,CAAO,KAAK,CAAA;AAC1E;AAqBO,SAAS,OAAO,QAAA,EAAgC;AACrD,EAAA,IAAI,IAAI,OAAO,EAAA;AAEf,EAAA,MAAM,MAAA,GAASC,IAAAA,CAAK,QAAA,EAAU,OAAO,CAAA;AAGrC,EAAA,MAAM,GAAA,GAAMC,QAAQ,MAAM,CAAA;AAC1B,EAAA,SAAA,CAAU,GAAA,EAAK,EAAE,SAAA,EAAW,IAAA,EAAM,CAAA;AAElC,EAAA,EAAA,GAAK,IAAI,SAAS,MAAM,CAAA;AAGxB,EAAA,EAAA,CAAG,OAAO,oBAAoB,CAAA;AAE9B,EAAA,YAAA,CAAa,EAAE,CAAA;AAEf,EAAA,OAAO,EAAA;AACT;AAiDO,SAAS,kBAAA,CACd,QAAA,EACA,QAAA,EACA,YAAA,EACiB;AACjB,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAChC,EAAA,MAAM,MAAM,QAAA,CACT,OAAA,CAAQ,0DAA0D,CAAA,CAClE,IAAI,QAAQ,CAAA;AAEf,EAAA,IAAI,CAAC,GAAA,IAAO,CAAC,IAAI,SAAA,IAAa,CAAC,IAAI,YAAA,EAAc;AAC/C,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,IAAI,YAAA,IAAgB,GAAA,CAAI,YAAA,KAAiB,YAAA,EAAc;AACrD,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,MAAM,UAAU,IAAI,YAAA;AAAA,IAClB,IAAI,SAAA,CAAU,MAAA;AAAA,IACd,IAAI,SAAA,CAAU,UAAA;AAAA,IACd,GAAA,CAAI,UAAU,UAAA,GAAa;AAAA,GAC7B;AACA,EAAA,OAAO,KAAA,CAAM,KAAK,OAAO,CAAA;AAC3B;AAKO,SAAS,kBAAA,CACd,QAAA,EACA,QAAA,EACA,SAAA,EACA,IAAA,EACM;AACN,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAGhC,EAAA,MAAM,UAAU,SAAA,YAAqB,YAAA,GAAe,SAAA,GAAY,IAAI,aAAa,SAAS,CAAA;AAC1F,EAAA,MAAM,MAAA,GAAS,OAAO,IAAA,CAAK,OAAA,CAAQ,QAAQ,OAAA,CAAQ,UAAA,EAAY,QAAQ,UAAU,CAAA;AAEjF,EAAA,QAAA,CACG,QAAQ,iEAAiE,CAAA,CACzE,GAAA,CAAI,MAAA,EAAQ,MAAM,QAAQ,CAAA;AAC/B;AA2BA,SAAS,YAAY,GAAA,EAAwB;AAC3C,EAAA,MAAM,MAAA,GAAiB;AAAA,IACrB,IAAI,GAAA,CAAI,EAAA;AAAA,IACR,MAAM,GAAA,CAAI,IAAA;AAAA,IACV,SAAS,GAAA,CAAI,OAAA;AAAA,IACb,SAAS,GAAA,CAAI,OAAA;AAAA,IACb,IAAA,EAAM,GAAA,CAAI,IAAA,GAAO,GAAA,CAAI,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,MAAA,CAAO,OAAO,CAAA,GAAI,EAAC;AAAA,IACxD,QAAQ,GAAA,CAAI,MAAA;AAAA,IACZ,OAAA,EAAS,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,OAAO,CAAA;AAAA,IAC/B,UAAA,EAAY,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,UAAU,CAAA;AAAA,IACrC,OAAA,EAAS,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,OAAO,CAAA;AAAA,IAC/B,SAAS,GAAA,CAAI,OAAA;AAAA,IACb,SAAA,EAAW,IAAI,SAAA,KAAc;AAAA,GAC/B;AAGA,EAAA,IAAI,GAAA,CAAI,aAAa
,IAAA,EAAM;AACzB,IAAA,MAAA,CAAO,WAAW,GAAA,CAAI,QAAA;AAAA,EACxB;AACA,EAAA,IAAI,GAAA,CAAI,aAAa,IAAA,EAAM;AACzB,IAAA,MAAA,CAAO,WAAW,GAAA,CAAI,QAAA;AAAA,EACxB;AACA,EAAA,IAAI,GAAA,CAAI,YAAY,CAAA,EAAG;AACrB,IAAA,MAAA,CAAO,OAAA,GAAU,IAAA;AAAA,EACnB;AACA,EAAA,IAAI,GAAA,CAAI,kBAAkB,CAAA,EAAG;AAC3B,IAAA,MAAA,CAAO,iBAAiB,GAAA,CAAI,eAAA;AAAA,EAC9B;AAEA,EAAA,OAAO,MAAA;AACT;AAWA,SAAS,wBAAwB,QAAA,EAA0D;AACzF,EAAA,MAAM,KAAA,uBAAY,GAAA,EAAiC;AACnD,EAAA,MAAM,IAAA,GAAO,QAAA,CACV,OAAA,CAAQ,6EAA6E,EACrF,GAAA,EAAI;AAEP,EAAA,KAAA,MAAW,OAAO,IAAA,EAAM;AACtB,IAAA,IAAI,GAAA,CAAI,SAAA,IAAa,GAAA,CAAI,YAAA,EAAc;AACrC,MAAA,KAAA,CAAM,GAAA,CAAI,GAAA,CAAI,EAAA,EAAI,EAAE,SAAA,EAAW,IAAI,SAAA,EAAW,WAAA,EAAa,GAAA,CAAI,YAAA,EAAc,CAAA;AAAA,IAC/E;AAAA,EACF;AACA,EAAA,OAAO,KAAA;AACT;AAGA,IAAM,iBAAA,GAAoB;AAAA;AAAA;AAAA,CAAA;AAQ1B,SAAS,cAAc,QAAA,EAAiC;AACtD,EAAA,MAAM,SAAA,GAAYD,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC7C,EAAA,IAAI;AACF,IAAA,MAAM,IAAA,GAAO,SAAS,SAAS,CAAA;AAC/B,IAAA,OAAO,IAAA,CAAK,OAAA;AAAA,EACd,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF;AAKA,SAAS,iBAAiB,QAAA,EAAuC;AAC/D,EAAA,MAAM,MAAM,QAAA,CACT,OAAA,CAAQ,0CAA0C,CAAA,CAClD,IAAI,iBAAiB,CAAA;AACxB,EAAA,OAAO,GAAA,GAAM,UAAA,CAAW,GAAA,CAAI,KAAK,CAAA,GAAI,IAAA;AACvC;AAKA,SAAS,gBAAA,CAAiB,UAAwB,KAAA,EAAqB;AACrE,EAAA,QAAA,CACG,QAAQ,4DAA4D,CAAA,CACpE,IAAI,iBAAA,EAAmB,KAAA,CAAM,UAAU,CAAA;AAC5C;AAOA,eAAsB,aAAa,QAAA,EAAiC;AAClE,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAChC,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAE9C,EAAA,MAAM,gBAAA,GAAmB,wBAAwB,QAAQ,CAAA;AACzD,EAAA,QAAA,CAAS,KAAK,qBAAqB,CAAA;AAEnC,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AAExB,IAAA,MAAME,MAAAA,GAAQ,cAAc,QAAQ,CAAA;AACpC,IAAA,IAAIA,WAAU,IAAA,EAAM;AAClB,MAAA,gBAAA,CAAiB,UAAUA,MAAK,CAAA;AAAA,IAClC;AACA,IAAA;AAAA,EACF;AAEA,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,OAAA,CAAQ,iBAAiB,CAAA;AACjD,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,WAAA,CAAY,CAAC,KAAA,KAAoB;AAC3D,IAAA,KAAA,MAAW,UAAU,KAAA,EAAO;AAC1B,MAAA,MAAM,OAAA,GAAU,WAAA,CAAY,MAAA,CAAO,OAAA,EAAS,OAAO,OAAO,CAAA;AAC1D,MAAA,MAAM,MAAA,GAAS,gBAAA,CAAiB,GAAA,CAAI,MAAA,CAAO,EAAE,CAAA;AAC7C,MAAA,MAAM,aAAA,GAAgB,MAAA,IAAU,MAAA,CAAO,WAAA,KAAgB,OAAA;AAEvD,MAAA,MAAA,CAAO,GAAA,CAAI;AAAA,QACT,IAAI,MAAA,CAAO,EAAA;AAAA,QACX,MAAM,MAAA,CAAO,IAAA;AAAA,QACb,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,QAAA,EAAU,OAAO,QAAA,IAAY,IAAA;AAAA,QAC7B,QAAA,EAAU,OAAO,QAAA,IAAY,IAAA;AAAA,QAC7B,IAAA,EAAM,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,GAAG,CAAA;AAAA,QAC1B,QAAQ,MAAA,CAAO,MAAA;AAAA,QACf,OAAA,EAAS,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,OAAO,CAAA;AAAA,QACtC,UAAA,EAAY,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,UAAU,CAAA;AAAA,QAC5C,OAAA,EAAS,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,OAAO,CAAA;AAAA,QACtC,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,SAAA,EAAW,MAAA,CAAO,SAAA,GAAY,CAAA,GAAI,CAAA;AAAA,QAClC,OAAA,EAAS,MAAA,CAAO,OAAA,GAAU,CAAA,GAAI,CAAA;AAAA,QAC9B,eAAA,EAAiB,OAAO,cAAA,IAAkB,CAAA;AAAA,QAC1C,cAAA,EAAgB,IAAA;AAAA;AAAA,QAChB,SAAA,EAAW,aAAA,GAAgB,MAAA,CAAO,SAAA,GAAY,IAAA;AAAA,QAC9C,YAAA,EAAc,aAAA,GAAgB,MAAA,CAAO,WAAA,GAAc;AAAA,OACpD,CAAA;AAAA,IACH;AAAA,EACF,CAAC,CAAA;AAED,EAAA,UAAA,CAAW,OAAO,CAAA;AAGlB,EAAA,MAAM,KAAA,GAAQ,cAAc,QAAQ,CAAA;AACpC,EAAA,IAAI,UAAU,IAAA,EAAM;AAClB,IAAA,gBAAA,CAAiB,UAAU,KAAK,CAAA;AAAA,EAClC;AACF;AAYA,eAAsB,YAAA,CACpB,QAAA,EACA,OAAA,GAAuB,EAAC,EACN;AAClB,EAAA,MAAM,EAAE,KAAA,GAAQ,KAAA,EAAM,GAAI,OAAA;AAG1B,EAAA,MAAM,UAAA,GAAa,cAAc,QAAQ,CAAA;AACzC,EAAA,IAAI,UAAA,KAAe,IAAA,IAAQ,CAAC,KAAA,EAAO;AAEjC,IAAA,OAAO,KAAA;AAAA,EACT;AAEA,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAChC,EAAA,MAAM,aAAA,GAAgB,iBAAiB,QAAQ,CAAA;AAG/C,EAAA,MAAM,eAAe,KAAA,IAAS,aAAA,KAAkB,IAAA,IAAS,UAAA,KAAe,QAAQ,UAAA,GAAa,aAAA;AAE7F,EAAA,IAAI,YAAA,EAAc;AAChB,IAAA,MAAM,aAAa,Q
AAQ,CAAA;AAC3B,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,KAAA;AACT;AAOA,eAAsB,aAAA,CACpB,QAAA,EACA,KAAA,EACA,KAAA,EACmB;AACnB,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAGhC,EAAA,MAAM,WAAA,GAAc,QAAA,CAAS,OAAA,CAAQ,qCAAqC,EAAE,GAAA,EAAI;AAGhF,EAAA,IAAI,WAAA,CAAY,GAAA,KAAQ,CAAA,EAAG,OAAO,EAAC;AAGnC,EAAA,MAAM,OAAO,QAAA,CACV,OAAA;AAAA,IACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAA;AAAA,GAOF,CACC,GAAA,CAAI,KAAA,EAAO,KAAK,CAAA;AAGnB,EAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,IAAA,uBAAA,CAAwB,UAAU,IAAA,CAAK,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,CAAE,EAAE,CAAC,CAAA;AAAA,EACzD;AAEA,EAAA,OAAO,IAAA,CAAK,IAAI,WAAW,CAAA;AAC7B;AAcO,SAAS,uBAAA,CAAwB,UAAkB,SAAA,EAA2B;AACnF,EAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAE5B,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAChC,EAAA,MAAM,GAAA,GAAA,iBAAM,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAEnC,EAAA,MAAM,MAAA,GAAS,SAAS,OAAA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAAA,CAK/B,CAAA;AAED,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,WAAA,CAAY,CAAC,GAAA,KAAkB;AACzD,IAAA,KAAA,MAAW,MAAM,GAAA,EAAK;AACpB,MAAA,MAAA,CAAO,GAAA,CAAI,KAAK,EAAE,CAAA;AAAA,IACpB;AAAA,EACF,CAAC,CAAA;AAED,EAAA,UAAA,CAAW,SAAS,CAAA;AACtB;AAMO,SAAS,kBAAkB,QAAA,EAAmC;AACnE,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAEhC,EAAA,MAAM,IAAA,GAAO,QAAA,CACV,OAAA,CAAQ,yDAAyD,EACjE,GAAA,EAAI;AAEP,EAAA,OAAO,IAAA,CAAK,GAAA,CAAI,CAAC,GAAA,MAAS;AAAA,IACxB,IAAI,GAAA,CAAI,EAAA;AAAA,IACR,OAAO,GAAA,CAAI,eAAA;AAAA,IACX,eAAe,GAAA,CAAI;AAAA,GACrB,CAAE,CAAA;AACJ;;;AChhBA,IAAM,4BAAA,GAA+B,GAAA;AAkBrC,eAAsB,OAAA,CACpB,QAAA,EACA,OAAA,EACA,OAAA,GAA0B,EAAC,EACH;AACxB,EAAA,MAAM,SAAA,GAAY,QAAQ,SAAA,IAAa,4BAAA;AAGvC,EAAA,MAAM,aAAa,QAAQ,CAAA;AAG3B,EAAA,MAAM,KAAA,GAAQ,QACX,WAAA,EAAY,CACZ,QAAQ,cAAA,EAAgB,EAAE,EAC1B,KAAA,CAAM,KAAK,EACX,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,CAC1B,KAAA,CAAM,GAAG,CAAC,CAAA;AAEb,EAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG;AACtB,IAAA,OAAO,EAAE,OAAO,IAAA,EAAK;AAAA,EACvB;AAGA,EAAA,MAAM,WAAA,GAAc,KAAA,CAAM,IAAA,CAAK,MAAM,CAAA;AACrC,EAAA,MAAM,OAAA,GAAU,MAAM,aAAA,CAAc,QAAA,EAAU,aAAa,EAAE,CAAA;AAE7D,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxB,IAAA,OAAO,EAAE,OAAO,IAAA,EAAK;AAAA,EACvB;AAGA,EAAA,MAAM,YAAA,GAAe,IAAI,GAAA,CAAI,OAAA,CAAQ,aAAY,CAAE,KAAA,CAAM,KAAK,CAAC,CAAA;AAE/D,EAAA,KAAA,MAAW,UAAU,OAAA,EAAS;AAC5B,IAAA,MAAM,WAAA,GAAc,IAAI,GAAA,CAAI,MAAA,CAAO,QAAQ,WAAA,EAAY,CAAE,KAAA,CAAM,KAAK,CAAC,CAAA;AAGrE,IAAA,MAAM,YAAA,GAAe,CAAC,GAAG,YAAY,CAAA,CAAE,MAAA,CAAO,CAAC,CAAA,KAAM,WAAA,CAAY,GAAA,CAAI,CAAC,CAAC,CAAA,CAAE,MAAA;AACzE,IAAA,MAAM,KAAA,GAAA,qBAAY,GAAA,CAAI,CAAC,GAAG,YAAA,EAAc,GAAG,WAAW,CAAC,CAAA,EAAE,IAAA;AACzD,IAAA,MAAM,UAAA,GAAa,KAAA,GAAQ,CAAA,GAAI,YAAA,GAAe,KAAA,GAAQ,CAAA;AAEtD,IAAA,IAAI,cAAc,SAAA,EAAW;AAC3B,MAAA,OAAO;AAAA,QACL,KAAA,EAAO,KAAA;AAAA,QACP,QAAQ,CAAA,gCAAA,EAAmC,MAAA,CAAO,QAAQ,KAAA,CAAM,CAAA,EAAG,EAAE,CAAC,CAAA,IAAA,CAAA;AAAA,QACtE,YAAY,MAAA,CAAO;AAAA,OACrB;AAAA,IACF;AAGA,IAAA,IAAI,OAAO,OAAA,CAAQ,WAAA,EAAY,KAAM,OAAA,CAAQ,aAAY,EAAG;AAC1D,MAAA,OAAO;AAAA,QACL,KAAA,EAAO,KAAA;AAAA,QACP,MAAA,EAAQ,CAAA,qBAAA,CAAA;AAAA,QACR,YAAY,MAAA,CAAO;AAAA,OACrB;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,OAAO,IAAA,EAAK;AACvB;AAGA,IAAM,cAAA,GAAiB,CAAA;AAGvB,IAAM,cAAA,GAAiB;AAAA,EACrB,mBAAA;AAAA,EACA,iBAAA;AAAA,EACA,kBAAA;AAAA,EACA,gBAAA;AAAA,EACA,aAAA;AAAA,EACA;AACF,CAAA;AAGA,IAAM,0BAAA,GAA6B,sCAAA;AAY5B,SAAS,WAAW,OAAA,EAAoC;AAE7D,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,IAAA,EAAK,CAAE,KAAA,CAAM,KAAK,CAAA,CAAE,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA;AACpE,EAAA,IAAI,KAAA,CAAM,SAAS,cAAA,EAAgB;AACjC,IAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,uCAAA,EAAwC;AAAA,EAC5E;AAGA,EAAA,KAAA,MAAW,WAAW,cAAA,EAAgB;AACpC,IAAA,IAAI,OAAA,CAAQ,IAAA,CAAK,OAAO,CAAA,EAAG;AACzB,MAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,iC
AAA,EAAkC;AAAA,IACtE;AAAA,EACF;AAGA,EAAA,IAAI,0BAAA,CAA2B,IAAA,CAAK,OAAO,CAAA,EAAG;AAC5C,IAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,iCAAA,EAAkC;AAAA,EACtE;AAEA,EAAA,OAAO,EAAE,UAAU,IAAA,EAAK;AAC1B;AAGA,IAAM,eAAA,GAAkB;AAAA,EACtB,8BAAA;AAAA;AAAA,EACA,8BAAA;AAAA;AAAA,EACA,yBAAA;AAAA;AAAA,EACA,2BAAA;AAAA;AAAA,EACA,2BAAA;AAAA;AAAA,EACA,0BAAA;AAAA;AAAA,EACA;AAAA;AACF,CAAA;AAYO,SAAS,aAAa,OAAA,EAAsC;AAEjE,EAAA,KAAA,MAAW,WAAW,eAAA,EAAiB;AACrC,IAAA,IAAI,OAAA,CAAQ,IAAA,CAAK,OAAO,CAAA,EAAG;AACzB,MAAA,OAAO,EAAE,YAAY,IAAA,EAAK;AAAA,IAC5B;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,UAAA,EAAY,KAAA,EAAO,MAAA,EAAQ,qCAAA,EAAsC;AAC5E;AAYA,eAAsB,aAAA,CACpB,UACA,OAAA,EACwB;AAExB,EAAA,MAAM,cAAA,GAAiB,WAAW,OAAO,CAAA;AACzC,EAAA,IAAI,CAAC,eAAe,QAAA,EAAU;AAC5B,IAAA,OAAO,EAAE,aAAA,EAAe,KAAA,EAAO,MAAA,EAAQ,eAAe,MAAA,EAAO;AAAA,EAC/D;AAGA,EAAA,MAAM,gBAAA,GAAmB,aAAa,OAAO,CAAA;AAC7C,EAAA,IAAI,CAAC,iBAAiB,UAAA,EAAY;AAChC,IAAA,OAAO,EAAE,aAAA,EAAe,KAAA,EAAO,MAAA,EAAQ,iBAAiB,MAAA,EAAO;AAAA,EACjE;AAGA,EAAA,MAAM,aAAA,GAAgB,MAAM,OAAA,CAAQ,QAAA,EAAU,OAAO,CAAA;AACrD,EAAA,IAAI,CAAC,cAAc,KAAA,EAAO;AACxB,IAAA,OAAO,EAAE,aAAA,EAAe,KAAA,EAAO,MAAA,EAAQ,cAAc,MAAA,EAAO;AAAA,EAC9D;AAEA,EAAA,OAAO,EAAE,eAAe,IAAA,EAAK;AAC/B;;;ACpLA,IAAM,wBAAA,GAA2B;AAAA,EAC/B,gBAAA;AAAA;AAAA,EACA,YAAA;AAAA;AAAA,EACA,eAAA;AAAA;AAAA,EACA,eAAA;AAAA;AAAA,EACA;AAAA;AACF,CAAA;AAWO,SAAS,qBAAqB,OAAA,EAAsD;AACzF,EAAA,MAAM,EAAE,QAAA,EAAU,OAAA,EAAQ,GAAI,OAAA;AAE9B,EAAA,IAAI,QAAA,CAAS,SAAS,CAAA,EAAG;AACvB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,CAAS,QAAQ,CAAA,EAAA,EAAK;AACxC,IAAA,MAAM,OAAA,GAAU,SAAS,CAAC,CAAA;AAC1B,IAAA,IAAI,CAAC,OAAA,EAAS;AAEd,IAAA,KAAA,MAAW,WAAW,wBAAA,EAA0B;AAC9C,MAAA,IAAI,OAAA,CAAQ,IAAA,CAAK,OAAO,CAAA,EAAG;AACzB,QAAA,OAAO;AAAA,UACL,OAAA,EAAS,CAAA,uBAAA,EAA0B,OAAA,CAAQ,MAAM,CAAA,CAAA;AAAA,UACjD,iBAAA,EAAmB,OAAA;AAAA,UACnB;AAAA,SACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,IAAA;AACT;AA6BO,SAAS,qBAAqB,OAAA,EAAqD;AACxF,EAAA,MAAM,EAAE,OAAM,GAAI,OAAA;AAElB,EAAA,IAAI,KAAA,CAAM,SAAS,CAAA,EAAG;AACpB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,IAAK,KAAA,CAAM,MAAA,GAAS,GAAG,CAAA,EAAA,EAAK;AAC1C,IAAA,MAAM,KAAA,GAAQ,MAAM,CAAC,CAAA;AACrB,IAAA,MAAM,MAAA,GAAS,KAAA,CAAM,CAAA,GAAI,CAAC,CAAA;AAC1B,IAAA,MAAM,KAAA,GAAQ,KAAA,CAAM,CAAA,GAAI,CAAC,CAAA;AAEzB,IAAA,IAAI,CAAC,KAAA,IAAS,CAAC,MAAA,IAAU,CAAC,KAAA,EAAO;AAGjC,IAAA,IACE,KAAA,CAAM,IAAA,KAAS,MAAA,CAAO,IAAA,IACtB,OAAO,IAAA,KAAS,KAAA,CAAM,IAAA,IACtB,KAAA,CAAM,OAAA,IACN,CAAC,MAAA,CAAO,OAAA,IACR,MAAM,OAAA,EACN;AACA,MAAA,OAAO;AAAA,QACL,MAAM,KAAA,CAAM,IAAA;AAAA,QACZ,OAAA,EAAS,CAAA,mBAAA,EAAsB,KAAA,CAAM,IAAI,CAAA;AAAA,OAC3C;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,IAAA;AACT;AAyBO,SAAS,kBAAkB,UAAA,EAAoD;AACpF,EAAA,IAAI,WAAW,MAAA,EAAQ;AACrB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,MAAM,KAAA,GAAQ,UAAA,CAAW,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA,CAAE,MAAA,CAAO,CAAC,IAAA,KAAS,IAAA,CAAK,IAAA,EAAK,CAAE,SAAS,CAAC,CAAA;AACnF,EAAA,MAAM,SAAA,GAAY,KAAA,CAAM,IAAA,CAAK,CAAC,IAAA,KAAS,oBAAA,CAAqB,IAAA,CAAK,IAAI,CAAC,CAAA,IAAK,KAAA,CAAM,CAAC,CAAA,IAAK,EAAA;AAEvF,EAAA,OAAO;AAAA,IACL,UAAU,UAAA,CAAW,QAAA;AAAA,IACrB,aAAa,UAAA,CAAW,MAAA;AAAA,IACxB,OAAA,EAAS,mBAAmB,UAAA,CAAW,QAAQ,KAAK,SAAA,CAAU,KAAA,CAAM,CAAA,EAAG,GAAG,CAAC,CAAA;AAAA,GAC7E;AACF;ACrGA,eAAsB,gBAAA,CACpB,UACA,KAAA,EACiC;AACjC,EAAA,MAAM,QAAA,GAAW,YAAY,KAAK,CAAA;AAClC,EAAA,IAAI,CAAC,QAAA,EAAU;AACb,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,MAAM,EAAE,OAAA,EAAS,MAAA,EAAQ,eAAA,EAAgB,GAAI,QAAA;AAG7C,EAAA,MAAM,OAAA,GAAU,MAAM,aAAA,CAAc,QAAA,EAAU,eAAe,CAAA;AAC7D,EAAA,IAAI,CAAC,QAAQ,aAAA,EAAe;AAC1B,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,EAAE,OAAA,EAAS,MAAA,EAAQ,eAAA,EAAgB
;AAC5C;AAYA,SAAS,YAAY,KAAA,EAA4C;AAC/D,EAAA,QAAQ,MAAM,IAAA;AAAM,IAClB,KAAK,MAAA;AACH,MAAA,OAAO,wBAAA,CAAyB,MAAM,IAAI,CAAA;AAAA,IAC5C,KAAK,MAAA;AACH,MAAA,OAAO,wBAAA,CAAyB,MAAM,IAAI,CAAA;AAAA,IAC5C,KAAK,MAAA;AACH,MAAA,OAAO,qBAAA,CAAsB,MAAM,IAAI,CAAA;AAAA;AAE7C;AAKA,SAAS,yBAAyB,IAAA,EAA6C;AAC7E,EAAA,MAAM,MAAA,GAAS,qBAAqB,IAAI,CAAA;AACxC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO;AAAA,IACL,SAAS,MAAA,CAAO,OAAA;AAAA,IAChB,MAAA,EAAQ,iBAAA;AAAA,IACR,iBAAiB,MAAA,CAAO;AAAA,GAC1B;AACF;AAKA,SAAS,yBAAyB,IAAA,EAAwC;AACxE,EAAA,MAAM,MAAA,GAAS,qBAAqB,IAAI,CAAA;AACxC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO;AAAA,IACL,SAAS,MAAA,CAAO,OAAA;AAAA,IAChB,MAAA,EAAQ,iBAAA;AAAA;AAAA,IAER,eAAA,EAAiB,CAAA,MAAA,EAAS,MAAA,CAAO,IAAI,CAAA,iCAAA;AAAA,GACvC;AACF;AAKA,SAAS,sBAAsB,IAAA,EAAuC;AACpE,EAAA,MAAM,MAAA,GAAS,kBAAkB,IAAI,CAAA;AACrC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO;AAAA,IACL,SAAS,MAAA,CAAO,OAAA;AAAA,IAChB,MAAA,EAAQ,cAAA;AAAA,IACR,iBAAiB,MAAA,CAAO;AAAA,GAC1B;AACF;AAGA,IAAM,8BAAc,IAAI,GAAA,CAAY,CAAC,MAAA,EAAQ,MAAA,EAAQ,MAAM,CAAC,CAAA;AAS5D,eAAsB,eAAe,QAAA,EAA2C;AAC9E,EAAA,MAAM,OAAA,GAAU,MAAS,EAAA,CAAA,QAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AACnD,EAAA,MAAM,IAAA,GAAO,IAAA,CAAK,KAAA,CAAM,OAAO,CAAA;AAE/B,EAAA,IAAI,CAAC,WAAA,CAAY,GAAA,CAAI,IAAA,CAAK,IAAI,CAAA,EAAG;AAC/B,IAAA,MAAM,IAAI,KAAA,CAAM,CAAA,wBAAA,EAA2B,IAAA,CAAK,IAAI,CAAA,kCAAA,CAAoC,CAAA;AAAA,EAC1F;AAEA,EAAA,OAAO,IAAA;AACT;;;ACjKO,SAAS,YAAY,KAAA,EAAuB;AACjD,EAAA,IAAI,KAAA,KAAU,GAAG,OAAO,KAAA;AACxB,EAAA,IAAI,KAAA,GAAQ,IAAA,EAAM,OAAO,CAAA,EAAG,KAAK,CAAA,EAAA,CAAA;AACjC,EAAA,MAAM,KAAK,KAAA,GAAQ,IAAA;AACnB,EAAA,IAAI,KAAK,IAAA,EAAM,OAAO,GAAG,EAAA,CAAG,OAAA,CAAQ,CAAC,CAAC,CAAA,GAAA,CAAA;AACtC,EAAA,MAAM,KAAK,EAAA,GAAK,IAAA;AAChB,EAAA,OAAO,CAAA,EAAG,EAAA,CAAG,OAAA,CAAQ,CAAC,CAAC,CAAA,GAAA,CAAA;AACzB;AAUO,SAAS,UAAA,CAAW,OAAe,IAAA,EAAsB;AAC9D,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,KAAA,EAAO,EAAE,CAAA;AACjC,EAAA,IAAI,MAAA,CAAO,KAAA,CAAM,MAAM,CAAA,IAAK,UAAU,CAAA,EAAG;AACvC,IAAA,MAAM,IAAI,KAAA,CAAM,CAAA,QAAA,EAAW,IAAI,CAAA,4BAAA,CAA8B,CAAA;AAAA,EAC/D;AACA,EAAA,OAAO,MAAA;AACT;AAOO,SAAS,WAAA,GAAsB;AACpC,EAAA,OAAO,OAAA,CAAQ,GAAA,CAAI,qBAAqB,CAAA,IAAK,QAAQ,GAAA,EAAI;AAC3D;ACzBO,IAAM,SAAA,GAAY,iFAAA;AAMlB,IAAM,cAAA,GAAiB,+CAAA;AAG9B,IAAM,iBAAA,GAAoBF,IAAAA,CAAK,OAAA,EAAQ,EAAG,mBAAmB,QAAQ,CAAA;AAO9D,SAAS,gBAAA,GAA4B;AAC1C,EAAA,OAAO,UAAA,CAAWA,IAAAA,CAAK,iBAAA,EAAmB,cAAc,CAAC,CAAA;AAC3D;AAkBA,eAAsB,YAAA,CAAa,OAAA,GAA6B,EAAC,EAAoB;AACnF,EAAA,MAAM,EAAE,GAAA,GAAM,IAAA,EAAK,GAAI,OAAA;AACvB,EAAA,OAAO,gBAAA,CAAiB,SAAA,EAAW,EAAE,GAAA,EAAK,CAAA;AAC5C;;;ACrCA,IAAI,gBAAA,GAAiD,IAAA;AAgCrD,eAAsB,YAAA,GAA+C;AACnE,EAAA,IAAI,kBAAkB,OAAO,gBAAA;AAG7B,EAAA,MAAM,YAAY,MAAM,YAAA,CAAa,EAAE,GAAA,EAAK,MAAM,CAAA;AAGlD,EAAA,MAAM,KAAA,GAAQ,MAAM,QAAA,EAAS;AAC7B,EAAA,MAAM,QAAQ,MAAM,KAAA,CAAM,SAAA,CAAU,EAAE,WAAW,CAAA;AACjD,EAAA,gBAAA,GAAmB,MAAM,MAAM,sBAAA,EAAuB;AAEtD,EAAA,OAAO,gBAAA;AACT;AAwEA,eAAsB,UAAU,IAAA,EAAiC;AAC/D,EAAA,MAAM,GAAA,GAAM,MAAM,YAAA,EAAa;AAC/B,EAAA,MAAM,MAAA,GAAS,MAAM,GAAA,CAAI,eAAA,CAAgB,IAAI,CAAA;AAC7C,EAAA,OAAO,KAAA,CAAM,IAAA,CAAK,MAAA,CAAO,MAAM,CAAA;AACjC;;;AC7HO,SAAS,gBAAA,CAAiB,GAAa,CAAA,EAAqB;AACjE,EAAA,IAAI,CAAA,CAAE,MAAA,KAAW,CAAA,CAAE,MAAA,EAAQ;AACzB,IAAA,MAAM,IAAI,MAAM,+BAA+B,CAAA;AAAA,EACjD;AAEA,EAAA,IAAI,UAAA,GAAa,CAAA;AACjB,EAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,EAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,QAAQ,CAAA,EAAA,EAAK;AACjC,IAAA,UAAA,IAAc,CAAA,CAAE,CAAC,CAAA,GAAK,CAAA,CAAE,CAAC,CAAA;AACzB,IAAA,KAAA,IAAS,CAAA,CAAE,CAAC,CAAA,GAAK,CAAA,CAAE,CAAC,CAAA;AACpB,IAAA,KAAA,IAAS,CAAA
,CAAE,CAAC,CAAA,GAAK,CAAA,CAAE,CAAC,CAAA;AAAA,EACtB;AAEA,EAAA,MAAM,YAAY,IAAA,CAAK,IAAA,CAAK,KAAK,CAAA,GAAI,IAAA,CAAK,KAAK,KAAK,CAAA;AACpD,EAAA,IAAI,SAAA,KAAc,GAAG,OAAO,CAAA;AAE5B,EAAA,OAAO,UAAA,GAAa,SAAA;AACtB;AAeA,IAAM,aAAA,GAAgB,EAAA;AAOtB,eAAsB,YAAA,CACpB,QAAA,EACA,KAAA,EACA,OAAA,EACyB;AACzB,EAAA,MAAM,KAAA,GAAQ,SAAS,KAAA,IAAS,aAAA;AAEhC,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAC9C,EAAA,IAAI,OAAA,CAAQ,MAAA,KAAW,CAAA,EAAG,OAAO,EAAC;AAGlC,EAAA,MAAM,WAAA,GAAc,MAAM,SAAA,CAAU,KAAK,CAAA;AAGzC,EAAA,MAAM,SAAyB,EAAC;AAChC,EAAA,KAAA,MAAW,UAAU,OAAA,EAAS;AAC5B,IAAA,MAAM,aAAa,CAAA,EAAG,MAAA,CAAO,OAAO,CAAA,CAAA,EAAI,OAAO,OAAO,CAAA,CAAA;AACtD,IAAA,MAAM,IAAA,GAAO,WAAA,CAAY,MAAA,CAAO,OAAA,EAAS,OAAO,OAAO,CAAA;AAGvD,IAAA,IAAI,YAAA,GAAe,kBAAA,CAAmB,QAAA,EAAU,MAAA,CAAO,IAAI,IAAI,CAAA;AAE/D,IAAA,IAAI,CAAC,YAAA,EAAc;AAEjB,MAAA,YAAA,GAAe,MAAM,UAAU,UAAU,CAAA;AACzC,MAAA,kBAAA,CAAmB,QAAA,EAAU,MAAA,CAAO,EAAA,EAAI,YAAA,EAAc,IAAI,CAAA;AAAA,IAC5D;AAEA,IAAA,MAAM,KAAA,GAAQ,gBAAA,CAAiB,WAAA,EAAa,YAAY,CAAA;AACxD,IAAA,MAAA,CAAO,IAAA,CAAK,EAAE,MAAA,EAAQ,KAAA,EAAO,CAAA;AAAA,EAC/B;AAGA,EAAA,MAAA,CAAO,KAAK,CAAC,CAAA,EAAG,MAAM,CAAA,CAAE,KAAA,GAAQ,EAAE,KAAK,CAAA;AACvC,EAAA,OAAO,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AAC9B;;;AC1EA,IAAM,sBAAA,GAAyB,EAAA;AAC/B,IAAM,mBAAA,GAAsB,GAAA;AAC5B,IAAM,qBAAA,GAAwB,CAAA;AAC9B,IAAM,kBAAA,GAAqB,GAAA;AAC3B,IAAM,aAAA,GAAgB,GAAA;AACtB,IAAM,kBAAA,GAAqB,GAAA;AAMpB,SAAS,cAAc,MAAA,EAAwB;AACpD,EAAA,QAAQ,OAAO,QAAA;AAAU,IACvB,KAAK,MAAA;AACH,MAAA,OAAO,mBAAA;AAAA,IACT,KAAK,QAAA;AACH,MAAA,OAAO,qBAAA;AAAA,IACT,KAAK,KAAA;AACH,MAAA,OAAO,kBAAA;AAAA,IACT;AACE,MAAA,OAAO,qBAAA;AAAA;AAEb;AAMO,SAAS,aAAa,MAAA,EAAwB;AACnD,EAAA,MAAM,OAAA,GAAU,IAAI,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA;AACvC,EAAA,MAAM,GAAA,uBAAU,IAAA,EAAK;AACrB,EAAA,MAAM,KAAA,GAAQ,GAAA,CAAI,OAAA,EAAQ,GAAI,QAAQ,OAAA,EAAQ;AAC9C,EAAA,MAAM,UAAU,IAAA,CAAK,KAAA,CAAM,SAAS,GAAA,GAAO,EAAA,GAAK,KAAK,EAAA,CAAG,CAAA;AAExD,EAAA,OAAO,OAAA,IAAW,yBAAyB,aAAA,GAAgB,CAAA;AAC7D;AAMO,SAAS,kBAAkB,MAAA,EAAwB;AACxD,EAAA,OAAO,MAAA,CAAO,YAAY,kBAAA,GAAqB,CAAA;AACjD;AAMO,SAAS,cAAA,CAAe,QAAgB,gBAAA,EAAkC;AAC/E,EAAA,OACE,gBAAA,GAAmB,cAAc,MAAM,CAAA,GAAI,aAAa,MAAM,CAAA,GAAI,kBAAkB,MAAM,CAAA;AAE9F;AAMO,SAAS,YAAY,OAAA,EAAyC;AACnE,EAAA,OAAO,OAAA,CACJ,GAAA,CAAI,CAAC,MAAA,MAAY;AAAA,IAChB,GAAG,MAAA;AAAA,IACH,UAAA,EAAY,cAAA,CAAe,MAAA,CAAO,MAAA,EAAQ,OAAO,KAAK;AAAA,GACxD,CAAE,CAAA,CACD,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAA,CAAO,CAAA,CAAE,UAAA,IAAc,CAAA,KAAM,CAAA,CAAE,UAAA,IAAc,CAAA,CAAE,CAAA;AAC7D;;;ACzEA,IAAMG,cAAAA,GAAgB,CAAA;AAQtB,SAAS,aAAa,MAAA,EAAsC;AAC1D,EAAA,OAAO,MAAA,CAAO,IAAA,KAAS,MAAA,IAAU,MAAA,CAAO,QAAA,KAAa,MAAA;AACvD;AAaA,eAAsB,kBAAA,CACpB,QAAA,EACA,KAAA,GAAgBA,cAAAA,EACO;AACvB,EAAA,MAAM,EAAE,OAAA,EAAS,UAAA,EAAW,GAAI,MAAM,YAAY,QAAQ,CAAA;AAG1D,EAAA,MAAM,sBAAsB,UAAA,CAAW,MAAA;AAAA,IACrC,CAAC,WACC,YAAA,CAAa,MAAM,KAAK,MAAA,CAAO,QAAA,KAAa,UAAU,MAAA,CAAO;AAAA,GACjE;AAGA,EAAA,mBAAA,CAAoB,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM;AACjC,IAAA,MAAM,QAAQ,IAAI,IAAA,CAAK,CAAA,CAAE,OAAO,EAAE,OAAA,EAAQ;AAC1C,IAAA,MAAM,QAAQ,IAAI,IAAA,CAAK,CAAA,CAAE,OAAO,EAAE,OAAA,EAAQ;AAC1C,IAAA,OAAO,KAAA,GAAQ,KAAA;AAAA,EACjB,CAAC,CAAA;AAGD,EAAA,OAAO,mBAAA,CAAoB,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AAC3C;;;AC5CA,IAAMA,cAAAA,GAAgB,CAAA;AAqBtB,eAAsB,eAAA,CACpB,QAAA,EACA,QAAA,EACA,KAAA,GAAgBA,cAAAA,EACc;AAE9B,EAAA,MAAM,MAAA,GAAS,MAAM,YAAA,CAAa,QAAA,EAAU,UAAU,EAAE,KAAA,EAAO,KAAA,GAAQ,CAAA,EAAG,CAAA;AAG1E,EAAA,MAAM,MAAA,GAAS,YAAY,MAAM,CAAA;AAGjC,EAAA,MAAM,UAAA,GAAa,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AAGxC,EAAA,MAAM,OAAA,GAAU,mBAAmB,UAAU,CAAA;AAE7C,EAAA,OAAO,EAAE,OAAA,EAAS,UAAA,EAAY,OAAA,EAAQ;AACxC;AAWO,SAAS,mBAAmB,OAAA,EAAiC;AA
ClE,EAAA,MAAM,MAAA,GAAS,2BAAA,GAAuB,QAAA,CAAI,MAAA,CAAO,EAAE,CAAA;AAEnD,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxB,IAAA,OAAO,GAAG,MAAM;AAAA,wCAAA,CAAA;AAAA,EAClB;AAEA,EAAA,MAAM,WAAA,GAAc,OAAA,CAAQ,GAAA,CAAI,CAAC,GAAG,CAAA,KAAM;AACxC,IAAA,MAAM,MAAA,GAAS,CAAA,EAAG,CAAA,GAAI,CAAC,CAAA,CAAA,CAAA;AACvB,IAAA,MAAM,OAAA,GAAU,EAAE,MAAA,CAAO,OAAA;AACzB,IAAA,OAAO,CAAA,EAAG,MAAM,CAAA,CAAA,EAAI,OAAO,CAAA,CAAA;AAAA,EAC7B,CAAC,CAAA;AAED,EAAA,OAAO,GAAG,MAAM;AAAA,EAAK,WAAA,CAAY,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA;AAC7C;;;ACWO,IAAM,OAAA,GAAU,OAAA;ACrEhB,IAAM,WAAA,GAAc,yBAAA;AAGpB,IAAM,mBAAA,GAAsB,GAAA;AAG5B,IAAM,gBAAA,GAAmB,EAAA;AAGhC,IAAM,UAAA,GAAa,GAAA,GAAO,EAAA,GAAK,EAAA,GAAK,EAAA;AAGpC,IAAM,kBAAA,GAAqB,CAAA;AAG3B,IAAM,gBAAA,GAAmB,CAAA;AAkBlB,SAAS,cAAA,CAAe,UAAkB,IAAA,EAAoB;AACnE,EAAA,MAAM,IAAA,GAAO,KAAK,WAAA,EAAY;AAC9B,EAAA,MAAM,KAAA,GAAQ,OAAO,IAAA,CAAK,QAAA,KAAa,kBAAkB,CAAA,CAAE,QAAA,CAAS,gBAAA,EAAkB,GAAG,CAAA;AACzF,EAAA,OAAOH,KAAK,QAAA,EAAU,WAAA,EAAa,GAAG,IAAI,CAAA,CAAA,EAAI,KAAK,CAAA,MAAA,CAAQ,CAAA;AAC7D;AAMA,eAAe,mBACb,QAAA,EAC0E;AAC1E,EAAA,MAAM,QAAA,GAAWA,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC5C,EAAA,IAAI,OAAA;AACJ,EAAA,IAAI;AACF,IAAA,OAAA,GAAU,MAAMI,QAAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AAAA,EAC5C,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,EAAC;AAAA,EACV;AAEA,EAAA,MAAM,UAA2E,EAAC;AAClF,EAAA,KAAA,MAAW,IAAA,IAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA,EAAG;AACtC,IAAA,MAAM,OAAA,GAAU,KAAK,IAAA,EAAK;AAC1B,IAAA,IAAI,CAAC,OAAA,EAAS;AAEd,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,OAAO,CAAA;AACjC,MAAA,OAAA,CAAQ,IAAA,CAAK,EAAE,IAAA,EAAM,OAAA,EAAS,QAAQ,CAAA;AAAA,IACxC,CAAA,CAAA,MAAQ;AACN,MAAA,OAAA,CAAQ,KAAK,EAAE,IAAA,EAAM,OAAA,EAAS,MAAA,EAAQ,MAAM,CAAA;AAAA,IAC9C;AAAA,EACF;AACA,EAAA,OAAO,OAAA;AACT;AAKA,eAAsB,gBAAgB,QAAA,EAAmC;AACvE,EAAA,MAAM,KAAA,GAAQ,MAAM,kBAAA,CAAmB,QAAQ,CAAA;AAC/C,EAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,EAAA,KAAA,MAAW,EAAE,MAAA,EAAO,IAAK,KAAA,EAAO;AAC9B,IAAA,IAAI,MAAA,IAAU,MAAA,CAAO,SAAS,CAAA,KAAM,IAAA,EAAM;AACxC,MAAA,KAAA,EAAA;AAAA,IACF;AAAA,EACF;AACA,EAAA,OAAO,KAAA;AACT;AAKA,eAAsB,gBAAgB,QAAA,EAAoC;AACxE,EAAA,MAAM,KAAA,GAAQ,MAAM,eAAA,CAAgB,QAAQ,CAAA;AAC5C,EAAA,OAAO,KAAA,IAAS,mBAAA;AAClB;AAMA,eAAsB,yBAAyB,QAAA,EAAmC;AAChF,EAAA,MAAM,QAAA,GAAWJ,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC5C,EAAA,MAAM,WAAW,QAAA,GAAW,MAAA;AAG5B,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAG9C,EAAA,MAAM,cAAA,GAAiB,MAAM,eAAA,CAAgB,QAAQ,CAAA;AAGrD,EAAA,MAAMK,MAAMJ,OAAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AAGlD,EAAA,MAAM,KAAA,GAAQ,QAAQ,GAAA,CAAI,CAAC,WAAW,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA,GAAI,IAAI,CAAA;AACnE,EAAA,MAAM,UAAU,QAAA,EAAU,KAAA,CAAM,IAAA,CAAK,EAAE,GAAG,OAAO,CAAA;AAGjD,EAAA,MAAM,MAAA,CAAO,UAAU,QAAQ,CAAA;AAE/B,EAAA,OAAO,cAAA;AACT;AAUA,SAAS,aAAA,CAAc,QAAgB,GAAA,EAAoB;AACzD,EAAA,MAAM,OAAA,GAAU,IAAI,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA;AACvC,EAAA,MAAM,KAAA,GAAQ,GAAA,CAAI,OAAA,EAAQ,GAAI,QAAQ,OAAA,EAAQ;AAC9C,EAAA,MAAM,UAAU,KAAA,GAAQ,UAAA;AAGxB,EAAA,OAAO,UAAU,gBAAA,KAAqB,MAAA,CAAO,cAAA,KAAmB,MAAA,IAAa,OAAO,cAAA,KAAmB,CAAA,CAAA;AACzG;AAOA,eAAsB,kBAAkB,QAAA,EAAmC;AACzE,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAC9C,EAAA,MAAM,GAAA,uBAAU,IAAA,EAAK;AAErB,EAAA,MAAM,YAAsB,EAAC;AAC7B,EAAA,MAAM,SAAmB,EAAC;AAE1B,EAAA,KAAA,MAAW,UAAU,OAAA,EAAS;AAC5B,IAAA,IAAI,aAAA,CAAc,MAAA,EAAQ,GAAG,CAAA,EAAG;AAC9B,MAAA,SAAA,CAAU,KAAK,MAAM,CAAA;AAAA,IACvB,CAAA,MAAO;AACL,MAAA,MAAA,CAAO,KAAK,MAAM,CAAA;AAAA,IACpB;AAAA,EACF;AAEA,EAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAC1B,IAAA,OAAO,CAAA;AAAA,EACT;AAGA,EAAA,MAAM,aAAA,uBAAoB,GAAA,EAAsB;AAChD,EAAA,KAAA,MAAW,UAAU,SAAA,EAAW;AAC9B,IAAA,MAAM,OAAA,GAAU,IAAI,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA;AACvC,IAAA,MAAM,WAAA,GAAc,cAAA,CAAe,QAAA,EAAU,OAAO,CAAA;
AACpD,IAAA,MAAM,KAAA,GAAQ,aAAA,CAAc,GAAA,CAAI,WAAW,KAAK,EAAC;AACjD,IAAA,KAAA,CAAM,KAAK,MAAM,CAAA;AACjB,IAAA,aAAA,CAAc,GAAA,CAAI,aAAa,KAAK,CAAA;AAAA,EACtC;AAGA,EAAA,MAAM,UAAA,GAAaD,IAAAA,CAAK,QAAA,EAAU,WAAW,CAAA;AAC7C,EAAA,MAAMK,KAAAA,CAAM,UAAA,EAAY,EAAE,SAAA,EAAW,MAAM,CAAA;AAG3C,EAAA,KAAA,MAAW,CAAC,WAAA,EAAa,cAAc,CAAA,IAAK,aAAA,EAAe;AACzD,IAAA,MAAMC,MAAAA,GAAQ,cAAA,CAAe,GAAA,CAAI,CAAC,CAAA,KAAM,IAAA,CAAK,SAAA,CAAU,CAAC,CAAA,GAAI,IAAI,CAAA,CAAE,IAAA,CAAK,EAAE,CAAA;AACzE,IAAA,MAAMC,UAAAA,CAAW,WAAA,EAAaD,MAAAA,EAAO,OAAO,CAAA;AAAA,EAC9C;AAGA,EAAA,MAAM,QAAA,GAAWN,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC5C,EAAA,MAAM,WAAW,QAAA,GAAW,MAAA;AAC5B,EAAA,MAAMK,MAAMJ,OAAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AAElD,EAAA,MAAM,KAAA,GAAQ,OAAO,GAAA,CAAI,CAAC,WAAW,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA,GAAI,IAAI,CAAA;AAClE,EAAA,MAAM,UAAU,QAAA,EAAU,KAAA,CAAM,IAAA,CAAK,EAAE,GAAG,OAAO,CAAA;AACjD,EAAA,MAAM,MAAA,CAAO,UAAU,QAAQ,CAAA;AAE/B,EAAA,OAAO,SAAA,CAAU,MAAA;AACnB;AAKA,eAAsB,QAAQ,QAAA,EAA0C;AAEtE,EAAA,MAAM,gBAAA,GAAmB,MAAM,eAAA,CAAgB,QAAQ,CAAA;AAGvD,EAAA,MAAM,QAAA,GAAW,MAAM,iBAAA,CAAkB,QAAQ,CAAA;AAGjD,EAAA,MAAM,sBAAA,GAAyB,MAAM,eAAA,CAAgB,QAAQ,CAAA;AAC7D,EAAA,MAAM,yBAAyB,QAAQ,CAAA;AAIvC,EAAA,MAAM,iBAAA,GAAoB,QAAA,GAAW,CAAA,GAAI,gBAAA,GAAmB,sBAAA;AAG5D,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAE9C,EAAA,OAAO;AAAA,IACL,QAAA;AAAA,IACA,iBAAA;AAAA,IACA,kBAAkB,OAAA,CAAQ;AAAA,GAC5B;AACF;;;AC/MA,IAAM,kBAAA,GAAqB,CAAA;AAAA;;AAAA;AAAA,mFAAA,CAAA;AAO3B,IAAM,wBAAA,GAA2B,CAAA;AAAA;AAAA;;AAAA;AAAA,CAAA;AAYjC,IAAM,kBAAA,GAAqB,6BAAA;AAG3B,IAAM,kBAAA,GAAqB;AAAA,EACzB,OAAA,EAAS,wBAAA;AAAA,EACT,KAAA,EAAO;AAAA,IACL;AAAA,MACE,IAAA,EAAM,SAAA;AAAA,MACN,OAAA,EAAS;AAAA;AACX;AAEJ,CAAA;AAGA,IAAM,WAAA,GAAc,kCAAA;AA4BpB,IAAM,GAAA,GAAM;AAAA,EACV,OAAA,EAAS,CAAC,GAAA,KAAsB,OAAA,CAAQ,IAAI,KAAA,CAAM,KAAA,CAAM,MAAM,CAAA,EAAG,GAAG,CAAA;AAAA,EACpE,KAAA,EAAO,CAAC,GAAA,KAAsB,OAAA,CAAQ,MAAM,KAAA,CAAM,GAAA,CAAI,SAAS,CAAA,EAAG,GAAG,CAAA;AAAA,EACrE,IAAA,EAAM,CAAC,GAAA,KAAsB,OAAA,CAAQ,IAAI,KAAA,CAAM,IAAA,CAAK,QAAQ,CAAA,EAAG,GAAG,CAAA;AAAA,EAClE,IAAA,EAAM,CAAC,GAAA,KAAsB,OAAA,CAAQ,IAAI,KAAA,CAAM,MAAA,CAAO,QAAQ,CAAA,EAAG,GAAG;AACtE,CAAA;AAWA,SAAS,cAAc,GAAA,EAA0B;AAC/C,EAAA,MAAM,IAAA,GAAO,IAAI,eAAA,EAAgB;AACjC,EAAA,OAAO;AAAA,IACL,OAAA,EAAS,KAAK,OAAA,IAAW,KAAA;AAAA,IACzB,KAAA,EAAO,KAAK,KAAA,IAAS;AAAA,GACvB;AACF;AAGA,IAAM,oBAAA,GAAuB,IAAA;AAG7B,IAAM,kBAAA,GAAqB,IAAA;AAG3B,IAAM,wBAAA,GAA2B,GAAA;AAGjC,IAAM,sBAAA,GAAyB,EAAA;AAG/B,IAAM,kBAAA,GAAqB,CAAA;AAG3B,IAAM,wBAAA,GAA2B,CAAA;AAGjC,IAAM,kBAAA,GAAqB,CAAA;AAkB3B,SAAS,qBAAA,CAAsB,OAAA,EAAiB,OAAA,EAAiB,SAAA,EAA4B;AAC3F,EAAA,OAAO;AAAA,IACL,EAAA,EAAI,WAAW,OAAO,CAAA;AAAA,IACtB,IAAA,EAAM,OAAA;AAAA,IACN,OAAA;AAAA,IACA,OAAA;AAAA,IACA,MAAM,EAAC;AAAA,IACP,MAAA,EAAQ,QAAA;AAAA,IACR,OAAA,EAAS,EAAE,IAAA,EAAM,SAAA,EAAW,QAAQ,gBAAA,EAAiB;AAAA,IACrD,OAAA,EAAA,iBAAS,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAAA,IAChC,SAAA;AAAA,IACA,YAAY,EAAC;AAAA,IACb,SAAS;AAAC,GACZ;AACF;AAKA,SAAS,iBAAA,CAAkB,QAAgB,KAAA,EAAsB;AAC/D,EAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU;AAAA,IACzB,IAAI,MAAA,CAAO,EAAA;AAAA,IACX,SAAS,MAAA,CAAO,OAAA;AAAA,IAChB,SAAS,MAAA,CAAO,OAAA;AAAA,IAChB,MAAM,MAAA,CAAO,IAAA;AAAA,IACb;AAAA,GACD,CAAC,CAAA;AACJ;AAKA,SAAS,qBAAqB,MAAA,EAAsB;AAClD,EAAA,OAAA,CAAQ,IAAI,kBAAkB,CAAA;AAC9B,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,MAAA,EAAS,MAAA,CAAO,EAAE,CAAA,CAAE,CAAA;AAChC,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,WAAA,EAAc,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1C,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,WAAA,EAAc,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1C,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,QAAA,EAAW,MAAA,CAAO,IAAI,CAAA,CAAE,CAAA;AACpC,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,QAAA,EAAW,MAAA,CAAO,IAAA,CAAK,MAAA,G
AAS,CAAA,GAAI,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,IAAI,CAAA,GAAI,QAAQ,CAAA,CAAE,CAAA;AACnF,EAAA,OAAA,CAAQ,IAAI,2BAA2B,CAAA;AACzC;AAKA,SAAS,yBAAA,CAA0B,QAAyB,SAAA,EAA4B;AACtF,EAAA,OAAO;AAAA,IACL,EAAA,EAAI,UAAA,CAAW,MAAA,CAAO,eAAe,CAAA;AAAA,IACrC,IAAA,EAAM,OAAA;AAAA,IACN,SAAS,MAAA,CAAO,OAAA;AAAA,IAChB,SAAS,MAAA,CAAO,eAAA;AAAA,IAChB,MAAM,EAAC;AAAA,IACP,QAAQ,MAAA,CAAO,MAAA;AAAA,IACf,OAAA,EAAS,EAAE,IAAA,EAAM,SAAA,EAAW,QAAQ,cAAA,EAAe;AAAA,IACnD,OAAA,EAAA,iBAAS,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAAA,IAChC,SAAA;AAAA,IACA,YAAY,EAAC;AAAA,IACb,SAAS;AAAC,GACZ;AACF;AASA,eAAe,iBAAA,GAAiD;AAC9D,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,MAAM,OAAO,SAAc,CAAA;AAC7C,EAAA,IAAI,CAAC,MAAM,KAAA,EAAO;AAChB,IAAA,MAAM,SAAmB,EAAC;AAC1B,IAAA,WAAA,MAAiB,SAAS,KAAA,EAAO;AAC/B,MAAA,MAAA,CAAO,KAAK,KAAe,CAAA;AAAA,IAC7B;AACA,IAAA,OAAO,OAAO,MAAA,CAAO,MAAM,EAAE,QAAA,CAAS,OAAO,EAAE,IAAA,EAAK;AAAA,EACtD;AACA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,oBAAoB,OAAA,EAAyD;AACpF,EAAA,MAAM,UAAA,GAAa;AAAA,IACjB,OAAA,EAAS,OAAA,CAAQ,GAAA,CAAI,CAAC,CAAA,MAAO;AAAA,MAC3B,EAAA,EAAI,EAAE,MAAA,CAAO,EAAA;AAAA,MACb,OAAA,EAAS,EAAE,MAAA,CAAO,OAAA;AAAA,MAClB,WAAW,CAAA,CAAE,KAAA;AAAA,MACb,MAAA,EAAQ,EAAE,MAAA,CAAO;AAAA,KACnB,CAAE,CAAA;AAAA,IACF,OAAO,OAAA,CAAQ;AAAA,GACjB;AACA,EAAA,OAAA,CAAQ,GAAA,CAAI,IAAA,CAAK,SAAA,CAAU,UAAU,CAAC,CAAA;AACxC;AAKA,SAAS,oBAAA,CAAqB,SAAmD,KAAA,EAAsB;AACrG,EAAA,OAAA,CAAQ,IAAI,oBAAoB,CAAA;AAChC,EAAA,OAAA,CAAQ,IAAI,0BAA0B,CAAA;AAEtC,EAAA,OAAA,CAAQ,OAAA,CAAQ,CAAC,IAAA,EAAM,CAAA,KAAM;AAC3B,IAAA,MAAM,MAAM,CAAA,GAAI,CAAA;AAChB,IAAA,OAAA,CAAQ,IAAI,CAAA,EAAG,GAAG,CAAA,EAAA,EAAK,KAAA,CAAM,KAAK,CAAA,CAAA,EAAI,IAAA,CAAK,MAAA,CAAO,EAAE,GAAG,CAAC,CAAA,CAAA,EAAI,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AACjF,IAAA,OAAA,CAAQ,IAAI,CAAA,gBAAA,EAAmB,IAAA,CAAK,MAAM,OAAA,CAAQ,wBAAwB,CAAC,CAAA,CAAE,CAAA;AAC7E,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,aAAA,EAAgB,IAAA,CAAK,MAAA,CAAO,MAAM,CAAA,CAAE,CAAA;AAChD,IAAA,OAAA,CAAQ,GAAA,EAAI;AAAA,EACd,CAAC,CAAA;AAED,EAAA,IAAI,CAAC,KAAA,EAAO;AACV,IAAA,OAAA,CAAQ,IAAI,KAAK,CAAA;AACjB,IAAA,OAAA,CAAQ,IAAI,4CAA4C,CAAA;AAAA,EAC1D;AACF;AASA,SAAS,yBAAA,CAA0B,SAAmB,KAAA,EAAsB;AAC1E,EAAA,OAAA,CAAQ,IAAI,sCAAsC,CAAA;AAElD,EAAA,OAAA,CAAQ,OAAA,CAAQ,CAAC,MAAA,EAAQ,CAAA,KAAM;AAC7B,IAAA,MAAM,MAAM,CAAA,GAAI,CAAA;AAChB,IAAA,MAAM,IAAA,GAAO,MAAA,CAAO,OAAA,CAAQ,KAAA,CAAM,GAAG,sBAAsB,CAAA;AAE3D,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,EAAG,GAAG,CAAA,EAAA,EAAK,MAAM,IAAA,CAAK,CAAA,CAAA,EAAI,MAAA,CAAO,EAAE,CAAA,CAAA,CAAG,CAAC,CAAA,CAAA,EAAI,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AACvE,IAAA,OAAA,CAAQ,IAAI,CAAA,aAAA,EAAgB,MAAA,CAAO,MAAM,CAAA,EAAA,EAAK,IAAI,CAAA,CAAA,CAAG,CAAA;AACrD,IAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAA,GAAS,CAAA,EAAG;AAC1B,MAAA,OAAA,CAAQ,IAAI,CAAA,WAAA,EAAc,MAAA,CAAO,KAAK,IAAA,CAAK,IAAI,CAAC,CAAA,CAAE,CAAA;AAAA,IACpD;AACA,IAAA,OAAA,CAAQ,GAAA,EAAI;AAAA,EACd,CAAC,CAAA;AAED,EAAA,MAAM,UAAA,GAAa,OAAA,CAAQ,MAAA,KAAW,CAAA,GAAI,QAAA,GAAW,SAAA;AACrD,EAAA,IAAI,CAAC,KAAA,EAAO;AACV,IAAA,OAAA,CAAQ,IAAI,KAAK,CAAA;AACjB,IAAA,OAAA,CAAQ,IAAI,CAAA,EAAG,OAAA,CAAQ,MAAM,CAAA,eAAA,EAAkB,UAAU,CAAA,QAAA,CAAU,CAAA;AAAA,EACrE;AACF;AAOA,IAAM,6BAAA,GAAgC,+BAAA;AAGtC,IAAM,kBAAA,GAAqB;AAAA;;AAAA;;AAAA;;AAAA;AAAA;;AAAA;;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;;AAAA;;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA,CAAA;AAmE3B,SAAS,wBAAwB,OAAA,EAA0B;AACzD,EAAA,OAAO,OAAA,CAAQ,SAAS,6BAA6B,CAAA;AACvD;AAKA,eAAe,uBAAuB,QAAA,EAAiC;AACrE,EAAA,MAAM,UAAA,GAAaA,OAAAA,CAAQD,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAC,CAAA;AACvD,EAAA,MAAMK,KAAAA,CAAM,UAAA,EAAY,EAAE,SAAA,EAAW,MAAM,CAAA;AAC7C;AAKA,eAAe,gBAAgB,QAAA,EAAiC;AAC9D,EAAA,MAAM,SAAA,GAAYL,IAAAA
,CAAK,QAAA,EAAU,YAAY,CAAA;AAC7C,EAAA,IAAI,CAACQ,UAAAA,CAAW,SAAS,CAAA,EAAG;AAC1B,IAAA,MAAMC,SAAAA,CAAU,SAAA,EAAW,EAAA,EAAI,OAAO,CAAA;AAAA,EACxC;AACF;AAKA,eAAe,eAAe,QAAA,EAAoC;AAChE,EAAA,MAAM,UAAA,GAAaT,IAAAA,CAAK,QAAA,EAAU,WAAW,CAAA;AAC7C,EAAA,IAAI,OAAA,GAAU,EAAA;AACd,EAAA,IAAI,OAAA,GAAU,KAAA;AAEd,EAAA,IAAIQ,UAAAA,CAAW,UAAU,CAAA,EAAG;AAC1B,IAAA,OAAA,GAAU,MAAMJ,QAAAA,CAAS,UAAA,EAAY,OAAO,CAAA;AAC5C,IAAA,OAAA,GAAU,IAAA;AACV,IAAA,IAAI,uBAAA,CAAwB,OAAO,CAAA,EAAG;AACpC,MAAA,OAAO,KAAA;AAAA,IACT;AAAA,EACF;AAGA,EAAA,MAAM,UAAA,GAAa,UAAU,OAAA,CAAQ,OAAA,KAAY,IAAA,GAAO,kBAAA,GAAqB,kBAAA,CAAmB,IAAA,EAAK,GAAI,IAAA;AACzG,EAAA,MAAMK,SAAAA,CAAU,UAAA,EAAY,UAAA,EAAY,OAAO,CAAA;AAC/C,EAAA,OAAO,IAAA;AACT;AAOA,IAAM,cAAA,GAAiB,GAAA;AAKvB,SAAS,qBAAqB,OAAA,EAA0B;AACtD,EAAA,OAAO,OAAA,CAAQ,SAAS,WAAW,CAAA;AACrC;AAKA,eAAe,eAAe,QAAA,EAA0C;AACtE,EAAA,MAAM,MAAA,GAAST,IAAAA,CAAK,QAAA,EAAU,MAAM,CAAA;AAGpC,EAAA,IAAI,CAACQ,UAAAA,CAAW,MAAM,CAAA,EAAG;AACvB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,MAAM,UAAA,GAAaR,IAAAA,CAAK,MAAA,EAAQ,QAAQ,CAAA;AACxC,EAAA,IAAIQ,UAAAA,CAAW,UAAU,CAAA,EAAG;AAC1B,IAAA,MAAM,MAAA,GAAS,MAAMJ,QAAAA,CAAS,UAAA,EAAY,OAAO,CAAA;AACjD,IAAA,MAAM,KAAA,GAAQ,wBAAA,CAAyB,IAAA,CAAK,MAAM,CAAA;AAClD,IAAA,IAAI,KAAA,GAAQ,CAAC,CAAA,EAAG;AACd,MAAA,MAAM,SAAA,GAAY,KAAA,CAAM,CAAC,CAAA,CAAE,IAAA,EAAK;AAEhC,MAAA,OAAO,UAAU,UAAA,CAAW,GAAG,IAAI,SAAA,GAAYJ,IAAAA,CAAK,UAAU,SAAS,CAAA;AAAA,IACzE;AAAA,EACF;AAGA,EAAA,MAAM,eAAA,GAAkBA,IAAAA,CAAK,MAAA,EAAQ,OAAO,CAAA;AAC5C,EAAA,OAAOQ,UAAAA,CAAW,eAAe,CAAA,GAAI,eAAA,GAAkB,IAAA;AACzD;AAGA,IAAM,yBAAA,GAA4B;AAAA;AAAA;AAAA,CAAA;AAYlC,eAAe,qBAAqB,QAAA,EAAoC;AACtE,EAAA,MAAM,WAAA,GAAc,MAAM,cAAA,CAAe,QAAQ,CAAA;AAGjD,EAAA,IAAI,CAAC,WAAA,EAAa;AAChB,IAAA,OAAO,KAAA;AAAA,EACT;AAGA,EAAA,MAAMH,KAAAA,CAAM,WAAA,EAAa,EAAE,SAAA,EAAW,MAAM,CAAA;AAE5C,EAAA,MAAM,QAAA,GAAWL,IAAAA,CAAK,WAAA,EAAa,YAAY,CAAA;AAG/C,EAAA,IAAIQ,UAAAA,CAAW,QAAQ,CAAA,EAAG;AACxB,IAAA,MAAM,OAAA,GAAU,MAAMJ,QAAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AAChD,IAAA,IAAI,oBAAA,CAAqB,OAAO,CAAA,EAAG;AACjC,MAAA,OAAO,KAAA;AAAA,IACT;AAGA,IAAA,MAAM,UAAA,GAAa,OAAA,CAAQ,OAAA,EAAQ,GAAI,IAAA,GAAO,yBAAA;AAC9C,IAAA,MAAMK,SAAAA,CAAU,QAAA,EAAU,UAAA,EAAY,OAAO,CAAA;AAC7C,IAAA,SAAA,CAAU,UAAU,cAAc,CAAA;AAClC,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,MAAMA,SAAAA,CAAU,QAAA,EAAU,wBAAA,EAA0B,OAAO,CAAA;AAC3D,EAAA,SAAA,CAAU,UAAU,cAAc,CAAA;AAElC,EAAA,OAAO,IAAA;AACT;AAEA,IAAM,OAAA,GAAU,IAAI,OAAA,EAAQ;AAG5B,OAAA,CACG,OAAO,eAAA,EAAiB,sBAAsB,CAAA,CAC9C,MAAA,CAAO,eAAe,+BAA+B,CAAA;AAExD,OAAA,CACG,KAAK,gBAAgB,CAAA,CACrB,YAAY,mDAAmD,CAAA,CAC/D,QAAQ,OAAO,CAAA;AAWlB,OAAA,CACG,OAAA,CAAQ,MAAM,CAAA,CACd,WAAA,CAAY,8CAA8C,CAAA,CAC1D,MAAA,CAAO,iBAAiB,6BAA6B,CAAA,CACrD,OAAO,cAAA,EAAgB,6BAA6B,EACpD,MAAA,CAAO,QAAA,EAAU,uBAAuB,CAAA,CACxC,MAAA,CAAO,eAA+B,OAAA,EAAwE;AAC7G,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,aAAA,CAAc,IAAI,CAAA;AAGpC,EAAA,MAAM,uBAAuB,QAAQ,CAAA;AACrC,EAAA,MAAM,gBAAgB,QAAQ,CAAA;AAC9B,EAAA,MAAM,UAAA,GAAaR,OAAAA,CAAQD,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAC,CAAA;AAGvD,EAAA,IAAI,eAAA,GAAkB,KAAA;AACtB,EAAA,IAAI,CAAC,QAAQ,UAAA,EAAY;AACvB,IAAA,eAAA,GAAkB,MAAM,eAAe,QAAQ,CAAA;AAAA,EACjD;AAGA,EAAA,IAAI,cAAA,GAAiB,KAAA;AACrB,EAAA,IAAI,CAAC,QAAQ,SAAA,EAAW;AACtB,IAAA,cAAA,GAAiB,MAAM,qBAAqB,QAAQ,CAAA;AAAA,EACtD;AAGA,EAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,IAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU;AAAA,MACzB,WAAA,EAAa,IAAA;AAAA,MACb,UAAA;AAAA,MACA,QAAA,EAAU,eAAA;AAAA,MACV,KAAA,EAAO;AAAA,KACR,CAAC,CAAA;AAAA,EACJ,CAAA,MAAA,IAAW,CAAC,KAAA,EAAO;AACjB,IAAA,GAAA,CAAI,QAAQ,4BAA4B,CAAA;AACxC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,qBAAA,EAAwB,UAAU,CAAA,CAAE,CAAA;AAChD,IAAA,IAAI,eAAA,EAAiB;AACnB,MAAA,OAAA,CAAQ,IAAI,kDAAkD,CAAA;AAAA,IACh
E,CAAA,MAAA,IAAW,QAAQ,UAAA,EAAY;AAC7B,MAAA,OAAA,CAAQ,IAAI,sCAAsC,CAAA;AAAA,IACpD,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,IAAI,iDAAiD,CAAA;AAAA,IAC/D;AACA,IAAA,IAAI,cAAA,EAAgB;AAClB,MAAA,OAAA,CAAQ,IAAI,wCAAwC,CAAA;AAAA,IACtD,CAAA,MAAA,IAAW,QAAQ,SAAA,EAAW;AAC5B,MAAA,OAAA,CAAQ,IAAI,qCAAqC,CAAA;AAAA,IACnD,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,IAAI,kDAAkD,CAAA;AAAA,IAChE;AAAA,EACF;AACF,CAAC,CAAA;AASH,IAAM,eAAe,OAAA,CAAQ,OAAA,CAAQ,OAAO,CAAA,CAAE,YAAY,sBAAsB,CAAA;AAEhF,YAAA,CACG,OAAA,CAAQ,YAAY,CAAA,CACpB,WAAA,CAAY,yCAAyC,CAAA,CACrD,MAAA,CAAO,QAAA,EAAU,gBAAgB,CAAA,CACjC,MAAA,CAAO,CAAC,MAAc,OAAA,KAAgC;AACrD,EAAA,IAAI,SAAS,YAAA,EAAc;AACzB,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU,EAAE,MAAM,YAAA,EAAc,OAAA,EAAS,kBAAA,EAAoB,CAAC,CAAA;AAAA,IACjF,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,IAAI,kBAAkB,CAAA;AAAA,IAChC;AAAA,EACF,CAAA,MAAO;AACL,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU,EAAE,OAAO,CAAA,cAAA,EAAiB,IAAI,CAAA,CAAA,EAAI,CAAC,CAAA;AAAA,IAChE,CAAA,MAAO;AACL,MAAA,GAAA,CAAI,KAAA,CAAM,CAAA,cAAA,EAAiB,IAAI,CAAA,CAAE,CAAA;AAAA,IACnC;AACA,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AACF,CAAC,CAAA;AASH,SAAS,sBAAsB,OAAA,EAA0B;AACvD,EAAA,IAAI,OAAA,EAAS;AACX,IAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,IAAA,OAAOA,IAAAA,CAAK,QAAA,EAAU,SAAA,EAAW,eAAe,CAAA;AAAA,EAClD;AACA,EAAA,OAAOA,IAAAA,CAAKU,OAAAA,EAAQ,EAAG,SAAA,EAAW,eAAe,CAAA;AACnD;AAKA,eAAe,mBAAmB,YAAA,EAAwD;AACxF,EAAA,IAAI,CAACF,UAAAA,CAAW,YAAY,CAAA,EAAG;AAC7B,IAAA,OAAO,EAAC;AAAA,EACV;AACA,EAAA,MAAM,OAAA,GAAU,MAAMJ,QAAAA,CAAS,YAAA,EAAc,OAAO,CAAA;AACpD,EAAA,OAAO,IAAA,CAAK,MAAM,OAAO,CAAA;AAC3B;AAKA,SAAS,cAAc,QAAA,EAA4C;AACjE,EAAA,MAAM,QAAQ,QAAA,CAAS,KAAA;AACvB,EAAA,IAAI,CAAC,KAAA,EAAO,YAAA,EAAc,OAAO,KAAA;AAEjC,EAAA,OAAO,KAAA,CAAM,YAAA,CAAa,IAAA,CAAK,CAAC,KAAA,KAAU;AACxC,IAAA,MAAM,SAAA,GAAY,KAAA;AAClB,IAAA,OAAO,SAAA,CAAU,OAAO,IAAA,CAAK,CAAC,MAAM,CAAA,CAAE,OAAA,EAAS,QAAA,CAAS,kBAAkB,CAAC,CAAA;AAAA,EAC7E,CAAC,CAAA;AACH;AAKA,SAAS,qBAAqB,QAAA,EAAyC;AACrE,EAAA,IAAI,CAAC,SAAS,KAAA,EAAO;AACnB,IAAA,QAAA,CAAS,QAAQ,EAAC;AAAA,EACpB;AACA,EAAA,MAAM,QAAQ,QAAA,CAAS,KAAA;AACvB,EAAA,IAAI,CAAC,MAAM,YAAA,EAAc;AACvB,IAAA,KAAA,CAAM,eAAe,EAAC;AAAA,EACxB;AACA,EAAA,KAAA,CAAM,YAAA,CAAa,KAAK,kBAAkB,CAAA;AAC5C;AAKA,SAAS,wBAAwB,QAAA,EAA4C;AAC3E,EAAA,MAAM,QAAQ,QAAA,CAAS,KAAA;AACvB,EAAA,IAAI,CAAC,KAAA,EAAO,YAAA,EAAc,OAAO,KAAA;AAEjC,EAAA,MAAM,cAAA,GAAiB,MAAM,YAAA,CAAa,MAAA;AAC1C,EAAA,KAAA,CAAM,YAAA,GAAe,KAAA,CAAM,YAAA,CAAa,MAAA,CAAO,CAAC,KAAA,KAAU;AACxD,IAAA,MAAM,SAAA,GAAY,KAAA;AAClB,IAAA,OAAO,CAAC,SAAA,CAAU,KAAA,EAAO,IAAA,CAAK,CAAC,MAAM,CAAA,CAAE,OAAA,EAAS,QAAA,CAAS,kBAAkB,CAAC,CAAA;AAAA,EAC9E,CAAC,CAAA;AAED,EAAA,OAAO,KAAA,CAAM,aAAa,MAAA,GAAS,cAAA;AACrC;AAKA,eAAe,mBAAA,CAAoB,cAAsB,QAAA,EAAkD;AACzG,EAAA,MAAM,GAAA,GAAMH,QAAQ,YAAY,CAAA;AAChC,EAAA,MAAMI,KAAAA,CAAM,GAAA,EAAK,EAAE,SAAA,EAAW,MAAM,CAAA;AAGpC,EAAA,MAAM,WAAW,YAAA,GAAe,MAAA;AAChC,EAAA,MAAMI,SAAAA,CAAU,UAAU,IAAA,CAAK,SAAA,CAAU,UAAU,IAAA,EAAM,CAAC,CAAA,GAAI,IAAA,EAAM,OAAO,CAAA;AAC3E,EAAA,MAAME,MAAAA,CAAO,UAAU,YAAY,CAAA;AACrC;AAEA,IAAM,eAAe,OAAA,CAAQ,OAAA,CAAQ,OAAO,CAAA,CAAE,YAAY,oBAAoB,CAAA;AAE9E,YAAA,CACG,OAAA,CAAQ,QAAQ,CAAA,CAChB,WAAA,CAAY,wCAAwC,EACpD,MAAA,CAAO,WAAA,EAAa,wDAAwD,CAAA,CAC5E,MAAA,CAAO,aAAA,EAAe,6BAA6B,CAAA,CACnD,MAAA,CAAO,WAAA,EAAa,wCAAwC,CAAA,CAC5D,MAAA,CAAO,UAAU,gBAAgB,CAAA,CACjC,MAAA,CAAO,OAAO,OAAA,KAA0F;AACvG,EAAA,MAAM,YAAA,GAAe,qBAAA,CAAsB,OAAA,CAAQ,OAAA,IAAW,KAAK,CAAA;AACnE,EAAA,MAAM,WAAA,GAAc,OAAA,CAAQ,OAAA,GAAU,uBAAA,GAA0B,yBAAA;AAEhE,EAAA,IAAI,QAAA;AACJ,EAAA,IAAI;AACF,IAAA,QAAA,GAAW,MAAM,mBAAmB,YAAY,CAAA;AAAA,EAClD,CAAA,CAAA,MAAQ;AACN,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,OAAA,CAAQ,IAAI,IAAA,CAAK,SAAA,
CAAU,EAAE,KAAA,EAAO,+BAAA,EAAiC,CAAC,CAAA;AAAA,IACxE,CAAA,MAAO;AACL,MAAA,GAAA,CAAI,MAAM,wDAAwD,CAAA;AAAA,IACpE;AACA,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,MAAM,gBAAA,GAAmB,cAAc,QAAQ,CAAA;AAG/C,EAAA,IAAI,QAAQ,SAAA,EAAW;AACrB,IAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,MAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,QAAA,OAAA,CAAQ,GAAA,CAAI,IAAA,CAAK,SAAA,CAAU,EAAE,MAAA,EAAQ,IAAA,EAAM,WAAA,EAAa,gBAAA,EAAkB,QAAA,EAAU,WAAA,EAAa,CAAC,CAAA;AAAA,MACpG,CAAA,MAAO;AACL,QAAA,IAAI,gBAAA,EAAkB;AACpB,UAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,uCAAA,EAA0C,WAAW,CAAA,CAAE,CAAA;AAAA,QACrE,CAAA,MAAO;AACL,UAAA,OAAA,CAAQ,IAAI,mCAAmC,CAAA;AAAA,QACjD;AAAA,MACF;AACA,MAAA;AAAA,IACF;AAEA,IAAA,MAAM,OAAA,GAAU,wBAAwB,QAAQ,CAAA;AAChD,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,MAAM,mBAAA,CAAoB,cAAc,QAAQ,CAAA;AAChD,MAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,QAAA,OAAA,CAAQ,GAAA,CAAI,IAAA,CAAK,SAAA,CAAU,EAAE,SAAA,EAAW,KAAA,EAAO,QAAA,EAAU,WAAA,EAAa,MAAA,EAAQ,SAAA,EAAW,CAAC,CAAA;AAAA,MAC5F,CAAA,MAAO;AACL,QAAA,GAAA,CAAI,QAAQ,8BAA8B,CAAA;AAC1C,QAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,YAAA,EAAe,WAAW,CAAA,CAAE,CAAA;AAAA,MAC1C;AAAA,IACF,CAAA,MAAO;AACL,MAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,QAAA,OAAA,CAAQ,GAAA,CAAI,IAAA,CAAK,SAAA,CAAU,EAAE,SAAA,EAAW,KAAA,EAAO,QAAA,EAAU,WAAA,EAAa,MAAA,EAAQ,WAAA,EAAa,CAAC,CAAA;AAAA,MAC9F,CAAA,MAAO;AACL,QAAA,GAAA,CAAI,KAAK,mCAAmC,CAAA;AAAA,MAC9C;AAAA,IACF;AACA,IAAA;AAAA,EACF;AAGA,EAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,OAAA,CAAQ,GAAA,CAAI,IAAA,CAAK,SAAA,CAAU,EAAE,MAAA,EAAQ,IAAA,EAAM,YAAA,EAAc,CAAC,gBAAA,EAAkB,QAAA,EAAU,WAAA,EAAa,CAAC,CAAA;AAAA,IACtG,CAAA,MAAO;AACL,MAAA,IAAI,gBAAA,EAAkB;AACpB,QAAA,OAAA,CAAQ,IAAI,wCAAwC,CAAA;AAAA,MACtD,CAAA,MAAO;AACL,QAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,sCAAA,EAAyC,WAAW,CAAA,CAAE,CAAA;AAAA,MACpE;AAAA,IACF;AACA,IAAA;AAAA,EACF;AAEA,EAAA,IAAI,gBAAA,EAAkB;AACpB,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU;AAAA,QACzB,SAAA,EAAW,IAAA;AAAA,QACX,QAAA,EAAU,WAAA;AAAA,QACV,KAAA,EAAO,CAAC,cAAc,CAAA;AAAA,QACtB,MAAA,EAAQ;AAAA,OACT,CAAC,CAAA;AAAA,IACJ,CAAA,MAAO;AACL,MAAA,GAAA,CAAI,KAAK,wCAAwC,CAAA;AACjD,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,YAAA,EAAe,WAAW,CAAA,CAAE,CAAA;AAAA,IAC1C;AACA,IAAA;AAAA,EACF;AAGA,EAAA,MAAM,UAAA,GAAaH,WAAW,YAAY,CAAA;AAC1C,EAAA,oBAAA,CAAqB,QAAQ,CAAA;AAC7B,EAAA,MAAM,mBAAA,CAAoB,cAAc,QAAQ,CAAA;AAEhD,EAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,IAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU;AAAA,MACzB,SAAA,EAAW,IAAA;AAAA,MACX,QAAA,EAAU,WAAA;AAAA,MACV,KAAA,EAAO,CAAC,cAAc,CAAA;AAAA,MACtB,MAAA,EAAQ,aAAa,SAAA,GAAY;AAAA,KAClC,CAAC,CAAA;AAAA,EACJ,CAAA,MAAO;AACL,IAAA,GAAA,CAAI,OAAA,CAAQ,OAAA,CAAQ,OAAA,GAAU,6CAAA,GAAgD,6BAA6B,CAAA;AAC3G,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,YAAA,EAAe,WAAW,CAAA,CAAE,CAAA;AACxC,IAAA,OAAA,CAAQ,IAAI,+CAA+C,CAAA;AAC3D,IAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,IAAA,OAAA,CAAQ,IAAI,wDAAwD,CAAA;AACpE,IAAA,IAAI,QAAQ,OAAA,EAAS;AACnB,MAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,MAAA,OAAA,CAAQ,IAAI,4CAA4C,CAAA;AAAA,IAC1D;AAAA,EACF;AACF,CAAC,CAAA;AAEH,OAAA,CACG,OAAA,CAAQ,iBAAiB,CAAA,CACzB,WAAA,CAAY,sBAAsB,CAAA,CAClC,MAAA,CAAO,sBAAA,EAAwB,4BAA4B,CAAA,CAC3D,MAAA,CAAO,iBAAiB,sBAAA,EAAwB,EAAE,EAClD,MAAA,CAAO,WAAA,EAAa,mBAAmB,CAAA,CACvC,MAAA,CAAO,eAA+B,OAAA,EAAiB,OAAA,EAA4D;AAClH,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,aAAA,CAAc,IAAI,CAAA;AAEpC,EAAA,MAAM,MAAA,GAAiB;AAAA,IACrB,EAAA,EAAI,WAAW,OAAO,CAAA;AAAA,IACtB,IAAA,EAAM,OAAA;AAAA,IACN,OAAA,EAAS,QAAQ,OAAA,IAAW,gBAAA;AAAA,IAC5B,OAAA;AAAA,IACA,IAAA,EAAM,OAAA,CAAQ,IAAA,GAAO,OAAA,CAAQ,KAAK,KAAA,CAAM,GAAG,CAAA,CAAE,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,CAAE,IAAA,EAAM,IAAI,EAAC;AAAA,IACrE,MAAA,EAAQ,QAAA;AAAA,IACR,OAAA,EAAS;AAAA,MACP,IAAA,EAAM,KAAA;AAAA,MACN,MAAA,EAAQ;AAAA,KACV;AAAA,
IACA,OAAA,EAAA,iBAAS,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAAA,IAChC,SAAA,EAAW,IAAA;AAAA;AAAA,IACX,YAAY,EAAC;AAAA,IACb,SAAS;AAAC,GACZ;AAEA,EAAA,MAAM,YAAA,CAAa,UAAU,MAAM,CAAA;AACnC,EAAA,GAAA,CAAI,OAAA,CAAQ,CAAA,SAAA,EAAY,OAAO,CAAA,CAAE,CAAA;AACjC,EAAA,IAAI,CAAC,KAAA,EAAO;AACV,IAAA,OAAA,CAAQ,IAAI,CAAA,IAAA,EAAO,KAAA,CAAM,IAAI,MAAA,CAAO,EAAE,CAAC,CAAA,CAAE,CAAA;AAAA,EAC3C;AACF,CAAC,CAAA;AAEH,OAAA,CACG,OAAA,CAAQ,gBAAgB,CAAA,CACxB,WAAA,CAAY,2BAA2B,CAAA,CACvC,MAAA,CAAO,sBAAA,EAAwB,iBAAA,EAAmB,oBAAoB,CAAA,CACtE,MAAA,CAAO,eAA+B,OAAe,OAAA,EAA4B;AAChF,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,MAAM,KAAA,GAAQ,UAAA,CAAW,OAAA,CAAQ,KAAA,EAAO,OAAO,CAAA;AAC/C,EAAA,MAAM,EAAE,OAAA,EAAS,KAAA,EAAM,GAAI,cAAc,IAAI,CAAA;AAG7C,EAAA,MAAM,aAAa,QAAQ,CAAA;AAE3B,EAAA,MAAM,OAAA,GAAU,MAAM,aAAA,CAAc,QAAA,EAAU,OAAO,KAAK,CAAA;AAE1D,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxB,IAAA,OAAA,CAAQ,IAAI,uFAAuF,CAAA;AACnG,IAAA;AAAA,EACF;AAEA,EAAA,IAAI,CAAC,KAAA,EAAO;AACV,IAAA,GAAA,CAAI,IAAA,CAAK,CAAA,MAAA,EAAS,OAAA,CAAQ,MAAM,CAAA;AAAA,CAAe,CAAA;AAAA,EACjD;AACA,EAAA,KAAA,MAAW,UAAU,OAAA,EAAS;AAC5B,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,EAAI,KAAA,CAAM,IAAA,CAAK,MAAA,CAAO,EAAE,CAAC,CAAA,EAAA,EAAK,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1D,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,WAAA,EAAc,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1C,IAAA,IAAI,OAAA,IAAW,OAAO,OAAA,EAAS;AAC7B,MAAA,OAAA,CAAQ,GAAA,CAAI,cAAc,MAAA,CAAO,OAAA,CAAQ,IAAI,CAAA,GAAA,EAAM,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,CAAE,CAAA;AAC1E,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,WAAA,EAAc,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAAA,IAC5C;AACA,IAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAA,GAAS,CAAA,EAAG;AAC1B,MAAA,OAAA,CAAQ,IAAI,CAAA,QAAA,EAAW,MAAA,CAAO,KAAK,IAAA,CAAK,IAAI,CAAC,CAAA,CAAE,CAAA;AAAA,IACjD;AACA,IAAA,OAAA,CAAQ,GAAA,EAAI;AAAA,EACd;AACF,CAAC,CAAA;AAEH,OAAA,CACG,OAAA,CAAQ,MAAM,CAAA,CACd,WAAA,CAAY,kBAAkB,CAAA,CAC9B,MAAA,CAAO,sBAAA,EAAwB,iBAAA,EAAmB,kBAAkB,CAAA,CACpE,MAAA,CAAO,eAA+B,OAAA,EAA4B;AACjE,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,MAAM,KAAA,GAAQ,UAAA,CAAW,OAAA,CAAQ,KAAA,EAAO,OAAO,CAAA;AAC/C,EAAA,MAAM,EAAE,OAAA,EAAS,KAAA,EAAM,GAAI,cAAc,IAAI,CAAA;AAE7C,EAAA,MAAM,EAAE,OAAA,EAAS,YAAA,EAAa,GAAI,MAAM,YAAY,QAAQ,CAAA;AAE5D,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxB,IAAA,OAAA,CAAQ,IAAI,+DAA+D,CAAA;AAC3E,IAAA,IAAI,eAAe,CAAA,EAAG;AACpB,MAAA,GAAA,CAAI,IAAA,CAAK,CAAA,EAAG,YAAY,CAAA,6BAAA,CAA+B,CAAA;AAAA,IACzD;AACA,IAAA;AAAA,EACF;AAEA,EAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AAGrC,EAAA,IAAI,CAAC,KAAA,EAAO;AACV,IAAA,GAAA,CAAI,KAAK,CAAA,QAAA,EAAW,MAAA,CAAO,MAAM,CAAA,IAAA,EAAO,QAAQ,MAAM,CAAA;AAAA,CAAe,CAAA;AAAA,EACvE;AAEA,EAAA,KAAA,MAAW,UAAU,MAAA,EAAQ;AAC3B,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,EAAI,KAAA,CAAM,IAAA,CAAK,MAAA,CAAO,EAAE,CAAC,CAAA,EAAA,EAAK,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1D,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,OAAA,CAAQ,IAAI,CAAA,QAAA,EAAW,MAAA,CAAO,IAAI,CAAA,WAAA,EAAc,MAAA,CAAO,MAAM,CAAA,CAAE,CAAA;AAC/D,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,WAAA,EAAc,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1C,MAAA,IAAI,OAAO,OAAA,EAAS;AAClB,QAAA,OAAA,CAAQ,GAAA,CAAI,cAAc,MAAA,CAAO,OAAA,CAAQ,IAAI,CAAA,GAAA,EAAM,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,CAAE,CAAA;AAAA,MAC5E;AAAA,IACF,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,IAAI,CAAA,QAAA,EAAW,MAAA,CAAO,IAAI,CAAA,WAAA,EAAc,MAAA,CAAO,MAAM,CAAA,CAAE,CAAA;AAAA,IACjE;AACA,IAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAA,GAAS,CAAA,EAAG;AAC1B,MAAA,OAAA,CAAQ,IAAI,CAAA,QAAA,EAAW,MAAA,CAAO,KAAK,IAAA,CAAK,IAAI,CAAC,CAAA,CAAE,CAAA;AAAA,IACjD;AACA,IAAA,OAAA,CAAQ,GAAA,EAAI;AAAA,EACd;AAEA,EAAA,IAAI,eAAe,CAAA,EAAG;AACpB,IAAA,GAAA,CAAI,IAAA,CAAK,CAAA,EAAG,YAAY,CAAA,6BAAA,CAA+B,CAAA;AAAA,EACzD;AACF,CAAC,CAAA;AAEH,OAAA,CACG,OAAA,CAAQ,SAAS,CAAA,CACjB,WAAA,CAAY,iCAAiC,CAAA,CAC7C,MAAA,CAAO,aA
AA,EAAe,iCAAiC,CAAA,CACvD,MAAA,CAAO,OAAO,OAAA,KAAiC;AAC9C,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,IAAI,QAAQ,KAAA,EAAO;AACjB,IAAA,OAAA,CAAQ,IAAI,0BAA0B,CAAA;AACtC,IAAA,MAAM,aAAa,QAAQ,CAAA;AAC3B,IAAA,OAAA,CAAQ,IAAI,gBAAgB,CAAA;AAAA,EAC9B,CAAA,MAAO;AACL,IAAA,MAAM,OAAA,GAAU,MAAM,YAAA,CAAa,QAAQ,CAAA;AAC3C,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,OAAA,CAAQ,IAAI,gCAAgC,CAAA;AAAA,IAC9C,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,IAAI,sBAAsB,CAAA;AAAA,IACpC;AAAA,EACF;AACF,CAAC,CAAA;AAEH,OAAA,CACG,OAAA,CAAQ,QAAQ,CAAA,CAChB,WAAA,CAAY,qCAAqC,CAAA,CACjD,cAAA,CAAe,kBAAkB,yBAAyB,CAAA,CAC1D,OAAO,QAAA,EAAU,uCAAuC,EACxD,MAAA,CAAO,WAAA,EAAa,qCAAqC,CAAA,CACzD,MAAA,CAAO,QAAA,EAAU,uBAAuB,CAAA,CACxC,MAAA;AAAA,EACC,OAAO,OAAA,KAA8E;AACnF,IAAA,MAAM,WAAW,WAAA,EAAY;AAG7B,IAAA,IAAI,OAAA,CAAQ,IAAA,IAAQ,CAAC,OAAA,CAAQ,GAAA,EAAK;AAChC,MAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,QAAA,OAAA,CAAQ,IAAI,IAAA,CAAK,SAAA,CAAU,EAAE,KAAA,EAAO,6CAAA,EAA+C,CAAC,CAAA;AAAA,MACtF,CAAA,MAAO;AACL,QAAA,GAAA,CAAI,MAAM,6CAA6C,CAAA;AACvD,QAAA,OAAA,CAAQ,IAAI,yCAAyC,CAAA;AAAA,MACvD;AACA,MAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,IAChB;AAEA,IAAA,MAAM,KAAA,GAAQ,MAAM,cAAA,CAAe,OAAA,CAAQ,KAAK,CAAA;AAChD,IAAA,MAAM,MAAA,GAAS,MAAM,gBAAA,CAAiB,QAAA,EAAU,KAAK,CAAA;AAErD,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,QAAA,OAAA,CAAQ,IAAI,IAAA,CAAK,SAAA,CAAU,EAAE,QAAA,EAAU,KAAA,EAAO,CAAC,CAAA;AAAA,MACjD,CAAA,MAAO;AACL,QAAA,OAAA,CAAQ,IAAI,+BAA+B,CAAA;AAAA,MAC7C;AACA,MAAA;AAAA,IACF;AAEA,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU,EAAE,UAAU,IAAA,EAAM,GAAG,MAAA,EAAQ,CAAC,CAAA;AACzD,MAAA;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,IAAI,4BAA4B,CAAA;AACxC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,WAAA,EAAc,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAC1C,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAa,MAAA,CAAO,MAAM,CAAA,CAAE,CAAA;AACxC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,YAAA,EAAe,MAAA,CAAO,eAAe,CAAA,CAAE,CAAA;AAEnD,IAAA,IAAI,OAAA,CAAQ,IAAA,IAAQ,OAAA,CAAQ,GAAA,EAAK;AAC/B,MAAA,MAAM,MAAA,GAAiB;AAAA,QACrB,EAAA,EAAI,UAAA,CAAW,MAAA,CAAO,eAAe,CAAA;AAAA,QACrC,IAAA,EAAM,OAAA;AAAA,QACN,SAAS,MAAA,CAAO,OAAA;AAAA,QAChB,SAAS,MAAA,CAAO,eAAA;AAAA,QAChB,MAAM,EAAC;AAAA,QACP,QAAQ,MAAA,CAAO,MAAA;AAAA,QACf,OAAA,EAAS,EAAE,IAAA,EAAM,QAAA,EAAU,QAAQ,cAAA,EAAe;AAAA,QAClD,OAAA,EAAA,iBAAS,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAAA,QAChC,SAAA,EAAW,IAAA;AAAA;AAAA,QACX,YAAY,EAAC;AAAA,QACb,SAAS;AAAC,OACZ;AAEA,MAAA,MAAM,YAAA,CAAa,UAAU,MAAM,CAAA;AACnC,MAAA,OAAA,CAAQ,GAAA,CAAI;AAAA,iBAAA,EAAsB,MAAA,CAAO,EAAE,CAAA,CAAE,CAAA;AAAA,IAC/C;AAAA,EACF;AACF,CAAA;AAYF,OAAA,CACG,OAAA,CAAQ,SAAS,CAAA,CACjB,WAAA,CAAY,qDAAqD,CAAA,CACjE,MAAA,CAAO,sBAAA,EAAwB,4BAA4B,CAAA,CAC3D,MAAA,CAAO,sBAAA,EAAwB,+BAA+B,CAAA,CAC9D,MAAA,CAAO,gBAAA,EAAkB,0DAA0D,CAAA,CACnF,MAAA,CAAO,QAAA,EAAU,uBAAuB,CAAA,CACxC,MAAA,CAAO,WAAA,EAAa,wCAAwC,CAAA,CAC5D,MAAA,CAAO,eAA+B,OAAA,EAAyB;AAC9D,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,aAAA,CAAc,IAAI,CAAA;AACtC,EAAA,IAAI,MAAA;AAGJ,EAAA,IAAI,QAAQ,KAAA,EAAO;AACjB,IAAA,MAAM,KAAA,GAAQ,MAAM,cAAA,CAAe,OAAA,CAAQ,KAAK,CAAA;AAChD,IAAA,MAAM,MAAA,GAAS,MAAM,gBAAA,CAAiB,QAAA,EAAU,KAAK,CAAA;AACrD,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,OAAA,CAAQ,IAAA,GACJ,OAAA,CAAQ,GAAA,CAAI,IAAA,CAAK,UAAU,EAAE,QAAA,EAAU,KAAA,EAAO,KAAA,EAAO,OAAO,CAAC,CAAA,GAC7D,OAAA,CAAQ,IAAI,+BAA+B,CAAA;AAC/C,MAAA;AAAA,IACF;AACA,IAAA,MAAA,GAAS,yBAAA,CAA0B,MAAA,EAAQ,OAAA,CAAQ,GAAA,IAAO,KAAK,CAAA;AAAA,EACjE,CAAA,MAAA,IAAW,OAAA,CAAQ,OAAA,IAAW,OAAA,CAAQ,OAAA,EAAS;AAE7C,IAAA,MAAA,GAAS,sBAAsB,OAAA,CAAQ,OAAA,EAAS,QAAQ,OAAA,EAAS,OAAA,CAAQ,OAAO,KAAK,CAAA;AAAA,EACvF,CAAA,MAAO;AAEL,IAAA,MAAM,GAAA,GAAM,0DAAA;AACZ,IAAA,OAAA,CAAQ,IAAA,GAAO,OAAA,CAAQ,GAAA,CAAI,IAAA,CAAK,UAAU,EAAE,KAAA,EAAO,GAAA,EAAK,KAAA,EAAO,O
AAO,CAAC,CAAA,GAAI,GAAA,CAAI,MAAM,GAAG,CAAA;AACxF,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAGA,EAAA,IAAI,CAAC,OAAA,CAAQ,GAAA,IAAO,CAAC,OAAA,CAAQ,MAAM,KAAA,EAAO;AACxC,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU,EAAE,OAAO,wCAAA,EAA0C,KAAA,EAAO,KAAA,EAAO,CAAC,CAAA;AAAA,IAC/F,CAAA,MAAO;AACL,MAAA,GAAA,CAAI,MAAM,wCAAwC,CAAA;AAClD,MAAA,OAAA,CAAQ,IAAI,oDAAoD,CAAA;AAAA,IAClE;AACA,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAGA,EAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,IAAA,IAAI,OAAA,CAAQ,GAAA,EAAK,MAAM,YAAA,CAAa,UAAU,MAAM,CAAA;AACpD,IAAA,iBAAA,CAAkB,MAAA,EAAQ,OAAA,CAAQ,GAAA,IAAO,KAAK,CAAA;AAAA,EAChD,CAAA,MAAA,IAAW,QAAQ,GAAA,EAAK;AACtB,IAAA,MAAM,YAAA,CAAa,UAAU,MAAM,CAAA;AACnC,IAAA,GAAA,CAAI,OAAA,CAAQ,CAAA,cAAA,EAAiB,MAAA,CAAO,EAAE,CAAA,CAAE,CAAA;AACxC,IAAA,IAAI,OAAA,UAAiB,GAAA,CAAI,CAAA,QAAA,EAAW,OAAO,IAAI,CAAA,YAAA,EAAe,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AAAA,EAChF,CAAA,MAAO;AAEL,IAAA,oBAAA,CAAqB,MAAM,CAAA;AAAA,EAC7B;AACF,CAAC,CAAA;AAEH,OAAA,CACG,QAAQ,SAAS,CAAA,CACjB,WAAA,CAAY,4DAA4D,EACxE,MAAA,CAAO,aAAA,EAAe,wCAAwC,CAAA,CAC9D,OAAO,WAAA,EAAa,gDAAgD,CAAA,CACpE,MAAA,CAAO,OAAO,OAAA,KAAmD;AAChE,EAAA,MAAM,WAAW,WAAA,EAAY;AAE7B,EAAA,MAAM,UAAA,GAAa,MAAM,eAAA,CAAgB,QAAQ,CAAA;AACjD,EAAA,MAAM,KAAA,GAAQ,MAAM,eAAA,CAAgB,QAAQ,CAAA;AAE5C,EAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,IAAA,OAAA,CAAQ,IAAI,sCAAsC,CAAA;AAClD,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,kBAAA,EAAqB,UAAU,CAAA,CAAE,CAAA;AAC7C,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,mBAAA,EAAsB,KAAA,GAAQ,KAAA,GAAQ,IAAI,CAAA,CAAE,CAAA;AACxD,IAAA;AAAA,EACF;AAEA,EAAA,IAAI,CAAC,KAAA,IAAS,CAAC,OAAA,CAAQ,KAAA,EAAO;AAC5B,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,uBAAA,EAA0B,UAAU,CAAA,0BAAA,EAA6B,mBAAmB,CAAA,EAAA,CAAI,CAAA;AACpG,IAAA,OAAA,CAAQ,IAAI,gCAAgC,CAAA;AAC5C,IAAA;AAAA,EACF;AAEA,EAAA,OAAA,CAAQ,IAAI,uBAAuB,CAAA;AACnC,EAAA,MAAM,MAAA,GAAS,MAAM,OAAA,CAAQ,QAAQ,CAAA;AAErC,EAAA,OAAA,CAAQ,IAAI,wBAAwB,CAAA;AACpC,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,YAAA,EAAe,MAAA,CAAO,QAAQ,CAAA,UAAA,CAAY,CAAA;AACtD,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,sBAAA,EAAyB,MAAA,CAAO,iBAAiB,CAAA,CAAE,CAAA;AAC/D,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,qBAAA,EAAwB,MAAA,CAAO,gBAAgB,CAAA,CAAE,CAAA;AAG7D,EAAA,MAAM,aAAa,QAAQ,CAAA;AAC3B,EAAA,OAAA,CAAQ,IAAI,kBAAkB,CAAA;AAChC,CAAC,CAAA;AAEH,OAAA,CACG,QAAQ,QAAQ,CAAA,CAChB,WAAA,CAAY,kCAAkC,EAC9C,MAAA,CAAO,gBAAA,EAAkB,wDAAwD,CAAA,CACjF,OAAO,eAAA,EAAiB,4CAA4C,CAAA,CACpE,MAAA,CAAO,OAAO,OAAA,KAA+C;AAC5D,EAAA,MAAM,WAAW,WAAA,EAAY;AAE7B,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAE9C,EAAA,IAAI,QAAA,GAAW,OAAA;AAGf,EAAA,IAAI,QAAQ,KAAA,EAAO;AACjB,IAAA,MAAM,SAAA,GAAY,IAAI,IAAA,CAAK,OAAA,CAAQ,KAAK,CAAA;AACxC,IAAA,IAAI,MAAA,CAAO,KAAA,CAAM,SAAA,CAAU,OAAA,EAAS,CAAA,EAAG;AACrC,MAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,qBAAA,EAAwB,OAAA,CAAQ,KAAK,CAAA,wCAAA,CAA0C,CAAA;AAC7F,MAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,IAChB;AACA,IAAA,QAAA,GAAW,QAAA,CAAS,OAAO,CAAC,MAAA,KAAW,IAAI,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA,IAAK,SAAS,CAAA;AAAA,EAC9E;AAGA,EAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,IAAA,MAAM,UAAA,GAAa,OAAA,CAAQ,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,CAAE,IAAA,EAAM,CAAA;AAC9D,IAAA,QAAA,GAAW,QAAA,CAAS,MAAA,CAAO,CAAC,MAAA,KAAW,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,CAAC,GAAA,KAAQ,UAAA,CAAW,QAAA,CAAS,GAAG,CAAC,CAAC,CAAA;AAAA,EAC5F;AAGA,EAAA,OAAA,CAAQ,IAAI,IAAA,CAAK,SAAA,CAAU,QAAA,EAAU,IAAA,EAAM,kBAAkB,CAAC,CAAA;AAChE,CAAC,CAAA;AAEH,OAAA,CACG,OAAA,CAAQ,eAAe,CAAA,CACvB,WAAA,CAAY,kCAAkC,CAAA,CAC9C,MAAA,CAAO,OAAO,IAAA,KAAiB;AAC9B,EAAA,MAAM,WAAW,WAAA,EAAY;AAG7B,EAAA,IAAI,OAAA;AACJ,EAAA,IAAI;AACF,IAAA,MAAM,EAAE,QAAA,EAAAJ,SAAAA,EAAS,GAAI,MAAM,OAAO,aAAkB,CAAA;AACpD,IAAA,OAAA,GAAU,MAAMA,SAAAA,CAAS,IAAA,EAAM,OAAO,CAAA;AAAA,EACxC,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,O
AAQ,GAAA,CAA8B,IAAA;AAC5C,IAAA,IAAI,SAAS,QAAA,EAAU;AACrB,MAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,uBAAA,EAA0B,IAAI,CAAA,CAAE,CAAA;AAAA,IAChD,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,oBAAA,EAAwB,GAAA,CAAc,OAAO,CAAA,CAAE,CAAA;AAAA,IAC/D;AACA,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAGA,EAAA,MAAM,EAAE,OAAA,EAAS,eAAA,EAAgB,GAAI,MAAM,YAAY,QAAQ,CAAA;AAC/D,EAAA,MAAM,WAAA,GAAc,IAAI,GAAA,CAAI,eAAA,CAAgB,IAAI,CAAC,CAAA,KAAM,CAAA,CAAE,EAAE,CAAC,CAAA;AAG5D,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA;AAChC,EAAA,IAAI,QAAA,GAAW,CAAA;AACf,EAAA,IAAI,OAAA,GAAU,CAAA;AACd,EAAA,IAAI,OAAA,GAAU,CAAA;AAEd,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,MAAM,OAAA,GAAU,KAAK,IAAA,EAAK;AAC1B,IAAA,IAAI,CAAC,OAAA,EAAS;AAGd,IAAA,IAAI,MAAA;AACJ,IAAA,IAAI;AACF,MAAA,MAAA,GAAS,IAAA,CAAK,MAAM,OAAO,CAAA;AAAA,IAC7B,CAAA,CAAA,MAAQ;AACN,MAAA,OAAA,EAAA;AACA,MAAA;AAAA,IACF;AAGA,IAAA,MAAM,MAAA,GAAS,YAAA,CAAa,SAAA,CAAU,MAAM,CAAA;AAC5C,IAAA,IAAI,CAAC,OAAO,OAAA,EAAS;AACnB,MAAA,OAAA,EAAA;AACA,MAAA;AAAA,IACF;AAEA,IAAA,MAAM,SAAiB,MAAA,CAAO,IAAA;AAG9B,IAAA,IAAI,WAAA,CAAY,GAAA,CAAI,MAAA,CAAO,EAAE,CAAA,EAAG;AAC9B,MAAA,OAAA,EAAA;AACA,MAAA;AAAA,IACF;AAGA,IAAA,MAAM,YAAA,CAAa,UAAU,MAAM,CAAA;AACnC,IAAA,WAAA,CAAY,GAAA,CAAI,OAAO,EAAE,CAAA;AACzB,IAAA,QAAA,EAAA;AAAA,EACF;AAGA,EAAA,MAAM,UAAA,GAAa,QAAA,KAAa,CAAA,GAAI,QAAA,GAAW,SAAA;AAC/C,EAAA,MAAM,QAAkB,EAAC;AACzB,EAAA,IAAI,UAAU,CAAA,EAAG,KAAA,CAAM,IAAA,CAAK,CAAA,EAAG,OAAO,CAAA,QAAA,CAAU,CAAA;AAChD,EAAA,IAAI,UAAU,CAAA,EAAG,KAAA,CAAM,IAAA,CAAK,CAAA,EAAG,OAAO,CAAA,QAAA,CAAU,CAAA;AAEhD,EAAA,IAAI,KAAA,CAAM,SAAS,CAAA,EAAG;AACpB,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,SAAA,EAAY,QAAQ,CAAA,CAAA,EAAI,UAAU,KAAK,KAAA,CAAM,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA,CAAG,CAAA;AAAA,EACxE,CAAA,MAAO;AACL,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,SAAA,EAAY,QAAQ,CAAA,CAAA,EAAI,UAAU,CAAA,CAAE,CAAA;AAAA,EAClD;AACF,CAAC,CAAA;AAEH,OAAA,CACG,QAAQ,OAAO,CAAA,CACf,YAAY,qCAAqC,CAAA,CACjD,OAAO,YAAY;AAClB,EAAA,MAAM,WAAW,WAAA,EAAY;AAG7B,EAAA,MAAM,aAAa,QAAQ,CAAA;AAG3B,EAAA,MAAM,EAAE,OAAA,EAAQ,GAAI,MAAM,YAAY,QAAQ,CAAA;AAC9C,EAAA,MAAM,YAAA,GAAe,MAAM,eAAA,CAAgB,QAAQ,CAAA;AACnD,EAAA,MAAM,eAAe,OAAA,CAAQ,MAAA;AAG7B,EAAA,MAAM,cAAA,GAAiB,kBAAkB,QAAQ,CAAA;AACjD,EAAA,MAAM,eAAA,GAAkB,eAAe,MAAA,CAAO,CAAC,KAAK,CAAA,KAAM,GAAA,GAAM,CAAA,CAAE,KAAA,EAAO,CAAC,CAAA;AAC1E,EAAA,MAAM,gBAAgB,YAAA,GAAe,CAAA,GAAA,CAAK,kBAAkB,YAAA,EAAc,OAAA,CAAQ,kBAAkB,CAAA,GAAI,KAAA;AAGxG,EAAA,MAAM,SAAA,GAAYJ,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC7C,EAAA,MAAM,MAAA,GAASA,IAAAA,CAAK,QAAA,EAAU,OAAO,CAAA;AAErC,EAAA,IAAI,QAAA,GAAW,CAAA;AACf,EAAA,IAAI,SAAA,GAAY,CAAA;AAEhB,EAAA,IAAI;AACF,IAAA,QAAA,GAAWY,QAAAA,CAAS,SAAS,CAAA,CAAE,IAAA;AAAA,EACjC,CAAA,CAAA,MAAQ;AAAA,EAER;AAEA,EAAA,IAAI;AACF,IAAA,SAAA,GAAYA,QAAAA,CAAS,MAAM,CAAA,CAAE,IAAA;AAAA,EAC/B,CAAA,CAAA,MAAQ;AAAA,EAER;AAEA,EAAA,MAAM,YAAY,QAAA,GAAW,SAAA;AAG7B,EAAA,MAAM,WAAA,GAAc,YAAA,GAAe,CAAA,GAAI,CAAA,EAAA,EAAK,YAAY,CAAA,SAAA,CAAA,GAAc,EAAA;AACtE,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,SAAA,EAAY,YAAY,CAAA,MAAA,EAAS,WAAW,CAAA,CAAE,CAAA;AAC1D,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,YAAA,EAAe,eAAe,CAAA,QAAA,EAAW,aAAa,CAAA,eAAA,CAAiB,CAAA;AACnF,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,SAAA,EAAY,WAAA,CAAY,SAAS,CAAC,CAAA,SAAA,EAAY,WAAA,CAAY,SAAS,CAAC,CAAA,QAAA,EAAW,WAAA,CAAY,QAAQ,CAAC,CAAA,CAAA,CAAG,CAAA;AACrH,CAAC,CAAA;AAUH,OAAA,CACG,OAAA,CAAQ,cAAc,CAAA,CACtB,WAAA,CAAY,gDAAgD,CAAA,CAC5D,MAAA,CAAO,QAAA,EAAU,gBAAgB,CAAA,CACjC,MAAA,CAAO,eAA+B,OAAA,EAA6B;AAClE,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,aAAA,CAAc,IAAI,CAAA;AACpC,EAAA,MAAM,OAAA,GAAU,MAAM,kBAAA,CAAmB,QAAQ,CAAA;AAEjD,EAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,IAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU,EAAE,SAAS,KAAA,EAAO,OAAA,CAAQ,MAAA,EAAQ,CAAC,
CAAA;AAC9D,IAAA;AAAA,EACF;AAEA,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxB,IAAA,OAAA,CAAQ,IAAI,iCAAiC,CAAA;AAC7C,IAAA;AAAA,EACF;AAEA,EAAA,yBAAA,CAA0B,SAAS,KAAK,CAAA;AAC1C,CAAC,CAAA;AAWH,OAAA,CACG,OAAA,CAAQ,YAAY,CAAA,CACpB,WAAA,CAAY,qCAAqC,CAAA,CACjD,MAAA,CAAO,iBAAiB,oBAAoB,CAAA,CAC5C,OAAO,QAAA,EAAU,gBAAgB,EACjC,MAAA,CAAO,sBAAA,EAAwB,mBAAmB,wBAAwB,CAAA,CAC1E,MAAA,CAAO,eAA+B,OAAA,EAA2D;AAChG,EAAA,MAAM,WAAW,WAAA,EAAY;AAC7B,EAAA,MAAM,KAAA,GAAQ,UAAA,CAAW,OAAA,CAAQ,KAAA,EAAO,OAAO,CAAA;AAC/C,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,aAAA,CAAc,IAAI,CAAA;AAGpC,EAAA,MAAM,QAAA,GAAW,OAAA,CAAQ,IAAA,IAAS,MAAM,iBAAA,EAAkB;AAE1D,EAAA,IAAI,CAAC,QAAA,EAAU;AACb,IAAA,GAAA,CAAI,MAAM,4DAA4D,CAAA;AACtE,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAGA,EAAA,IAAI,CAAC,kBAAiB,EAAG;AACvB,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,OAAA,CAAQ,GAAA,CAAI,KAAK,SAAA,CAAU;AAAA,QACzB,KAAA,EAAO,+BAAA;AAAA,QACP,MAAA,EAAQ;AAAA,OACT,CAAC,CAAA;AAAA,IACJ,CAAA,MAAO;AACL,MAAA,GAAA,CAAI,MAAM,+BAA+B,CAAA;AACzC,MAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,MAAA,OAAA,CAAQ,IAAI,wCAAwC,CAAA;AAAA,IACtD;AACA,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,IAAI;AACF,IAAA,MAAM,MAAA,GAAS,MAAM,eAAA,CAAgB,QAAA,EAAU,UAAU,KAAK,CAAA;AAE9D,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,mBAAA,CAAoB,OAAO,OAAO,CAAA;AAClC,MAAA;AAAA,IACF;AAEA,IAAA,IAAI,MAAA,CAAO,OAAA,CAAQ,MAAA,KAAW,CAAA,EAAG;AAC/B,MAAA,OAAA,CAAQ,IAAI,0CAA0C,CAAA;AACtD,MAAA;AAAA,IACF;AAEA,IAAA,oBAAA,CAAqB,MAAA,CAAO,SAAS,KAAK,CAAA;AAAA,EAC5C,SAAS,GAAA,EAAK;AAEZ,IAAA,MAAM,OAAA,GAAU,GAAA,YAAe,KAAA,GAAQ,GAAA,CAAI,OAAA,GAAU,eAAA;AACrD,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,OAAA,CAAQ,IAAI,IAAA,CAAK,SAAA,CAAU,EAAE,KAAA,EAAO,OAAA,EAAS,CAAC,CAAA;AAAA,IAChD,CAAA,MAAO;AACL,MAAA,GAAA,CAAI,KAAA,CAAM,CAAA,sBAAA,EAAyB,OAAO,CAAA,CAAE,CAAA;AAAA,IAC9C;AACA,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AACF,CAAC,CAAA;AAEH,OAAA,CAAQ,KAAA,EAAM","file":"cli.js","sourcesContent":["/**\n * Lesson type definitions using Zod schemas\n */\n\nimport { createHash } from 'node:crypto';\nimport { z } from 'zod';\n\n// Source of lesson capture\nexport const SourceSchema = z.enum([\n 'user_correction',\n 'self_correction',\n 'test_failure',\n 'manual',\n]);\n\n// Context about when lesson was learned\nexport const ContextSchema = z.object({\n tool: z.string(),\n intent: z.string(),\n});\n\n// Code pattern (bad -> good)\nexport const PatternSchema = z.object({\n bad: z.string(),\n good: z.string(),\n});\n\n// Severity levels for lessons\nexport const SeveritySchema = z.enum(['high', 'medium', 'low']);\n\n// Lesson type - semantic marker for lesson quality tier\nexport const LessonTypeSchema = z.enum(['quick', 'full']);\n\n/**\n * Unified Lesson schema.\n *\n * The `type` field is a semantic marker:\n * - 'quick': Minimal lesson for fast capture\n * - 'full': Important lesson (typically has evidence/severity)\n *\n * All fields except core identity are optional for flexibility.\n * Semantic meaning is preserved through convention, not schema enforcement.\n */\nexport const LessonSchema = z.object({\n // Core identity (required)\n id: z.string(),\n type: LessonTypeSchema,\n trigger: z.string(),\n insight: z.string(),\n\n // Metadata (required)\n tags: z.array(z.string()),\n source: SourceSchema,\n context: ContextSchema,\n created: z.string(), // ISO8601\n confirmed: z.boolean(),\n\n // Relationships (required, can be empty arrays)\n supersedes: z.array(z.string()),\n related: z.array(z.string()),\n\n // Extended fields (optional - typically present for 'full' type)\n evidence: z.string().optional(),\n severity: SeveritySchema.optional(),\n pattern: 
PatternSchema.optional(),\n\n // Lifecycle fields (optional)\n deleted: z.boolean().optional(),\n retrievalCount: z.number().optional(),\n});\n\n// Tombstone for deletions (append-only delete marker)\nexport const TombstoneSchema = z.object({\n id: z.string(),\n deleted: z.literal(true),\n deletedAt: z.string(), // ISO8601\n});\n\n// Type exports\nexport type Lesson = z.infer<typeof LessonSchema>;\nexport type LessonType = z.infer<typeof LessonTypeSchema>;\nexport type Tombstone = z.infer<typeof TombstoneSchema>;\nexport type Source = z.infer<typeof SourceSchema>;\nexport type Severity = z.infer<typeof SeveritySchema>;\nexport type Context = z.infer<typeof ContextSchema>;\nexport type Pattern = z.infer<typeof PatternSchema>;\n\n/**\n * Generate deterministic lesson ID from insight text.\n * Format: L + 8 hex characters from SHA-256 hash\n */\nexport function generateId(insight: string): string {\n const hash = createHash('sha256').update(insight).digest('hex');\n return `L${hash.slice(0, 8)}`;\n}\n","/**\n * JSONL storage layer for lessons\n *\n * Append-only storage with last-write-wins deduplication.\n * Source of truth - git trackable.\n */\n\nimport { appendFile, mkdir, readFile } from 'node:fs/promises';\nimport { dirname, join } from 'node:path';\nimport { LessonSchema, type Lesson } from '../types.js';\n\n/** Relative path to lessons file from repo root */\nexport const LESSONS_PATH = '.claude/lessons/index.jsonl';\n\n/** Options for reading lessons */\nexport interface ReadLessonsOptions {\n /** If true, throw on first parse error. Default: false (skip errors) */\n strict?: boolean;\n /** Callback for each parse error in non-strict mode */\n onParseError?: (error: ParseError) => void;\n}\n\n/** Parse error details */\nexport interface ParseError {\n /** 1-based line number */\n line: number;\n /** Error message */\n message: string;\n /** Original error */\n cause: unknown;\n}\n\n/** Result of reading lessons */\nexport interface ReadLessonsResult {\n /** Successfully parsed lessons */\n lessons: Lesson[];\n /** Number of lines skipped due to errors */\n skippedCount: number;\n}\n\n/**\n * Append a lesson to the JSONL file.\n * Creates directory structure if missing.\n */\nexport async function appendLesson(repoRoot: string, lesson: Lesson): Promise<void> {\n const filePath = join(repoRoot, LESSONS_PATH);\n await mkdir(dirname(filePath), { recursive: true });\n\n const line = JSON.stringify(lesson) + '\\n';\n await appendFile(filePath, line, 'utf-8');\n}\n\n/**\n * Parse and validate a single JSON line.\n * @returns Parsed lesson or null if invalid\n */\nfunction parseJsonLine(\n line: string,\n lineNumber: number,\n strict: boolean,\n onParseError?: (error: ParseError) => void\n): Lesson | null {\n // Try to parse JSON\n let parsed: unknown;\n try {\n parsed = JSON.parse(line);\n } catch (err) {\n const parseError: ParseError = {\n line: lineNumber,\n message: `Invalid JSON: ${(err as Error).message}`,\n cause: err,\n };\n if (strict) {\n throw new Error(`Parse error on line ${lineNumber}: ${parseError.message}`);\n }\n onParseError?.(parseError);\n return null;\n }\n\n // Validate against schema\n const result = LessonSchema.safeParse(parsed);\n if (!result.success) {\n const parseError: ParseError = {\n line: lineNumber,\n message: `Schema validation failed: ${result.error.message}`,\n cause: result.error,\n };\n if (strict) {\n throw new Error(`Parse error on line ${lineNumber}: ${parseError.message}`);\n }\n onParseError?.(parseError);\n return null;\n }\n\n return 
result.data;\n}\n\n/**\n * Read all non-deleted lessons from the JSONL file.\n * Applies last-write-wins deduplication by ID.\n * Returns result object with lessons and skippedCount.\n *\n * @param repoRoot - Repository root directory\n * @param options - Optional settings for error handling\n * @returns Result with lessons array and count of skipped lines\n */\nexport async function readLessons(\n repoRoot: string,\n options: ReadLessonsOptions = {}\n): Promise<ReadLessonsResult> {\n const { strict = false, onParseError } = options;\n const filePath = join(repoRoot, LESSONS_PATH);\n\n let content: string;\n try {\n content = await readFile(filePath, 'utf-8');\n } catch (err) {\n if ((err as NodeJS.ErrnoException).code === 'ENOENT') {\n return { lessons: [], skippedCount: 0 };\n }\n throw err;\n }\n\n const lessons = new Map<string, Lesson>();\n let skippedCount = 0;\n\n const lines = content.split('\\n');\n for (let i = 0; i < lines.length; i++) {\n const trimmed = lines[i]!.trim();\n if (!trimmed) continue;\n\n const lesson = parseJsonLine(trimmed, i + 1, strict, onParseError);\n if (!lesson) {\n skippedCount++;\n continue;\n }\n\n if (lesson.deleted) {\n lessons.delete(lesson.id);\n } else {\n lessons.set(lesson.id, lesson);\n }\n }\n\n return { lessons: Array.from(lessons.values()), skippedCount };\n}\n","/**\n * SQLite storage layer with FTS5 for full-text search\n *\n * Rebuildable index - not the source of truth.\n * Stored in .claude/.cache (gitignored).\n */\n\nimport { createHash } from 'node:crypto';\nimport { mkdirSync, statSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport Database from 'better-sqlite3';\nimport type { Database as DatabaseType } from 'better-sqlite3';\n\nimport type { Lesson } from '../types.js';\n\nimport { LESSONS_PATH, readLessons } from './jsonl.js';\n\n/** Relative path to database file from repo root */\nexport const DB_PATH = '.claude/.cache/lessons.sqlite';\n\n/** SQL schema for lessons database */\nconst SCHEMA_SQL = `\n -- Main lessons table\n CREATE TABLE IF NOT EXISTS lessons (\n id TEXT PRIMARY KEY,\n type TEXT NOT NULL,\n trigger TEXT NOT NULL,\n insight TEXT NOT NULL,\n evidence TEXT,\n severity TEXT,\n tags TEXT NOT NULL DEFAULT '',\n source TEXT NOT NULL,\n context TEXT NOT NULL DEFAULT '{}',\n supersedes TEXT NOT NULL DEFAULT '[]',\n related TEXT NOT NULL DEFAULT '[]',\n created TEXT NOT NULL,\n confirmed INTEGER NOT NULL DEFAULT 0,\n deleted INTEGER NOT NULL DEFAULT 0,\n retrieval_count INTEGER NOT NULL DEFAULT 0,\n last_retrieved TEXT,\n embedding BLOB,\n content_hash TEXT\n );\n\n -- FTS5 virtual table for full-text search\n CREATE VIRTUAL TABLE IF NOT EXISTS lessons_fts USING fts5(\n id,\n trigger,\n insight,\n tags,\n content='lessons',\n content_rowid='rowid'\n );\n\n -- Trigger to sync FTS on INSERT\n CREATE TRIGGER IF NOT EXISTS lessons_ai AFTER INSERT ON lessons BEGIN\n INSERT INTO lessons_fts(rowid, id, trigger, insight, tags)\n VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags);\n END;\n\n -- Trigger to sync FTS on DELETE\n CREATE TRIGGER IF NOT EXISTS lessons_ad AFTER DELETE ON lessons BEGIN\n INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags)\n VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags);\n END;\n\n -- Trigger to sync FTS on UPDATE\n CREATE TRIGGER IF NOT EXISTS lessons_au AFTER UPDATE ON lessons BEGIN\n INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags)\n VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, 
old.tags);\n INSERT INTO lessons_fts(rowid, id, trigger, insight, tags)\n VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags);\n END;\n\n -- Index for common queries\n CREATE INDEX IF NOT EXISTS idx_lessons_created ON lessons(created);\n CREATE INDEX IF NOT EXISTS idx_lessons_confirmed ON lessons(confirmed);\n CREATE INDEX IF NOT EXISTS idx_lessons_severity ON lessons(severity);\n\n -- Metadata table for sync tracking\n CREATE TABLE IF NOT EXISTS metadata (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL\n );\n`;\n\n/**\n * Create database schema for lessons storage.\n */\nfunction createSchema(database: DatabaseType): void {\n database.exec(SCHEMA_SQL);\n}\n\nlet db: DatabaseType | null = null;\n\n/**\n * Compute deterministic content hash for embedding cache validation.\n * Format: SHA-256 hex of \"trigger insight\"\n */\nexport function contentHash(trigger: string, insight: string): string {\n return createHash('sha256').update(`${trigger} ${insight}`).digest('hex');\n}\n\n/**\n * Open or create the SQLite database.\n *\n * Creates directory structure and schema if needed.\n * Returns a singleton instance - subsequent calls return the same connection.\n *\n * **Resource lifecycle:**\n * - First call creates the database file (if needed) and opens a connection\n * - Connection uses WAL mode for better concurrent access\n * - Connection remains open until `closeDb()` is called\n *\n * **Note:** Most code should not call this directly. Higher-level functions\n * like `searchKeyword` and `rebuildIndex` call it internally.\n *\n * @param repoRoot - Path to repository root (database stored at `.claude/.cache/lessons.sqlite`)\n * @returns The singleton database connection\n *\n * @see {@link closeDb} for releasing resources\n */\nexport function openDb(repoRoot: string): DatabaseType {\n if (db) return db;\n\n const dbPath = join(repoRoot, DB_PATH);\n\n // Create directory synchronously (better-sqlite3 is sync)\n const dir = dirname(dbPath);\n mkdirSync(dir, { recursive: true });\n\n db = new Database(dbPath);\n\n // Enable WAL mode for better concurrent access\n db.pragma('journal_mode = WAL');\n\n createSchema(db);\n\n return db;\n}\n\n/**\n * Close the database connection and release resources.\n *\n * **Resource lifecycle:**\n * - The database is opened lazily on first call to `openDb()` or any function that uses it\n * (e.g., `searchKeyword`, `rebuildIndex`, `syncIfNeeded`, `getCachedEmbedding`)\n * - Once opened, the connection remains active until `closeDb()` is called\n * - After closing, subsequent database operations will reopen the connection\n *\n * **When to call:**\n * - At the end of CLI commands to ensure clean process exit\n * - When transitioning between repositories in long-running processes\n * - Before process exit in graceful shutdown handlers\n *\n * **Best practices for long-running processes:**\n * - In single-operation scripts: call before exit\n * - In daemon/server processes: call in shutdown handler\n * - Not necessary to call between operations in the same repository\n *\n * @example\n * ```typescript\n * // CLI command pattern\n * try {\n * await searchKeyword(repoRoot, 'typescript', 10);\n * // ... 
process results\n * } finally {\n * closeDb();\n * }\n *\n * // Graceful shutdown pattern\n * process.on('SIGTERM', () => {\n * closeDb();\n * process.exit(0);\n * });\n * ```\n */\nexport function closeDb(): void {\n if (db) {\n db.close();\n db = null;\n }\n}\n\n/**\n * Get cached embedding for a lesson if content hash matches.\n * Returns null if no cache exists or hash mismatches.\n */\nexport function getCachedEmbedding(\n repoRoot: string,\n lessonId: string,\n expectedHash?: string\n): number[] | null {\n const database = openDb(repoRoot);\n const row = database\n .prepare('SELECT embedding, content_hash FROM lessons WHERE id = ?')\n .get(lessonId) as { embedding: Buffer | null; content_hash: string | null } | undefined;\n\n if (!row || !row.embedding || !row.content_hash) {\n return null;\n }\n\n // If expected hash provided, validate it matches\n if (expectedHash && row.content_hash !== expectedHash) {\n return null;\n }\n\n // Convert Buffer to Float32Array then to number[]\n const float32 = new Float32Array(\n row.embedding.buffer,\n row.embedding.byteOffset,\n row.embedding.byteLength / 4\n );\n return Array.from(float32);\n}\n\n/**\n * Cache an embedding for a lesson with content hash.\n */\nexport function setCachedEmbedding(\n repoRoot: string,\n lessonId: string,\n embedding: Float32Array | number[],\n hash: string\n): void {\n const database = openDb(repoRoot);\n\n // Convert to Buffer for storage\n const float32 = embedding instanceof Float32Array ? embedding : new Float32Array(embedding);\n const buffer = Buffer.from(float32.buffer, float32.byteOffset, float32.byteLength);\n\n database\n .prepare('UPDATE lessons SET embedding = ?, content_hash = ? WHERE id = ?')\n .run(buffer, hash, lessonId);\n}\n\n/** DB row type for lessons table */\ninterface LessonRow {\n id: string;\n type: string;\n trigger: string;\n insight: string;\n evidence: string | null;\n severity: string | null;\n tags: string;\n source: string;\n context: string;\n supersedes: string;\n related: string;\n created: string;\n confirmed: number;\n deleted: number;\n retrieval_count: number;\n last_retrieved: string | null;\n embedding: Buffer | null;\n}\n\n/**\n * Convert a database row to a typed Lesson object.\n * Maps NULL to undefined for optional fields (lossless roundtrip).\n */\nfunction rowToLesson(row: LessonRow): Lesson {\n const lesson: Lesson = {\n id: row.id,\n type: row.type as 'quick' | 'full',\n trigger: row.trigger,\n insight: row.insight,\n tags: row.tags ? 
row.tags.split(',').filter(Boolean) : [],\n source: row.source as Lesson['source'],\n context: JSON.parse(row.context) as Lesson['context'],\n supersedes: JSON.parse(row.supersedes) as string[],\n related: JSON.parse(row.related) as string[],\n created: row.created,\n confirmed: row.confirmed === 1,\n };\n\n // Optional fields: map NULL -> undefined (lossless roundtrip)\n if (row.evidence !== null) {\n lesson.evidence = row.evidence;\n }\n if (row.severity !== null) {\n lesson.severity = row.severity as 'high' | 'medium' | 'low';\n }\n if (row.deleted === 1) {\n lesson.deleted = true;\n }\n if (row.retrieval_count > 0) {\n lesson.retrievalCount = row.retrieval_count;\n }\n\n return lesson;\n}\n\n/** Cached embedding with its content hash */\ninterface CachedEmbeddingData {\n embedding: Buffer;\n contentHash: string;\n}\n\n/**\n * Collect cached embeddings from existing lessons for preservation.\n */\nfunction collectCachedEmbeddings(database: DatabaseType): Map<string, CachedEmbeddingData> {\n const cache = new Map<string, CachedEmbeddingData>();\n const rows = database\n .prepare('SELECT id, embedding, content_hash FROM lessons WHERE embedding IS NOT NULL')\n .all() as Array<{ id: string; embedding: Buffer; content_hash: string | null }>;\n\n for (const row of rows) {\n if (row.embedding && row.content_hash) {\n cache.set(row.id, { embedding: row.embedding, contentHash: row.content_hash });\n }\n }\n return cache;\n}\n\n/** SQL for inserting a lesson row */\nconst INSERT_LESSON_SQL = `\n INSERT INTO lessons (id, type, trigger, insight, evidence, severity, tags, source, context, supersedes, related, created, confirmed, deleted, retrieval_count, last_retrieved, embedding, content_hash)\n VALUES (@id, @type, @trigger, @insight, @evidence, @severity, @tags, @source, @context, @supersedes, @related, @created, @confirmed, @deleted, @retrieval_count, @last_retrieved, @embedding, @content_hash)\n`;\n\n/**\n * Get the mtime of the JSONL file, or null if it doesn't exist.\n */\nfunction getJsonlMtime(repoRoot: string): number | null {\n const jsonlPath = join(repoRoot, LESSONS_PATH);\n try {\n const stat = statSync(jsonlPath);\n return stat.mtimeMs;\n } catch {\n return null;\n }\n}\n\n/**\n * Get the last synced mtime from metadata table.\n */\nfunction getLastSyncMtime(database: DatabaseType): number | null {\n const row = database\n .prepare('SELECT value FROM metadata WHERE key = ?')\n .get('last_sync_mtime') as { value: string } | undefined;\n return row ? 
parseFloat(row.value) : null;\n}\n\n/**\n * Store the last synced mtime in metadata table.\n */\nfunction setLastSyncMtime(database: DatabaseType, mtime: number): void {\n database\n .prepare('INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)')\n .run('last_sync_mtime', mtime.toString());\n}\n\n/**\n * Rebuild the SQLite index from the JSONL source of truth.\n * Preserves embeddings where content hash is unchanged.\n * Updates the last sync mtime after successful rebuild.\n */\nexport async function rebuildIndex(repoRoot: string): Promise<void> {\n const database = openDb(repoRoot);\n const { lessons } = await readLessons(repoRoot);\n\n const cachedEmbeddings = collectCachedEmbeddings(database);\n database.exec('DELETE FROM lessons');\n\n if (lessons.length === 0) {\n // Still update mtime even for empty file\n const mtime = getJsonlMtime(repoRoot);\n if (mtime !== null) {\n setLastSyncMtime(database, mtime);\n }\n return;\n }\n\n const insert = database.prepare(INSERT_LESSON_SQL);\n const insertMany = database.transaction((items: Lesson[]) => {\n for (const lesson of items) {\n const newHash = contentHash(lesson.trigger, lesson.insight);\n const cached = cachedEmbeddings.get(lesson.id);\n const hasValidCache = cached && cached.contentHash === newHash;\n\n insert.run({\n id: lesson.id,\n type: lesson.type,\n trigger: lesson.trigger,\n insight: lesson.insight,\n evidence: lesson.evidence ?? null,\n severity: lesson.severity ?? null,\n tags: lesson.tags.join(','),\n source: lesson.source,\n context: JSON.stringify(lesson.context),\n supersedes: JSON.stringify(lesson.supersedes),\n related: JSON.stringify(lesson.related),\n created: lesson.created,\n confirmed: lesson.confirmed ? 1 : 0,\n deleted: lesson.deleted ? 1 : 0,\n retrieval_count: lesson.retrievalCount ?? 0,\n last_retrieved: null, // Reset on rebuild since we're rebuilding from source\n embedding: hasValidCache ? cached.embedding : null,\n content_hash: hasValidCache ? 
cached.contentHash : null,\n });\n }\n });\n\n insertMany(lessons);\n\n // Update last sync mtime\n const mtime = getJsonlMtime(repoRoot);\n if (mtime !== null) {\n setLastSyncMtime(database, mtime);\n }\n}\n\n/** Options for syncIfNeeded */\nexport interface SyncOptions {\n /** Force rebuild even if JSONL unchanged */\n force?: boolean;\n}\n\n/**\n * Sync the index if JSONL has changed since last sync.\n * Returns true if a rebuild was performed, false if skipped.\n */\nexport async function syncIfNeeded(\n repoRoot: string,\n options: SyncOptions = {}\n): Promise<boolean> {\n const { force = false } = options;\n\n // Check JSONL mtime\n const jsonlMtime = getJsonlMtime(repoRoot);\n if (jsonlMtime === null && !force) {\n // No JSONL file exists\n return false;\n }\n\n const database = openDb(repoRoot);\n const lastSyncMtime = getLastSyncMtime(database);\n\n // Rebuild if forced, no previous sync, or JSONL is newer\n const needsRebuild = force || lastSyncMtime === null || (jsonlMtime !== null && jsonlMtime > lastSyncMtime);\n\n if (needsRebuild) {\n await rebuildIndex(repoRoot);\n return true;\n }\n\n return false;\n}\n\n/**\n * Search lessons using FTS5 keyword search.\n * Returns matching lessons up to the specified limit.\n * Increments retrieval count for all returned lessons.\n */\nexport async function searchKeyword(\n repoRoot: string,\n query: string,\n limit: number\n): Promise<Lesson[]> {\n const database = openDb(repoRoot);\n\n // Check if there are any lessons\n const countResult = database.prepare('SELECT COUNT(*) as cnt FROM lessons').get() as {\n cnt: number;\n };\n if (countResult.cnt === 0) return [];\n\n // Use FTS5 MATCH for search\n const rows = database\n .prepare(\n `\n SELECT l.*\n FROM lessons l\n JOIN lessons_fts fts ON l.rowid = fts.rowid\n WHERE lessons_fts MATCH ?\n LIMIT ?\n `\n )\n .all(query, limit) as LessonRow[];\n\n // Increment retrieval count for matched lessons\n if (rows.length > 0) {\n incrementRetrievalCount(repoRoot, rows.map((r) => r.id));\n }\n\n return rows.map(rowToLesson);\n}\n\n/** Retrieval statistics for a lesson */\nexport interface RetrievalStat {\n id: string;\n count: number;\n lastRetrieved: string | null;\n}\n\n/**\n * Increment retrieval count for a list of lesson IDs.\n * Updates both count and last_retrieved timestamp.\n * Non-existent IDs are silently ignored.\n */\nexport function incrementRetrievalCount(repoRoot: string, lessonIds: string[]): void {\n if (lessonIds.length === 0) return;\n\n const database = openDb(repoRoot);\n const now = new Date().toISOString();\n\n const update = database.prepare(`\n UPDATE lessons\n SET retrieval_count = retrieval_count + 1,\n last_retrieved = ?\n WHERE id = ?\n `);\n\n const updateMany = database.transaction((ids: string[]) => {\n for (const id of ids) {\n update.run(now, id);\n }\n });\n\n updateMany(lessonIds);\n}\n\n/**\n * Get retrieval statistics for all lessons.\n * Returns id, retrieval count, and last retrieved timestamp for each lesson.\n */\nexport function getRetrievalStats(repoRoot: string): RetrievalStat[] {\n const database = openDb(repoRoot);\n\n const rows = database\n .prepare('SELECT id, retrieval_count, last_retrieved FROM lessons')\n .all() as Array<{ id: string; retrieval_count: number; last_retrieved: string | null }>;\n\n return rows.map((row) => ({\n id: row.id,\n count: row.retrieval_count,\n lastRetrieved: row.last_retrieved,\n }));\n}\n","/**\n * Quality filters for lesson capture\n *\n * Filters to ensure lessons are:\n * - Novel (not duplicate)\n * - Specific 
(not vague)\n * - Actionable (contains action words)\n */\n\nimport { searchKeyword, syncIfNeeded } from '../storage/sqlite.js';\n\n/** Default similarity threshold for duplicate detection */\nconst DEFAULT_SIMILARITY_THRESHOLD = 0.8;\n\n/** Result of novelty check */\nexport interface NoveltyResult {\n novel: boolean;\n reason?: string;\n existingId?: string;\n}\n\n/** Options for novelty check */\nexport interface NoveltyOptions {\n threshold?: number;\n}\n\n/**\n * Check if an insight is novel (not a duplicate of existing lessons).\n * Uses keyword search to find potentially similar lessons.\n */\nexport async function isNovel(\n repoRoot: string,\n insight: string,\n options: NoveltyOptions = {}\n): Promise<NoveltyResult> {\n const threshold = options.threshold ?? DEFAULT_SIMILARITY_THRESHOLD;\n\n // Sync index if JSONL has changed\n await syncIfNeeded(repoRoot);\n\n // Extract key words for search (take first 3 significant words)\n const words = insight\n .toLowerCase()\n .replace(/[^a-z0-9\\s]/g, '')\n .split(/\\s+/)\n .filter((w) => w.length > 3)\n .slice(0, 3);\n\n if (words.length === 0) {\n return { novel: true };\n }\n\n // Search for each word and collect results\n const searchQuery = words.join(' OR ');\n const results = await searchKeyword(repoRoot, searchQuery, 10);\n\n if (results.length === 0) {\n return { novel: true };\n }\n\n // Check similarity using simple word overlap (since we may not have embeddings)\n const insightWords = new Set(insight.toLowerCase().split(/\\s+/));\n\n for (const lesson of results) {\n const lessonWords = new Set(lesson.insight.toLowerCase().split(/\\s+/));\n\n // Calculate Jaccard similarity\n const intersection = [...insightWords].filter((w) => lessonWords.has(w)).length;\n const union = new Set([...insightWords, ...lessonWords]).size;\n const similarity = union > 0 ? 
intersection / union : 0;\n\n if (similarity >= threshold) {\n return {\n novel: false,\n reason: `Found similar existing lesson: \"${lesson.insight.slice(0, 50)}...\"`,\n existingId: lesson.id,\n };\n }\n\n // Also check exact match\n if (lesson.insight.toLowerCase() === insight.toLowerCase()) {\n return {\n novel: false,\n reason: `Exact duplicate found`,\n existingId: lesson.id,\n };\n }\n }\n\n return { novel: true };\n}\n\n/** Minimum word count for a specific insight */\nconst MIN_WORD_COUNT = 4;\n\n/** Vague patterns that indicate non-specific advice */\nconst VAGUE_PATTERNS = [\n /\\bwrite better\\b/i,\n /\\bbe careful\\b/i,\n /\\bremember to\\b/i,\n /\\bmake sure\\b/i,\n /\\btry to\\b/i,\n /\\bdouble check\\b/i,\n];\n\n/** Generic \"always/never\" phrases (short, lacking specificity) */\nconst GENERIC_IMPERATIVE_PATTERN = /^(always|never)\\s+\\w+(\\s+\\w+){0,2}$/i;\n\n/** Result of specificity check */\nexport interface SpecificityResult {\n specific: boolean;\n reason?: string;\n}\n\n/**\n * Check if an insight is specific enough to be useful.\n * Rejects vague, generic advice that doesn't provide actionable guidance.\n */\nexport function isSpecific(insight: string): SpecificityResult {\n // Check minimum length first\n const words = insight.trim().split(/\\s+/).filter((w) => w.length > 0);\n if (words.length < MIN_WORD_COUNT) {\n return { specific: false, reason: 'Insight is too short to be actionable' };\n }\n\n // Check for vague patterns\n for (const pattern of VAGUE_PATTERNS) {\n if (pattern.test(insight)) {\n return { specific: false, reason: 'Insight matches a vague pattern' };\n }\n }\n\n // Check for generic \"Always X\" or \"Never X\" phrases\n if (GENERIC_IMPERATIVE_PATTERN.test(insight)) {\n return { specific: false, reason: 'Insight matches a vague pattern' };\n }\n\n return { specific: true };\n}\n\n/** Action word patterns that indicate actionable guidance */\nconst ACTION_PATTERNS = [\n /\\buse\\s+.+\\s+instead\\s+of\\b/i, // \"use X instead of Y\"\n /\\bprefer\\s+.+\\s+(over|to)\\b/i, // \"prefer X over Y\" or \"prefer X to Y\"\n /\\balways\\s+.+\\s+when\\b/i, // \"always X when Y\"\n /\\bnever\\s+.+\\s+without\\b/i, // \"never X without Y\"\n /\\bavoid\\s+(using\\s+)?\\w+/i, // \"avoid X\" or \"avoid using X\"\n /\\bcheck\\s+.+\\s+before\\b/i, // \"check X before Y\"\n /^(run|use|add|remove|install|update|configure|set|enable|disable)\\s+/i, // Imperative commands at start\n];\n\n/** Result of actionability check */\nexport interface ActionabilityResult {\n actionable: boolean;\n reason?: string;\n}\n\n/**\n * Check if an insight contains actionable guidance.\n * Returns false for pure observations or questions.\n */\nexport function isActionable(insight: string): ActionabilityResult {\n // Check for action patterns\n for (const pattern of ACTION_PATTERNS) {\n if (pattern.test(insight)) {\n return { actionable: true };\n }\n }\n\n return { actionable: false, reason: 'Insight lacks clear action guidance' };\n}\n\n/** Result of combined quality check */\nexport interface ProposeResult {\n shouldPropose: boolean;\n reason?: string;\n}\n\n/**\n * Combined quality check for lesson proposals.\n * Returns true only if insight is novel, specific, AND actionable.\n */\nexport async function shouldPropose(\n repoRoot: string,\n insight: string\n): Promise<ProposeResult> {\n // Check specificity first (fast, no DB)\n const specificResult = isSpecific(insight);\n if (!specificResult.specific) {\n return { shouldPropose: false, reason: specificResult.reason };\n }\n\n // 
Check actionability (fast, no DB)\n const actionableResult = isActionable(insight);\n if (!actionableResult.actionable) {\n return { shouldPropose: false, reason: actionableResult.reason };\n }\n\n // Check novelty (requires DB lookup)\n const noveltyResult = await isNovel(repoRoot, insight);\n if (!noveltyResult.novel) {\n return { shouldPropose: false, reason: noveltyResult.reason };\n }\n\n return { shouldPropose: true };\n}\n","/**\n * Trigger detection for automatic lesson capture\n *\n * Detects patterns that indicate potential learning opportunities:\n * - User corrections\n * - Self-corrections\n * - Test failures\n */\n\nimport type { Context } from '../types.js';\n\n/** Signal data for correction detection */\nexport interface CorrectionSignal {\n messages: string[];\n context: Context;\n}\n\n/** Detected correction result */\nexport interface DetectedCorrection {\n trigger: string;\n correctionMessage: string;\n context: Context;\n}\n\n/** User correction patterns */\nconst USER_CORRECTION_PATTERNS = [\n /\\bno\\b[,.]?\\s/i, // \"no, ...\" or \"no ...\"\n /\\bwrong\\b/i, // \"wrong\"\n /\\bactually\\b/i, // \"actually...\"\n /\\bnot that\\b/i, // \"not that\"\n /\\bi meant\\b/i, // \"I meant\"\n];\n\n/**\n * Detect user correction signals in conversation.\n *\n * Looks for patterns that indicate the user is correcting Claude's\n * understanding or actions.\n *\n * @param signals - Messages and context to analyze\n * @returns Detected correction or null if none found\n */\nexport function detectUserCorrection(signals: CorrectionSignal): DetectedCorrection | null {\n const { messages, context } = signals;\n\n if (messages.length < 2) {\n return null;\n }\n\n // Check later messages for correction patterns\n for (let i = 1; i < messages.length; i++) {\n const message = messages[i];\n if (!message) continue;\n\n for (const pattern of USER_CORRECTION_PATTERNS) {\n if (pattern.test(message)) {\n return {\n trigger: `User correction during ${context.intent}`,\n correctionMessage: message,\n context,\n };\n }\n }\n }\n\n return null;\n}\n\n/** Edit history entry */\nexport interface EditEntry {\n file: string;\n success: boolean;\n timestamp: number;\n}\n\n/** Edit history for self-correction detection */\nexport interface EditHistory {\n edits: EditEntry[];\n}\n\n/** Detected self-correction */\nexport interface DetectedSelfCorrection {\n file: string;\n trigger: string;\n}\n\n/**\n * Detect self-correction patterns in edit history.\n *\n * Looks for edit→fail→re-edit patterns on the same file,\n * which indicate Claude had to correct its own work.\n *\n * @param history - Edit history to analyze\n * @returns Detected self-correction or null if none found\n */\nexport function detectSelfCorrection(history: EditHistory): DetectedSelfCorrection | null {\n const { edits } = history;\n\n if (edits.length < 3) {\n return null;\n }\n\n // Look for edit→fail→re-edit pattern on same file\n for (let i = 0; i <= edits.length - 3; i++) {\n const first = edits[i];\n const second = edits[i + 1];\n const third = edits[i + 2];\n\n if (!first || !second || !third) continue;\n\n // Pattern: success → fail → success on same file\n if (\n first.file === second.file &&\n second.file === third.file &&\n first.success &&\n !second.success &&\n third.success\n ) {\n return {\n file: first.file,\n trigger: `Self-correction on ${first.file}`,\n };\n }\n }\n\n return null;\n}\n\n/** Test result for failure detection */\nexport interface TestResult {\n passed: boolean;\n output: string;\n testFile: 
string;\n}\n\n/** Detected test failure */\nexport interface DetectedTestFailure {\n testFile: string;\n errorOutput: string;\n trigger: string;\n}\n\n/**\n * Detect test failure patterns.\n *\n * When tests fail, this creates a potential learning opportunity\n * if the failure is later fixed.\n *\n * @param testResult - Test result to analyze\n * @returns Detected test failure or null if tests passed\n */\nexport function detectTestFailure(testResult: TestResult): DetectedTestFailure | null {\n if (testResult.passed) {\n return null;\n }\n\n // Extract first meaningful error line for trigger\n const lines = testResult.output.split('\\n').filter((line) => line.trim().length > 0);\n const errorLine = lines.find((line) => /error|fail|assert/i.test(line)) ?? lines[0] ?? '';\n\n return {\n testFile: testResult.testFile,\n errorOutput: testResult.output,\n trigger: `Test failure in ${testResult.testFile}: ${errorLine.slice(0, 100)}`,\n };\n}\n","/**\n * Trigger detection integration\n *\n * Orchestrates detection -> quality filter -> lesson proposal flow.\n * Provides a high-level API for CLI and hooks.\n */\n\nimport * as fs from 'node:fs/promises';\n\nimport type { Source } from '../types.js';\nimport { shouldPropose } from './quality.js';\nimport {\n detectUserCorrection,\n detectSelfCorrection,\n detectTestFailure,\n} from './triggers.js';\nimport type {\n CorrectionSignal,\n EditHistory,\n TestResult,\n} from './triggers.js';\n\n/** Detection input types */\nexport type DetectionType = 'user' | 'self' | 'test';\n\n/** Input for user correction detection */\nexport interface UserDetectionInput {\n type: 'user';\n data: CorrectionSignal;\n}\n\n/** Input for self correction detection */\nexport interface SelfDetectionInput {\n type: 'self';\n data: EditHistory;\n}\n\n/** Input for test failure detection */\nexport interface TestDetectionInput {\n type: 'test';\n data: TestResult;\n}\n\n/** Union type for all detection inputs */\nexport type DetectionInput = UserDetectionInput | SelfDetectionInput | TestDetectionInput;\n\n/** Result of successful detection */\nexport interface DetectionResult {\n trigger: string;\n source: Source;\n proposedInsight: string;\n}\n\n/**\n * Detect triggers and propose lessons.\n *\n * Runs the appropriate detector based on input type, then filters\n * through quality checks. 
Returns a proposal if detection passes\n * all quality filters.\n *\n * @param repoRoot - Repository root path\n * @param input - Detection input with type and data\n * @returns Detection result with proposed insight, or null\n */\nexport async function detectAndPropose(\n repoRoot: string,\n input: DetectionInput\n): Promise<DetectionResult | null> {\n const detected = runDetector(input);\n if (!detected) {\n return null;\n }\n\n const { trigger, source, proposedInsight } = detected;\n\n // Run quality filters on proposed insight\n const quality = await shouldPropose(repoRoot, proposedInsight);\n if (!quality.shouldPropose) {\n return null;\n }\n\n return { trigger, source, proposedInsight };\n}\n\n/** Internal detection result before quality filtering */\ninterface RawDetection {\n trigger: string;\n source: Source;\n proposedInsight: string;\n}\n\n/**\n * Run the appropriate detector based on input type.\n */\nfunction runDetector(input: DetectionInput): RawDetection | null {\n switch (input.type) {\n case 'user':\n return detectUserCorrectionFlow(input.data);\n case 'self':\n return detectSelfCorrectionFlow(input.data);\n case 'test':\n return detectTestFailureFlow(input.data);\n }\n}\n\n/**\n * Detect user correction and extract insight.\n */\nfunction detectUserCorrectionFlow(data: CorrectionSignal): RawDetection | null {\n const result = detectUserCorrection(data);\n if (!result) {\n return null;\n }\n\n return {\n trigger: result.trigger,\n source: 'user_correction',\n proposedInsight: result.correctionMessage,\n };\n}\n\n/**\n * Detect self correction and extract insight.\n */\nfunction detectSelfCorrectionFlow(data: EditHistory): RawDetection | null {\n const result = detectSelfCorrection(data);\n if (!result) {\n return null;\n }\n\n return {\n trigger: result.trigger,\n source: 'self_correction',\n // Self-corrections need context to form useful insights\n proposedInsight: `Check ${result.file} for common errors before editing`,\n };\n}\n\n/**\n * Detect test failure and extract insight.\n */\nfunction detectTestFailureFlow(data: TestResult): RawDetection | null {\n const result = detectTestFailure(data);\n if (!result) {\n return null;\n }\n\n return {\n trigger: result.trigger,\n source: 'test_failure',\n proposedInsight: result.errorOutput,\n };\n}\n\n/** Valid detection types for validation */\nconst VALID_TYPES = new Set<string>(['user', 'self', 'test']);\n\n/**\n * Parse detection input from a JSON file.\n *\n * @param filePath - Path to JSON input file\n * @returns Parsed detection input\n * @throws Error if file is invalid or type is unknown\n */\nexport async function parseInputFile(filePath: string): Promise<DetectionInput> {\n const content = await fs.readFile(filePath, 'utf-8');\n const data = JSON.parse(content) as { type: string; data: unknown };\n\n if (!VALID_TYPES.has(data.type)) {\n throw new Error(`Invalid detection type: ${data.type}. 
Must be one of: user, self, test`);\n }\n\n return data as DetectionInput;\n}\n","/**\n * CLI utility functions.\n *\n * Pure functions extracted from cli.ts for testability.\n */\n\n/**\n * Format bytes to human-readable string.\n *\n * @param bytes - Number of bytes\n * @returns Formatted string (e.g., \"1.5 KB\", \"2.0 MB\")\n */\nexport function formatBytes(bytes: number): string {\n if (bytes === 0) return '0 B';\n if (bytes < 1024) return `${bytes} B`;\n const kb = bytes / 1024;\n if (kb < 1024) return `${kb.toFixed(1)} KB`;\n const mb = kb / 1024;\n return `${mb.toFixed(1)} MB`;\n}\n\n/**\n * Parse limit option and validate it's a positive integer.\n *\n * @param value - String value from command option\n * @param name - Option name for error message\n * @returns Parsed integer\n * @throws Error if value is not a valid positive integer\n */\nexport function parseLimit(value: string, name: string): number {\n const parsed = parseInt(value, 10);\n if (Number.isNaN(parsed) || parsed <= 0) {\n throw new Error(`Invalid ${name}: must be a positive integer`);\n }\n return parsed;\n}\n\n/**\n * Get repository root from environment variable or current directory.\n *\n * @returns Repository root path for lesson storage\n */\nexport function getRepoRoot(): string {\n return process.env['LEARNING_AGENT_ROOT'] ?? process.cwd();\n}\n","/**\n * Embedding model resolution using node-llama-cpp's built-in resolver.\n *\n * Uses resolveModelFile for automatic download and caching.\n * Model is stored in ~/.node-llama-cpp/models/ by default.\n */\n\nimport { existsSync } from 'node:fs';\nimport { homedir } from 'node:os';\nimport { join } from 'node:path';\nimport { resolveModelFile } from 'node-llama-cpp';\n\n/**\n * HuggingFace URI for EmbeddingGemma-300M (Q4_0 quantization).\n *\n * - Size: ~278MB\n * - Dimensions: 768 (default), supports MRL truncation to 512/256/128\n * - Context: 2048 tokens\n */\nexport const MODEL_URI = 'hf:ggml-org/embeddinggemma-300M-qat-q4_0-GGUF/embeddinggemma-300M-qat-Q4_0.gguf';\n\n/**\n * Expected model filename after download.\n * node-llama-cpp uses format: hf_{org}_{filename}\n */\nexport const MODEL_FILENAME = 'hf_ggml-org_embeddinggemma-300M-qat-Q4_0.gguf';\n\n/** Default model directory used by node-llama-cpp */\nconst DEFAULT_MODEL_DIR = join(homedir(), '.node-llama-cpp', 'models');\n\n/**\n * Check if the embedding model is available locally.\n *\n * @returns true if model file exists\n */\nexport function isModelAvailable(): boolean {\n return existsSync(join(DEFAULT_MODEL_DIR, MODEL_FILENAME));\n}\n\n/**\n * Resolve the embedding model path, downloading if necessary.\n *\n * Uses node-llama-cpp's resolveModelFile for automatic download with progress.\n *\n * @param options - Optional configuration\n * @param options.cli - Show download progress in console (default: true)\n * @returns Path to the resolved model file\n *\n * @example\n * ```typescript\n * const modelPath = await resolveModel();\n * const llama = await getLlama();\n * const model = await llama.loadModel({ modelPath });\n * ```\n */\nexport async function resolveModel(options: { cli?: boolean } = {}): Promise<string> {\n const { cli = true } = options;\n return resolveModelFile(MODEL_URI, { cli });\n}\n","/**\n * Text embedding via node-llama-cpp with EmbeddingGemma model\n *\n * **Resource lifecycle:**\n * - Model is loaded lazily on first embedding call (~150MB in memory)\n * - Once loaded, the model remains in memory until `unloadEmbedding()` is called\n * - Loading is slow (~1-3s); keeping 
loaded improves subsequent call performance\n *\n * **Memory usage:**\n * - Embedding model: ~150MB RAM when loaded\n * - Embeddings themselves: ~3KB per embedding (768 dimensions x 4 bytes)\n *\n * @see {@link unloadEmbedding} for releasing memory\n * @see {@link getEmbedding} for the lazy-loading mechanism\n */\n\nimport { getLlama, LlamaEmbeddingContext } from 'node-llama-cpp';\n\nimport { isModelAvailable, resolveModel } from './model.js';\n\n/** Singleton embedding context */\nlet embeddingContext: LlamaEmbeddingContext | null = null;\n\n/**\n * Get the LlamaEmbeddingContext instance for generating embeddings.\n *\n * **Lazy loading behavior:**\n * - First call loads the embedding model (~150MB) into memory\n * - Loading takes ~1-3 seconds depending on hardware\n * - Subsequent calls return the cached instance immediately\n * - Downloads model automatically if not present\n *\n * **Resource lifecycle:**\n * - Once loaded, model stays in memory until `unloadEmbedding()` is called\n * - For CLI commands: typically load once, use, then unload on exit\n * - For long-running processes: keep loaded for performance\n *\n * @returns The singleton embedding context\n * @throws Error if model download fails\n *\n * @example\n * ```typescript\n * // Direct usage (prefer embedText for simple cases)\n * const ctx = await getEmbedding();\n * const result = await ctx.getEmbeddingFor('some text');\n *\n * // Ensure cleanup\n * process.on('exit', () => unloadEmbedding());\n * ```\n *\n * @see {@link embedText} for simpler text-to-vector conversion\n * @see {@link unloadEmbedding} for releasing memory\n */\nexport async function getEmbedding(): Promise<LlamaEmbeddingContext> {\n if (embeddingContext) return embeddingContext;\n\n // Resolve model path (downloads if needed)\n const modelPath = await resolveModel({ cli: true });\n\n // Load llama and model\n const llama = await getLlama();\n const model = await llama.loadModel({ modelPath });\n embeddingContext = await model.createEmbeddingContext();\n\n return embeddingContext;\n}\n\n/**\n * Unload the embedding context to free memory (~150MB).\n *\n * **Resource lifecycle:**\n * - Disposes the underlying LlamaEmbeddingContext\n * - Releases ~150MB of RAM used by the model\n * - After unloading, subsequent embedding calls will reload the model\n *\n * **When to call:**\n * - At the end of CLI commands to ensure clean process exit\n * - In memory-constrained environments after batch processing\n * - Before process exit in graceful shutdown handlers\n * - When switching to a different model (if supported in future)\n *\n * **Best practices:**\n * - For single-operation scripts: call before exit\n * - For daemon/server processes: call in shutdown handler\n * - Not needed between embedding calls in the same process\n *\n * @example\n * ```typescript\n * // CLI command pattern\n * try {\n * const embedding = await embedText('some text');\n * // ... 
use embedding\n * } finally {\n * unloadEmbedding();\n * closeDb();\n * }\n *\n * // Graceful shutdown pattern\n * process.on('SIGTERM', () => {\n * unloadEmbedding();\n * closeDb();\n * process.exit(0);\n * });\n * ```\n *\n * @see {@link getEmbedding} for loading the model\n * @see {@link closeDb} for database cleanup (often used together)\n */\nexport function unloadEmbedding(): void {\n if (embeddingContext) {\n embeddingContext.dispose();\n embeddingContext = null;\n }\n}\n\n/**\n * Embed a single text string into a vector.\n *\n * **Lazy loading:** First call loads the embedding model (~150MB, ~1-3s).\n * Subsequent calls use the cached model and complete in milliseconds.\n *\n * @param text - The text to embed\n * @returns A 768-dimensional vector (number[])\n * @throws Error if model download fails\n *\n * @example\n * ```typescript\n * const vector = await embedText('TypeScript error handling');\n * console.log(vector.length); // 768\n *\n * // Remember to clean up when done\n * unloadEmbedding();\n * ```\n *\n * @see {@link embedTexts} for batch embedding\n * @see {@link unloadEmbedding} for releasing memory\n */\nexport async function embedText(text: string): Promise<number[]> {\n const ctx = await getEmbedding();\n const result = await ctx.getEmbeddingFor(text);\n return Array.from(result.vector);\n}\n\n/**\n * Embed multiple texts into vectors.\n *\n * **Lazy loading:** First call loads the embedding model (~150MB, ~1-3s).\n * Subsequent calls use the cached model.\n *\n * **Performance:** More efficient than calling `embedText` in a loop\n * when processing multiple texts, as model loading happens only once.\n *\n * @param texts - Array of texts to embed\n * @returns Array of 768-dimensional vectors, same order as input\n * @throws Error if model download fails\n *\n * @example\n * ```typescript\n * const texts = ['first text', 'second text'];\n * const vectors = await embedTexts(texts);\n * console.log(vectors.length); // 2\n * console.log(vectors[0].length); // 768\n *\n * // Remember to clean up when done\n * unloadEmbedding();\n * ```\n *\n * @see {@link embedText} for single text embedding\n * @see {@link unloadEmbedding} for releasing memory\n */\nexport async function embedTexts(texts: string[]): Promise<number[][]> {\n if (texts.length === 0) return [];\n\n const ctx = await getEmbedding();\n const results: number[][] = [];\n\n for (const text of texts) {\n const result = await ctx.getEmbeddingFor(text);\n results.push(Array.from(result.vector));\n }\n\n return results;\n}\n\n// Re-export isModelAvailable for test utilities\nexport { isModelAvailable };\n","/**\n * Vector search with cosine similarity\n *\n * Embeds query text and ranks lessons by semantic similarity.\n * Uses SQLite cache to avoid recomputing embeddings.\n */\n\nimport { embedText } from '../embeddings/nomic.js';\nimport { contentHash, getCachedEmbedding, setCachedEmbedding } from '../storage/sqlite.js';\nimport { readLessons } from '../storage/jsonl.js';\nimport type { Lesson } from '../types.js';\n\n/**\n * Calculate cosine similarity between two vectors.\n * Returns value between -1 (opposite) and 1 (identical).\n */\nexport function cosineSimilarity(a: number[], b: number[]): number {\n if (a.length !== b.length) {\n throw new Error('Vectors must have same length');\n }\n\n let dotProduct = 0;\n let normA = 0;\n let normB = 0;\n\n for (let i = 0; i < a.length; i++) {\n dotProduct += a[i]! * b[i]!;\n normA += a[i]! * a[i]!;\n normB += b[i]! 
* b[i]!;\n }\n\n const magnitude = Math.sqrt(normA) * Math.sqrt(normB);\n if (magnitude === 0) return 0;\n\n return dotProduct / magnitude;\n}\n\n/** Lesson with similarity score */\nexport interface ScoredLesson {\n lesson: Lesson;\n score: number;\n}\n\n/** Options for vector search */\nexport interface SearchVectorOptions {\n /** Maximum number of results to return (default: 10) */\n limit?: number;\n}\n\n/** Default number of results to return */\nconst DEFAULT_LIMIT = 10;\n\n/**\n * Search lessons by vector similarity to query text.\n * Returns top N lessons sorted by similarity score (descending).\n * Uses embedding cache to avoid recomputing embeddings.\n */\nexport async function searchVector(\n repoRoot: string,\n query: string,\n options?: SearchVectorOptions\n): Promise<ScoredLesson[]> {\n const limit = options?.limit ?? DEFAULT_LIMIT;\n // Read all lessons\n const { lessons } = await readLessons(repoRoot);\n if (lessons.length === 0) return [];\n\n // Embed the query\n const queryVector = await embedText(query);\n\n // Score each lesson\n const scored: ScoredLesson[] = [];\n for (const lesson of lessons) {\n const lessonText = `${lesson.trigger} ${lesson.insight}`;\n const hash = contentHash(lesson.trigger, lesson.insight);\n\n // Try cache first\n let lessonVector = getCachedEmbedding(repoRoot, lesson.id, hash);\n\n if (!lessonVector) {\n // Cache miss - compute and store\n lessonVector = await embedText(lessonText);\n setCachedEmbedding(repoRoot, lesson.id, lessonVector, hash);\n }\n\n const score = cosineSimilarity(queryVector, lessonVector);\n scored.push({ lesson, score });\n }\n\n // Sort by score descending and take top N\n scored.sort((a, b) => b.score - a.score);\n return scored.slice(0, limit);\n}\n","/**\n * Multi-factor lesson ranking system\n *\n * Combines vector similarity with semantic boosts:\n * - Severity: high=1.5, medium=1.0, low=0.8\n * - Recency: 1.2 for lessons ≤30 days old\n * - Confirmation: 1.3 for confirmed lessons\n */\n\nimport type { Lesson } from '../types.js';\n\nimport type { ScoredLesson } from './vector.js';\n\n/** Lesson with final ranked score */\nexport interface RankedLesson extends ScoredLesson {\n finalScore?: number;\n}\n\nconst RECENCY_THRESHOLD_DAYS = 30;\nconst HIGH_SEVERITY_BOOST = 1.5;\nconst MEDIUM_SEVERITY_BOOST = 1.0;\nconst LOW_SEVERITY_BOOST = 0.8;\nconst RECENCY_BOOST = 1.2;\nconst CONFIRMATION_BOOST = 1.3;\n\n/**\n * Calculate severity boost based on lesson severity.\n * Lessons without severity get 1.0 (medium boost).\n */\nexport function severityBoost(lesson: Lesson): number {\n switch (lesson.severity) {\n case 'high':\n return HIGH_SEVERITY_BOOST;\n case 'medium':\n return MEDIUM_SEVERITY_BOOST;\n case 'low':\n return LOW_SEVERITY_BOOST;\n default:\n return MEDIUM_SEVERITY_BOOST;\n }\n}\n\n/**\n * Calculate recency boost based on lesson age.\n * Lessons ≤30 days old get 1.2, older get 1.0.\n */\nexport function recencyBoost(lesson: Lesson): number {\n const created = new Date(lesson.created);\n const now = new Date();\n const ageMs = now.getTime() - created.getTime();\n const ageDays = Math.floor(ageMs / (1000 * 60 * 60 * 24));\n\n return ageDays <= RECENCY_THRESHOLD_DAYS ? RECENCY_BOOST : 1.0;\n}\n\n/**\n * Calculate confirmation boost.\n * Confirmed lessons get 1.3, unconfirmed get 1.0.\n */\nexport function confirmationBoost(lesson: Lesson): number {\n return lesson.confirmed ? 
CONFIRMATION_BOOST : 1.0;\n}\n\n/**\n * Calculate combined score for a lesson.\n * score = vectorSimilarity * severity * recency * confirmation\n */\nexport function calculateScore(lesson: Lesson, vectorSimilarity: number): number {\n return (\n vectorSimilarity * severityBoost(lesson) * recencyBoost(lesson) * confirmationBoost(lesson)\n );\n}\n\n/**\n * Rank lessons by combined score.\n * Returns new array sorted by finalScore descending.\n */\nexport function rankLessons(lessons: ScoredLesson[]): RankedLesson[] {\n return lessons\n .map((scored) => ({\n ...scored,\n finalScore: calculateScore(scored.lesson, scored.score),\n }))\n .sort((a, b) => (b.finalScore ?? 0) - (a.finalScore ?? 0));\n}\n","/**\n * Session-start lesson retrieval\n *\n * Loads high-severity lessons at the start of a session.\n * No vector search - just filter by severity and recency.\n */\n\nimport { readLessons } from '../storage/jsonl.js';\nimport type { Lesson, Severity } from '../types.js';\n\n/** Default number of lessons to load at session start */\nconst DEFAULT_LIMIT = 5;\n\n/** A full lesson with severity field present */\ntype FullLesson = Lesson & { type: 'full'; severity: Severity };\n\n/**\n * Type guard to check if a lesson is a full lesson with severity\n */\nfunction isFullLesson(lesson: Lesson): lesson is FullLesson {\n return lesson.type === 'full' && lesson.severity !== undefined;\n}\n\n/**\n * Load high-severity lessons for session start.\n *\n * Returns confirmed, high-severity lessons sorted by recency.\n * These are the most important lessons to surface at the start\n * of a coding session.\n *\n * @param repoRoot - Repository root directory\n * @param limit - Maximum number of lessons to return (default: 5)\n * @returns Array of high-severity lessons, most recent first\n */\nexport async function loadSessionLessons(\n repoRoot: string,\n limit: number = DEFAULT_LIMIT\n): Promise<FullLesson[]> {\n const { lessons: allLessons } = await readLessons(repoRoot);\n\n // Filter for high-severity, confirmed, full lessons\n const highSeverityLessons = allLessons.filter(\n (lesson): lesson is FullLesson =>\n isFullLesson(lesson) && lesson.severity === 'high' && lesson.confirmed\n );\n\n // Sort by recency (most recent first)\n highSeverityLessons.sort((a, b) => {\n const dateA = new Date(a.created).getTime();\n const dateB = new Date(b.created).getTime();\n return dateB - dateA;\n });\n\n // Return top N\n return highSeverityLessons.slice(0, limit);\n}\n","/**\n * Plan-time lesson retrieval\n *\n * Retrieves relevant lessons when planning an implementation.\n * Uses vector search to find semantically similar lessons.\n */\n\nimport { searchVector, type ScoredLesson } from '../search/vector.js';\nimport { rankLessons, type RankedLesson } from '../search/ranking.js';\n\n/** Default number of lessons to retrieve */\nconst DEFAULT_LIMIT = 5;\n\n/** Result of plan-time retrieval */\nexport interface PlanRetrievalResult {\n lessons: RankedLesson[];\n message: string;\n}\n\n/**\n * Retrieve relevant lessons for a plan.\n *\n * Uses vector search to find semantically similar lessons,\n * then applies ranking boosts for severity, recency, and confirmation.\n *\n * Hard-fails if embeddings are unavailable (propagates error from embedText).\n *\n * @param repoRoot - Repository root directory\n * @param planText - The plan text to search against\n * @param limit - Maximum number of lessons to return (default: 5)\n * @returns Ranked lessons and formatted message\n */\nexport async function retrieveForPlan(\n 
repoRoot: string,\n planText: string,\n limit: number = DEFAULT_LIMIT\n): Promise<PlanRetrievalResult> {\n // Get lessons by vector similarity (will throw if embeddings unavailable)\n const scored = await searchVector(repoRoot, planText, { limit: limit * 2 });\n\n // Apply ranking boosts\n const ranked = rankLessons(scored);\n\n // Take top N after ranking\n const topLessons = ranked.slice(0, limit);\n\n // Format the Lessons Check message\n const message = formatLessonsCheck(topLessons);\n\n return { lessons: topLessons, message };\n}\n\n/**\n * Format a \"Lessons Check\" message for display.\n *\n * This message is intended to be shown at plan-time to remind\n * the developer of relevant lessons before implementation.\n *\n * @param lessons - Ranked lessons to include in the message\n * @returns Formatted message string\n */\nexport function formatLessonsCheck(lessons: ScoredLesson[]): string {\n const header = '📚 Lessons Check\\n' + '─'.repeat(40);\n\n if (lessons.length === 0) {\n return `${header}\\nNo relevant lessons found for this plan.`;\n }\n\n const lessonLines = lessons.map((l, i) => {\n const bullet = `${i + 1}.`;\n const insight = l.lesson.insight;\n return `${bullet} ${insight}`;\n });\n\n return `${header}\\n${lessonLines.join('\\n')}`;\n}\n","/**\n * Learning Agent - Repository-scoped learning system for Claude Code\n *\n * This package helps Claude Code learn from mistakes and avoid repeating them.\n * It captures lessons during coding sessions and retrieves relevant lessons\n * when planning new work.\n *\n * ## Quick Start\n *\n * ```typescript\n * import { appendLesson, retrieveForPlan, loadSessionLessons } from 'learning-agent';\n *\n * // At session start, load high-severity lessons\n * const criticalLessons = await loadSessionLessons(repoRoot);\n *\n * // When planning, retrieve relevant lessons\n * const { lessons, message } = await retrieveForPlan(repoRoot, planText);\n *\n * // When capturing a lesson\n * await appendLesson(repoRoot, lesson);\n * ```\n *\n * ## Hook Integration\n *\n * Add to your `.claude/settings.json`:\n *\n * ```json\n * {\n * \"hooks\": {\n * \"session_start\": \"npx learning-agent load-session\",\n * \"pre_tool\": \"npx learning-agent check-plan\"\n * }\n * }\n * ```\n *\n * ## Resource Management\n *\n * This library manages two heavyweight resources that require cleanup:\n *\n * ### SQLite Database\n * - **Acquired:** Lazily on first database operation (search, rebuild, etc.)\n * - **Memory:** Minimal (~few KB for connection, index cached by OS)\n * - **Cleanup:** Call `closeDb()` before process exit\n *\n * ### Embedding Model\n * - **Acquired:** Lazily on first embedding call (embedText, embedTexts, searchVector)\n * - **Memory:** ~150MB RAM for the EmbeddingGemma model\n * - **Cleanup:** Call `unloadEmbedding()` before process exit\n *\n * ### Recommended Cleanup Pattern\n *\n * ```typescript\n * import { closeDb, unloadEmbedding } from 'learning-agent';\n *\n * // For CLI commands - use try/finally\n * async function main() {\n * try {\n * // ... 
your code that uses learning-agent\n * } finally {\n * unloadEmbedding();\n * closeDb();\n * }\n * }\n *\n * // For long-running processes - use shutdown handlers\n * process.on('SIGTERM', () => {\n * unloadEmbedding();\n * closeDb();\n * process.exit(0);\n * });\n * process.on('SIGINT', () => {\n * unloadEmbedding();\n * closeDb();\n * process.exit(0);\n * });\n * ```\n *\n * **Note:** Failing to clean up will not corrupt data, but may cause:\n * - Memory leaks in long-running processes\n * - Unclean process exits (warnings in some environments)\n *\n * @see {@link closeDb} for database cleanup\n * @see {@link unloadEmbedding} for embedding model cleanup\n * @module learning-agent\n */\n\nexport const VERSION = '0.1.0';\n\n// Storage API\nexport { appendLesson, readLessons, LESSONS_PATH } from './storage/jsonl.js';\nexport type { ReadLessonsOptions, ReadLessonsResult, ParseError } from './storage/jsonl.js';\nexport { rebuildIndex, searchKeyword, closeDb, DB_PATH } from './storage/sqlite.js';\n\n// Embeddings API\nexport { embedText, embedTexts, getEmbedding, isModelAvailable, unloadEmbedding } from './embeddings/nomic.js';\nexport { MODEL_FILENAME, MODEL_URI, resolveModel } from './embeddings/model.js';\n\n// Search API\nexport { searchVector, cosineSimilarity } from './search/vector.js';\nexport type { ScoredLesson, SearchVectorOptions } from './search/vector.js';\nexport { rankLessons, calculateScore, severityBoost, recencyBoost, confirmationBoost } from './search/ranking.js';\nexport type { RankedLesson } from './search/ranking.js';\n\n// Capture API - Quality filters\nexport { shouldPropose, isNovel, isSpecific, isActionable } from './capture/quality.js';\nexport type { NoveltyResult, NoveltyOptions, SpecificityResult, ActionabilityResult, ProposeResult } from './capture/quality.js';\n\n// Capture API - Triggers\nexport { detectUserCorrection, detectSelfCorrection, detectTestFailure } from './capture/triggers.js';\nexport type {\n CorrectionSignal,\n DetectedCorrection,\n EditHistory,\n EditEntry,\n DetectedSelfCorrection,\n TestResult,\n DetectedTestFailure,\n} from './capture/triggers.js';\n\n// Retrieval API\nexport { loadSessionLessons } from './retrieval/session.js';\nexport { retrieveForPlan, formatLessonsCheck } from './retrieval/plan.js';\nexport type { PlanRetrievalResult } from './retrieval/plan.js';\n\n// Types and schemas\nexport {\n generateId,\n LessonSchema,\n LessonTypeSchema,\n TombstoneSchema,\n} from './types.js';\nexport type {\n Lesson,\n LessonType,\n Tombstone,\n Source,\n Severity,\n Context,\n} from './types.js';\n","/**\n * Compaction and auto-archive for lessons\n *\n * Handles:\n * - Archiving old lessons (>90 days with 0 retrievals)\n * - Removing tombstones through JSONL rewrite\n * - Tracking compaction thresholds\n */\n\nimport { appendFile, mkdir, readFile, rename, writeFile } from 'node:fs/promises';\nimport { dirname, join } from 'node:path';\n\nimport type { Lesson } from '../types.js';\n\nimport { LESSONS_PATH, readLessons } from './jsonl.js';\n\n/** Relative path to archive directory from repo root */\nexport const ARCHIVE_DIR = '.claude/lessons/archive';\n\n/** Number of tombstones that triggers automatic compaction */\nexport const TOMBSTONE_THRESHOLD = 100;\n\n/** Age threshold for archiving (in days) */\nexport const ARCHIVE_AGE_DAYS = 90;\n\n/** Milliseconds per day for time calculations */\nconst MS_PER_DAY = 1000 * 60 * 60 * 24;\n\n/** Month offset for JavaScript's 0-indexed months */\nconst MONTH_INDEX_OFFSET = 1;\n\n/** Padding length for 
month in archive filename (e.g., \"01\" not \"1\") */\nconst MONTH_PAD_LENGTH = 2;\n\n/**\n * Result of a compaction operation\n */\nexport interface CompactResult {\n /** Number of lessons moved to archive */\n archived: number;\n /** Number of tombstones removed */\n tombstonesRemoved: number;\n /** Number of lessons remaining in index.jsonl */\n lessonsRemaining: number;\n}\n\n/**\n * Generate archive file path for a given date.\n * Format: .claude/lessons/archive/YYYY-MM.jsonl\n */\nexport function getArchivePath(repoRoot: string, date: Date): string {\n const year = date.getFullYear();\n const month = String(date.getMonth() + MONTH_INDEX_OFFSET).padStart(MONTH_PAD_LENGTH, '0');\n return join(repoRoot, ARCHIVE_DIR, `${year}-${month}.jsonl`);\n}\n\n/**\n * Parse raw JSONL lines from the lessons file.\n * Returns all lines (including invalid ones) as parsed objects or null.\n */\nasync function parseRawJsonlLines(\n repoRoot: string\n): Promise<Array<{ line: string; parsed: Record<string, unknown> | null }>> {\n const filePath = join(repoRoot, LESSONS_PATH);\n let content: string;\n try {\n content = await readFile(filePath, 'utf-8');\n } catch {\n return [];\n }\n\n const results: Array<{ line: string; parsed: Record<string, unknown> | null }> = [];\n for (const line of content.split('\\n')) {\n const trimmed = line.trim();\n if (!trimmed) continue;\n\n try {\n const parsed = JSON.parse(trimmed) as Record<string, unknown>;\n results.push({ line: trimmed, parsed });\n } catch {\n results.push({ line: trimmed, parsed: null });\n }\n }\n return results;\n}\n\n/**\n * Count the number of tombstones (deleted: true records) in the JSONL file.\n */\nexport async function countTombstones(repoRoot: string): Promise<number> {\n const lines = await parseRawJsonlLines(repoRoot);\n let count = 0;\n for (const { parsed } of lines) {\n if (parsed && parsed['deleted'] === true) {\n count++;\n }\n }\n return count;\n}\n\n/**\n * Check if compaction is needed based on tombstone count.\n */\nexport async function needsCompaction(repoRoot: string): Promise<boolean> {\n const count = await countTombstones(repoRoot);\n return count >= TOMBSTONE_THRESHOLD;\n}\n\n/**\n * Rewrite the JSONL file without tombstones.\n * Applies last-write-wins deduplication.\n */\nexport async function rewriteWithoutTombstones(repoRoot: string): Promise<number> {\n const filePath = join(repoRoot, LESSONS_PATH);\n const tempPath = filePath + '.tmp';\n\n // Read deduplicated lessons (already handles last-write-wins)\n const { lessons } = await readLessons(repoRoot);\n\n // Count tombstones before rewrite\n const tombstoneCount = await countTombstones(repoRoot);\n\n // Ensure directory exists\n await mkdir(dirname(filePath), { recursive: true });\n\n // Write clean lessons to temp file\n const lines = lessons.map((lesson) => JSON.stringify(lesson) + '\\n');\n await writeFile(tempPath, lines.join(''), 'utf-8');\n\n // Atomic rename\n await rename(tempPath, filePath);\n\n return tombstoneCount;\n}\n\n/**\n * Determine if a lesson should be archived based on age and retrieval count.\n * Lessons are archived if older than ARCHIVE_AGE_DAYS and never retrieved.\n *\n * @param lesson - The lesson to evaluate\n * @param now - Current date for age calculation\n * @returns true if lesson should be archived\n */\nfunction shouldArchive(lesson: Lesson, now: Date): boolean {\n const created = new Date(lesson.created);\n const ageMs = now.getTime() - created.getTime();\n const ageDays = ageMs / MS_PER_DAY;\n\n // Archive if: older than threshold 
AND never retrieved\n return ageDays > ARCHIVE_AGE_DAYS && (lesson.retrievalCount === undefined || lesson.retrievalCount === 0);\n}\n\n/**\n * Archive old lessons that haven't been retrieved.\n * Moves lessons >90 days old with 0 retrievals to archive files.\n * Returns the number of lessons archived.\n */\nexport async function archiveOldLessons(repoRoot: string): Promise<number> {\n const { lessons } = await readLessons(repoRoot);\n const now = new Date();\n\n const toArchive: Lesson[] = [];\n const toKeep: Lesson[] = [];\n\n for (const lesson of lessons) {\n if (shouldArchive(lesson, now)) {\n toArchive.push(lesson);\n } else {\n toKeep.push(lesson);\n }\n }\n\n if (toArchive.length === 0) {\n return 0;\n }\n\n // Group lessons by archive file (YYYY-MM)\n const archiveGroups = new Map<string, Lesson[]>();\n for (const lesson of toArchive) {\n const created = new Date(lesson.created);\n const archivePath = getArchivePath(repoRoot, created);\n const group = archiveGroups.get(archivePath) ?? [];\n group.push(lesson);\n archiveGroups.set(archivePath, group);\n }\n\n // Create archive directory\n const archiveDir = join(repoRoot, ARCHIVE_DIR);\n await mkdir(archiveDir, { recursive: true });\n\n // Append to archive files\n for (const [archivePath, archiveLessons] of archiveGroups) {\n const lines = archiveLessons.map((l) => JSON.stringify(l) + '\\n').join('');\n await appendFile(archivePath, lines, 'utf-8');\n }\n\n // Rewrite main file without archived lessons\n const filePath = join(repoRoot, LESSONS_PATH);\n const tempPath = filePath + '.tmp';\n await mkdir(dirname(filePath), { recursive: true });\n\n const lines = toKeep.map((lesson) => JSON.stringify(lesson) + '\\n');\n await writeFile(tempPath, lines.join(''), 'utf-8');\n await rename(tempPath, filePath);\n\n return toArchive.length;\n}\n\n/**\n * Run full compaction: archive old lessons and remove tombstones.\n */\nexport async function compact(repoRoot: string): Promise<CompactResult> {\n // Count tombstones BEFORE any operations (archiving also rewrites the file)\n const tombstonesBefore = await countTombstones(repoRoot);\n\n // First, archive old lessons\n const archived = await archiveOldLessons(repoRoot);\n\n // Then, remove tombstones (may be fewer now if archiving removed some)\n const tombstonesAfterArchive = await countTombstones(repoRoot);\n await rewriteWithoutTombstones(repoRoot);\n\n // Total tombstones removed = before - after rewrite (which is 0 after rewrite)\n // But we want to report what was actually in the file before compaction\n const tombstonesRemoved = archived > 0 ? 
tombstonesBefore : tombstonesAfterArchive;\n\n // Get final count\n const { lessons } = await readLessons(repoRoot);\n\n return {\n archived,\n tombstonesRemoved,\n lessonsRemaining: lessons.length,\n };\n}\n","#!/usr/bin/env node\n/**\n * Learning Agent CLI\n *\n * Commands:\n * init - Initialize learning-agent in a repository\n * learn <insight> - Capture a new lesson\n * search <query> - Search lessons by keyword\n * list - List all lessons\n * detect --input - Detect learning triggers from input\n * capture - Capture lesson from trigger/insight or input file\n * compact - Archive old lessons and remove tombstones\n */\n\nimport chalk from 'chalk';\nimport { Command } from 'commander';\n\nimport { chmodSync, existsSync, statSync } from 'node:fs';\nimport { mkdir, readFile, rename, writeFile } from 'node:fs/promises';\nimport { homedir } from 'node:os';\nimport { dirname, join } from 'node:path';\n\n// ============================================================================\n// Hooks Constants\n// ============================================================================\n\n/** Pre-commit hook reminder message */\nconst PRE_COMMIT_MESSAGE = `Before committing, have you captured any valuable lessons from this session?\nConsider: corrections, mistakes, or insights worth remembering.\n\nTo capture a lesson:\n npx learning-agent capture --trigger \"what happened\" --insight \"what to do\" --yes`;\n\n/** Pre-commit hook shell script template */\nconst PRE_COMMIT_HOOK_TEMPLATE = `#!/bin/sh\n# Learning Agent pre-commit hook\n# Reminds Claude to consider capturing lessons before commits\n\nnpx learning-agent hooks run pre-commit\n`;\n\n// ============================================================================\n// Claude Code Hooks Configuration\n// ============================================================================\n\n/** Marker to identify our hook in Claude Code settings */\nconst CLAUDE_HOOK_MARKER = 'learning-agent load-session';\n\n/** Claude Code SessionStart hook configuration */\nconst CLAUDE_HOOK_CONFIG = {\n matcher: 'startup|resume|compact',\n hooks: [\n {\n type: 'command',\n command: 'npx learning-agent load-session 2>/dev/null || true',\n },\n ],\n};\n\n/** Marker comment to identify our hook */\nconst HOOK_MARKER = '# Learning Agent pre-commit hook';\n\nimport { detectAndPropose, parseInputFile } from './capture/index.js';\nimport type { DetectionResult } from './capture/index.js';\nimport { formatBytes, getRepoRoot, parseLimit } from './cli-utils.js';\nimport { isModelAvailable, loadSessionLessons, retrieveForPlan, VERSION } from './index.js';\nimport {\n appendLesson,\n compact,\n countTombstones,\n DB_PATH,\n getRetrievalStats,\n LESSONS_PATH,\n needsCompaction,\n readLessons,\n rebuildIndex,\n searchKeyword,\n syncIfNeeded,\n TOMBSTONE_THRESHOLD,\n} from './storage/index.js';\nimport { generateId, LessonSchema } from './types.js';\nimport type { Lesson } from './types.js';\n\n// ============================================================================\n// Output Formatting Helpers\n// ============================================================================\n\n/** Output helper functions for consistent formatting */\nconst out = {\n success: (msg: string): void => console.log(chalk.green('[ok]'), msg),\n error: (msg: string): void => console.error(chalk.red('[error]'), msg),\n info: (msg: string): void => console.log(chalk.blue('[info]'), msg),\n warn: (msg: string): void => console.log(chalk.yellow('[warn]'), msg),\n};\n\n/** Global options interface 
*/\ninterface GlobalOpts {\n verbose: boolean;\n quiet: boolean;\n}\n\n/**\n * Get global options from command.\n */\nfunction getGlobalOpts(cmd: Command): GlobalOpts {\n const opts = cmd.optsWithGlobals() as { verbose?: boolean; quiet?: boolean };\n return {\n verbose: opts.verbose ?? false,\n quiet: opts.quiet ?? false,\n };\n}\n\n/** Default limit for search results */\nconst DEFAULT_SEARCH_LIMIT = '10';\n\n/** Default limit for list results */\nconst DEFAULT_LIST_LIMIT = '20';\n\n/** Default limit for check-plan results */\nconst DEFAULT_CHECK_PLAN_LIMIT = '5';\n\n/** Length of ISO date prefix (YYYY-MM-DD) */\nconst ISO_DATE_PREFIX_LENGTH = 10;\n\n/** Decimal places for average calculations */\nconst AVG_DECIMAL_PLACES = 1;\n\n/** Decimal places for relevance scores */\nconst RELEVANCE_DECIMAL_PLACES = 2;\n\n/** Indentation for JSON pretty-printing */\nconst JSON_INDENT_SPACES = 2;\n\n// ============================================================================\n// Capture Command Helpers\n// ============================================================================\n\n/** Options for capture command */\ninterface CaptureOptions {\n trigger?: string;\n insight?: string;\n input?: string;\n json?: boolean;\n yes?: boolean;\n}\n\n/**\n * Create a lesson from explicit trigger and insight.\n */\nfunction createLessonFromFlags(trigger: string, insight: string, confirmed: boolean): Lesson {\n return {\n id: generateId(insight),\n type: 'quick',\n trigger,\n insight,\n tags: [],\n source: 'manual',\n context: { tool: 'capture', intent: 'manual capture' },\n created: new Date().toISOString(),\n confirmed,\n supersedes: [],\n related: [],\n };\n}\n\n/**\n * Output lesson in JSON format for capture command.\n */\nfunction outputCaptureJson(lesson: Lesson, saved: boolean): void {\n console.log(JSON.stringify({\n id: lesson.id,\n trigger: lesson.trigger,\n insight: lesson.insight,\n type: lesson.type,\n saved,\n }));\n}\n\n/**\n * Output lesson preview in human-readable format.\n */\nfunction outputCapturePreview(lesson: Lesson): void {\n console.log('Lesson captured:');\n console.log(` ID: ${lesson.id}`);\n console.log(` Trigger: ${lesson.trigger}`);\n console.log(` Insight: ${lesson.insight}`);\n console.log(` Type: ${lesson.type}`);\n console.log(` Tags: ${lesson.tags.length > 0 ? lesson.tags.join(', ') : '(none)'}`);\n console.log('\\nSave this lesson? 
[y/n]');\n}\n\n/**\n * Create lesson from input file detection result.\n */\nfunction createLessonFromInputFile(result: DetectionResult, confirmed: boolean): Lesson {\n return {\n id: generateId(result.proposedInsight),\n type: 'quick',\n trigger: result.trigger,\n insight: result.proposedInsight,\n tags: [],\n source: result.source,\n context: { tool: 'capture', intent: 'auto-capture' },\n created: new Date().toISOString(),\n confirmed,\n supersedes: [],\n related: [],\n };\n}\n\n// ============================================================================\n// Check-Plan Command Helpers\n// ============================================================================\n\n/**\n * Read plan text from stdin (non-TTY mode).\n */\nasync function readPlanFromStdin(): Promise<string | undefined> {\n const { stdin } = await import('node:process');\n if (!stdin.isTTY) {\n const chunks: Buffer[] = [];\n for await (const chunk of stdin) {\n chunks.push(chunk as Buffer);\n }\n return Buffer.concat(chunks).toString('utf-8').trim();\n }\n return undefined;\n}\n\n/**\n * Output check-plan results in JSON format.\n */\nfunction outputCheckPlanJson(lessons: Array<{ lesson: Lesson; score: number }>): void {\n const jsonOutput = {\n lessons: lessons.map((l) => ({\n id: l.lesson.id,\n insight: l.lesson.insight,\n relevance: l.score,\n source: l.lesson.source,\n })),\n count: lessons.length,\n };\n console.log(JSON.stringify(jsonOutput));\n}\n\n/**\n * Output check-plan results in human-readable format.\n */\nfunction outputCheckPlanHuman(lessons: Array<{ lesson: Lesson; score: number }>, quiet: boolean): void {\n console.log('## Lessons Check\\n');\n console.log('Relevant to your plan:\\n');\n\n lessons.forEach((item, i) => {\n const num = i + 1;\n console.log(`${num}. ${chalk.bold(`[${item.lesson.id}]`)} ${item.lesson.insight}`);\n console.log(` - Relevance: ${item.score.toFixed(RELEVANCE_DECIMAL_PLACES)}`);\n console.log(` - Source: ${item.lesson.source}`);\n console.log();\n });\n\n if (!quiet) {\n console.log('---');\n console.log('Consider these lessons while implementing.');\n }\n}\n\n// ============================================================================\n// Load-Session Command Helpers\n// ============================================================================\n\n/**\n * Output load-session results in human-readable format.\n */\nfunction outputSessionLessonsHuman(lessons: Lesson[], quiet: boolean): void {\n console.log('## Session Lessons (High Severity)\\n');\n\n lessons.forEach((lesson, i) => {\n const num = i + 1;\n const date = lesson.created.slice(0, ISO_DATE_PREFIX_LENGTH);\n\n console.log(`${num}. ${chalk.bold(`[${lesson.id}]`)} ${lesson.insight}`);\n console.log(` - Source: ${lesson.source} (${date})`);\n if (lesson.tags.length > 0) {\n console.log(` - Tags: ${lesson.tags.join(', ')}`);\n }\n console.log();\n });\n\n const lessonWord = lessons.length === 1 ? 
'lesson' : 'lessons';\n if (!quiet) {\n console.log('---');\n console.log(`${lessons.length} high-severity ${lessonWord} loaded.`);\n }\n}\n\n// ============================================================================\n// Init Command Helpers\n// ============================================================================\n\n/** Section header to check for idempotency */\nconst LEARNING_AGENT_SECTION_HEADER = '## Learning Agent Integration';\n\n/** Template content for AGENTS.md */\nconst AGENTS_MD_TEMPLATE = `\n## Learning Agent Integration\n\nThis project uses learning-agent for session memory.\n\n### Retrieval Points\n\n- **Session start**: High-severity lessons loaded automatically\n- **Plan-time**: BEFORE implementing a plan, run check-plan to retrieve relevant lessons\n\n### Plan-Time Retrieval (Explicit Step)\n\n**BEFORE implementing any plan**, run:\n\n\\`\\`\\`bash\nnpx learning-agent check-plan --plan \"your plan description\" --json\n\\`\\`\\`\n\nDisplay results as a **Lessons Check** section after your plan:\n\n\\`\\`\\`\n## Lessons Check\n1. [insight from lesson 1] (relevance: 0.85)\n2. [insight from lesson 2] (relevance: 0.72)\n\\`\\`\\`\n\nConsider each lesson while implementing.\n\n### Proposing Lessons\n\nPropose when: user correction, self-correction, test failure fix, or manual request.\n\n**Quality gate (ALL must pass):**\n\n- Novel (not already stored)\n- Specific (clear guidance)\n- Actionable (obvious what to do)\n\n**Confirmation format:**\n\n\\`\\`\\`\nLearned: [insight]. Save? [y/n]\n\\`\\`\\`\n\n### Session-End Protocol\n\nBefore closing a session, reflect on lessons learned:\n\n1. **Review**: What mistakes or corrections happened?\n2. **Quality gate**: Is it novel, specific, actionable?\n3. **Propose**: \"Learned: [insight]. Save? [y/n]\"\n4. **Capture**: \\`npx learning-agent capture --trigger \"...\" --insight \"...\" --yes\\`\n\n### CLI Commands\n\n\\`\\`\\`bash\nnpx learning-agent load-session --json # Session start\nnpx learning-agent check-plan --plan \"...\" --json # Before implementing\nnpx learning-agent capture --trigger \"...\" --insight \"...\" --yes\n\\`\\`\\`\n\nSee [AGENTS.md](https://github.com/Nathandela/learning_agent/blob/main/AGENTS.md) for full documentation.\n`;\n\n/**\n * Check if AGENTS.md already has the Learning Agent section.\n */\nfunction hasLearningAgentSection(content: string): boolean {\n return content.includes(LEARNING_AGENT_SECTION_HEADER);\n}\n\n/**\n * Create the lessons directory structure.\n */\nasync function createLessonsDirectory(repoRoot: string): Promise<void> {\n const lessonsDir = dirname(join(repoRoot, LESSONS_PATH));\n await mkdir(lessonsDir, { recursive: true });\n}\n\n/**\n * Create empty index.jsonl if it doesn't exist.\n */\nasync function createIndexFile(repoRoot: string): Promise<void> {\n const indexPath = join(repoRoot, LESSONS_PATH);\n if (!existsSync(indexPath)) {\n await writeFile(indexPath, '', 'utf-8');\n }\n}\n\n/**\n * Create or update AGENTS.md with Learning Agent section.\n */\nasync function updateAgentsMd(repoRoot: string): Promise<boolean> {\n const agentsPath = join(repoRoot, 'AGENTS.md');\n let content = '';\n let existed = false;\n\n if (existsSync(agentsPath)) {\n content = await readFile(agentsPath, 'utf-8');\n existed = true;\n if (hasLearningAgentSection(content)) {\n return false; // Already has section, no update needed\n }\n }\n\n // Append the template\n const newContent = existed ? 
content.trimEnd() + '\\n' + AGENTS_MD_TEMPLATE : AGENTS_MD_TEMPLATE.trim() + '\\n';\n await writeFile(agentsPath, newContent, 'utf-8');\n return true;\n}\n\n// ============================================================================\n// Hooks Helpers\n// ============================================================================\n\n/** Make hook file executable (mode 0o755) */\nconst HOOK_FILE_MODE = 0o755;\n\n/**\n * Check if a pre-commit hook already exists with our marker.\n */\nfunction hasLearningAgentHook(content: string): boolean {\n return content.includes(HOOK_MARKER);\n}\n\n/**\n * Get the git hooks directory, respecting core.hooksPath if set.\n */\nasync function getGitHooksDir(repoRoot: string): Promise<string | null> {\n const gitDir = join(repoRoot, '.git');\n\n // Check if .git directory exists\n if (!existsSync(gitDir)) {\n return null;\n }\n\n // Check for core.hooksPath in .git/config\n const configPath = join(gitDir, 'config');\n if (existsSync(configPath)) {\n const config = await readFile(configPath, 'utf-8');\n const match = /hooksPath\\s*=\\s*(.+)$/m.exec(config);\n if (match?.[1]) {\n const hooksPath = match[1].trim();\n // Resolve relative paths from repo root\n return hooksPath.startsWith('/') ? hooksPath : join(repoRoot, hooksPath);\n }\n }\n\n // Default to .git/hooks\n const defaultHooksDir = join(gitDir, 'hooks');\n return existsSync(defaultHooksDir) ? defaultHooksDir : null;\n}\n\n/** Block to append to existing hooks */\nconst LEARNING_AGENT_HOOK_BLOCK = `\n# Learning Agent pre-commit hook (appended)\nnpx learning-agent hooks run pre-commit\n`;\n\n/**\n * Install pre-commit hook, respecting core.hooksPath and existing hooks.\n *\n * - Respects core.hooksPath when configured\n * - Appends to existing hooks instead of overwriting\n * - Uses marker to ensure idempotency\n */\nasync function installPreCommitHook(repoRoot: string): Promise<boolean> {\n const gitHooksDir = await getGitHooksDir(repoRoot);\n\n // Skip if not a git repo or no hooks directory\n if (!gitHooksDir) {\n return false;\n }\n\n // Ensure hooks directory exists\n await mkdir(gitHooksDir, { recursive: true });\n\n const hookPath = join(gitHooksDir, 'pre-commit');\n\n // Check if hook already exists\n if (existsSync(hookPath)) {\n const content = await readFile(hookPath, 'utf-8');\n if (hasLearningAgentHook(content)) {\n return false; // Already installed\n }\n\n // Append our block to existing hook (non-destructive)\n const newContent = content.trimEnd() + '\\n' + LEARNING_AGENT_HOOK_BLOCK;\n await writeFile(hookPath, newContent, 'utf-8');\n chmodSync(hookPath, HOOK_FILE_MODE);\n return true;\n }\n\n // Create new hook file with full template\n await writeFile(hookPath, PRE_COMMIT_HOOK_TEMPLATE, 'utf-8');\n chmodSync(hookPath, HOOK_FILE_MODE);\n\n return true;\n}\n\nconst program = new Command();\n\n// Add global options\nprogram\n .option('-v, --verbose', 'Show detailed output')\n .option('-q, --quiet', 'Suppress non-essential output');\n\nprogram\n .name('learning-agent')\n .description('Repository-scoped learning system for Claude Code')\n .version(VERSION);\n\n/**\n * Init command - Initialize learning-agent in a repository.\n *\n * Creates the lessons directory structure and optionally injects\n * the Learning Agent Integration section into AGENTS.md.\n *\n * @example npx learning-agent init\n * @example npx learning-agent init --skip-agents\n */\nprogram\n .command('init')\n .description('Initialize learning-agent in this repository')\n .option('--skip-agents', 'Skip AGENTS.md 
modification')\n .option('--skip-hooks', 'Skip git hooks installation')\n .option('--json', 'Output result as JSON')\n .action(async function (this: Command, options: { skipAgents?: boolean; skipHooks?: boolean; json?: boolean }) {\n const repoRoot = getRepoRoot();\n const { quiet } = getGlobalOpts(this);\n\n // Create directory structure\n await createLessonsDirectory(repoRoot);\n await createIndexFile(repoRoot);\n const lessonsDir = dirname(join(repoRoot, LESSONS_PATH));\n\n // Update AGENTS.md unless skipped\n let agentsMdUpdated = false;\n if (!options.skipAgents) {\n agentsMdUpdated = await updateAgentsMd(repoRoot);\n }\n\n // Install hooks unless skipped\n let hooksInstalled = false;\n if (!options.skipHooks) {\n hooksInstalled = await installPreCommitHook(repoRoot);\n }\n\n // Output\n if (options.json) {\n console.log(JSON.stringify({\n initialized: true,\n lessonsDir,\n agentsMd: agentsMdUpdated,\n hooks: hooksInstalled,\n }));\n } else if (!quiet) {\n out.success('Learning agent initialized');\n console.log(` Lessons directory: ${lessonsDir}`);\n if (agentsMdUpdated) {\n console.log(' AGENTS.md: Updated with Learning Agent section');\n } else if (options.skipAgents) {\n console.log(' AGENTS.md: Skipped (--skip-agents)');\n } else {\n console.log(' AGENTS.md: Already has Learning Agent section');\n }\n if (hooksInstalled) {\n console.log(' Git hooks: pre-commit hook installed');\n } else if (options.skipHooks) {\n console.log(' Git hooks: Skipped (--skip-hooks)');\n } else {\n console.log(' Git hooks: Already installed or not a git repo');\n }\n }\n });\n\n/**\n * Hooks command - Run git hook scripts.\n *\n * Called by git hooks to output prompts/reminders.\n *\n * @example npx learning-agent hooks run pre-commit\n */\nconst hooksCommand = program.command('hooks').description('Git hooks management');\n\nhooksCommand\n .command('run <hook>')\n .description('Run a hook script (called by git hooks)')\n .option('--json', 'Output as JSON')\n .action((hook: string, options: { json?: boolean }) => {\n if (hook === 'pre-commit') {\n if (options.json) {\n console.log(JSON.stringify({ hook: 'pre-commit', message: PRE_COMMIT_MESSAGE }));\n } else {\n console.log(PRE_COMMIT_MESSAGE);\n }\n } else {\n if (options.json) {\n console.log(JSON.stringify({ error: `Unknown hook: ${hook}` }));\n } else {\n out.error(`Unknown hook: ${hook}`);\n }\n process.exit(1);\n }\n });\n\n// ============================================================================\n// Setup Command - Configure Claude Code hooks\n// ============================================================================\n\n/**\n * Get the path to Claude Code settings file.\n */\nfunction getClaudeSettingsPath(project: boolean): string {\n if (project) {\n const repoRoot = getRepoRoot();\n return join(repoRoot, '.claude', 'settings.json');\n }\n return join(homedir(), '.claude', 'settings.json');\n}\n\n/**\n * Read and parse Claude Code settings.\n */\nasync function readClaudeSettings(settingsPath: string): Promise<Record<string, unknown>> {\n if (!existsSync(settingsPath)) {\n return {};\n }\n const content = await readFile(settingsPath, 'utf-8');\n return JSON.parse(content) as Record<string, unknown>;\n}\n\n/**\n * Check if our hook is already installed.\n */\nfunction hasClaudeHook(settings: Record<string, unknown>): boolean {\n const hooks = settings.hooks as Record<string, unknown[]> | undefined;\n if (!hooks?.SessionStart) return false;\n\n return hooks.SessionStart.some((entry) => {\n const hookEntry = entry as { hooks?: Array<{ 
command?: string }> };\n return hookEntry.hooks?.some((h) => h.command?.includes(CLAUDE_HOOK_MARKER));\n });\n}\n\n/**\n * Add our hook to SessionStart array.\n */\nfunction addLearningAgentHook(settings: Record<string, unknown>): void {\n if (!settings.hooks) {\n settings.hooks = {};\n }\n const hooks = settings.hooks as Record<string, unknown[]>;\n if (!hooks.SessionStart) {\n hooks.SessionStart = [];\n }\n hooks.SessionStart.push(CLAUDE_HOOK_CONFIG);\n}\n\n/**\n * Remove our hook from SessionStart array.\n */\nfunction removeLearningAgentHook(settings: Record<string, unknown>): boolean {\n const hooks = settings.hooks as Record<string, unknown[]> | undefined;\n if (!hooks?.SessionStart) return false;\n\n const originalLength = hooks.SessionStart.length;\n hooks.SessionStart = hooks.SessionStart.filter((entry) => {\n const hookEntry = entry as { hooks?: Array<{ command?: string }> };\n return !hookEntry.hooks?.some((h) => h.command?.includes(CLAUDE_HOOK_MARKER));\n });\n\n return hooks.SessionStart.length < originalLength;\n}\n\n/**\n * Write Claude Code settings atomically.\n */\nasync function writeClaudeSettings(settingsPath: string, settings: Record<string, unknown>): Promise<void> {\n const dir = dirname(settingsPath);\n await mkdir(dir, { recursive: true });\n\n // Write to temp file, then rename (atomic)\n const tempPath = settingsPath + '.tmp';\n await writeFile(tempPath, JSON.stringify(settings, null, 2) + '\\n', 'utf-8');\n await rename(tempPath, settingsPath);\n}\n\nconst setupCommand = program.command('setup').description('Setup integrations');\n\nsetupCommand\n .command('claude')\n .description('Install Claude Code SessionStart hooks')\n .option('--project', 'Install to project .claude directory instead of global')\n .option('--uninstall', 'Remove learning-agent hooks')\n .option('--dry-run', 'Show what would change without writing')\n .option('--json', 'Output as JSON')\n .action(async (options: { project?: boolean; uninstall?: boolean; dryRun?: boolean; json?: boolean }) => {\n const settingsPath = getClaudeSettingsPath(options.project ?? false);\n const displayPath = options.project ? '.claude/settings.json' : '~/.claude/settings.json';\n\n let settings: Record<string, unknown>;\n try {\n settings = await readClaudeSettings(settingsPath);\n } catch {\n if (options.json) {\n console.log(JSON.stringify({ error: 'Failed to parse settings file' }));\n } else {\n out.error('Failed to parse settings file. 
Check if JSON is valid.');\n }\n process.exit(1);\n }\n\n const alreadyInstalled = hasClaudeHook(settings);\n\n // Handle uninstall\n if (options.uninstall) {\n if (options.dryRun) {\n if (options.json) {\n console.log(JSON.stringify({ dryRun: true, wouldRemove: alreadyInstalled, location: displayPath }));\n } else {\n if (alreadyInstalled) {\n console.log(`Would remove learning-agent hooks from ${displayPath}`);\n } else {\n console.log('No learning-agent hooks to remove');\n }\n }\n return;\n }\n\n const removed = removeLearningAgentHook(settings);\n if (removed) {\n await writeClaudeSettings(settingsPath, settings);\n if (options.json) {\n console.log(JSON.stringify({ installed: false, location: displayPath, action: 'removed' }));\n } else {\n out.success('Learning agent hooks removed');\n console.log(` Location: ${displayPath}`);\n }\n } else {\n if (options.json) {\n console.log(JSON.stringify({ installed: false, location: displayPath, action: 'unchanged' }));\n } else {\n out.info('No learning agent hooks to remove');\n }\n }\n return;\n }\n\n // Handle install\n if (options.dryRun) {\n if (options.json) {\n console.log(JSON.stringify({ dryRun: true, wouldInstall: !alreadyInstalled, location: displayPath }));\n } else {\n if (alreadyInstalled) {\n console.log('Learning agent hooks already installed');\n } else {\n console.log(`Would install learning-agent hooks to ${displayPath}`);\n }\n }\n return;\n }\n\n if (alreadyInstalled) {\n if (options.json) {\n console.log(JSON.stringify({\n installed: true,\n location: displayPath,\n hooks: ['SessionStart'],\n action: 'unchanged',\n }));\n } else {\n out.info('Learning agent hooks already installed');\n console.log(` Location: ${displayPath}`);\n }\n return;\n }\n\n // Add hook\n const fileExists = existsSync(settingsPath);\n addLearningAgentHook(settings);\n await writeClaudeSettings(settingsPath, settings);\n\n if (options.json) {\n console.log(JSON.stringify({\n installed: true,\n location: displayPath,\n hooks: ['SessionStart'],\n action: fileExists ? 'updated' : 'created',\n }));\n } else {\n out.success(options.project ? 'Claude Code hooks installed (project-level)' : 'Claude Code hooks installed');\n console.log(` Location: ${displayPath}`);\n console.log(' Hook: SessionStart (startup|resume|compact)');\n console.log('');\n console.log('Lessons will be loaded automatically at session start.');\n if (options.project) {\n console.log('');\n console.log('Note: Project hooks override global hooks.');\n }\n }\n });\n\nprogram\n .command('learn <insight>')\n .description('Capture a new lesson')\n .option('-t, --trigger <text>', 'What triggered this lesson')\n .option('--tags <tags>', 'Comma-separated tags', '')\n .option('-y, --yes', 'Skip confirmation')\n .action(async function (this: Command, insight: string, options: { trigger?: string; tags: string; yes?: boolean }) {\n const repoRoot = getRepoRoot();\n const { quiet } = getGlobalOpts(this);\n\n const lesson: Lesson = {\n id: generateId(insight),\n type: 'quick',\n trigger: options.trigger ?? 'Manual capture',\n insight,\n tags: options.tags ? 
options.tags.split(',').map((t) => t.trim()) : [],\n source: 'manual',\n context: {\n tool: 'cli',\n intent: 'manual learning',\n },\n created: new Date().toISOString(),\n confirmed: true, // learn command is explicit confirmation\n supersedes: [],\n related: [],\n };\n\n await appendLesson(repoRoot, lesson);\n out.success(`Learned: ${insight}`);\n if (!quiet) {\n console.log(`ID: ${chalk.dim(lesson.id)}`);\n }\n });\n\nprogram\n .command('search <query>')\n .description('Search lessons by keyword')\n .option('-n, --limit <number>', 'Maximum results', DEFAULT_SEARCH_LIMIT)\n .action(async function (this: Command, query: string, options: { limit: string }) {\n const repoRoot = getRepoRoot();\n const limit = parseLimit(options.limit, 'limit');\n const { verbose, quiet } = getGlobalOpts(this);\n\n // Sync index if JSONL has changed\n await syncIfNeeded(repoRoot);\n\n const results = await searchKeyword(repoRoot, query, limit);\n\n if (results.length === 0) {\n console.log('No lessons match your search. Try a different query or use \"list\" to see all lessons.');\n return;\n }\n\n if (!quiet) {\n out.info(`Found ${results.length} lesson(s):\\n`);\n }\n for (const lesson of results) {\n console.log(`[${chalk.cyan(lesson.id)}] ${lesson.insight}`);\n console.log(` Trigger: ${lesson.trigger}`);\n if (verbose && lesson.context) {\n console.log(` Context: ${lesson.context.tool} - ${lesson.context.intent}`);\n console.log(` Created: ${lesson.created}`);\n }\n if (lesson.tags.length > 0) {\n console.log(` Tags: ${lesson.tags.join(', ')}`);\n }\n console.log();\n }\n });\n\nprogram\n .command('list')\n .description('List all lessons')\n .option('-n, --limit <number>', 'Maximum results', DEFAULT_LIST_LIMIT)\n .action(async function (this: Command, options: { limit: string }) {\n const repoRoot = getRepoRoot();\n const limit = parseLimit(options.limit, 'limit');\n const { verbose, quiet } = getGlobalOpts(this);\n\n const { lessons, skippedCount } = await readLessons(repoRoot);\n\n if (lessons.length === 0) {\n console.log('No lessons found. 
Get started with: learn \"Your first lesson\"');\n if (skippedCount > 0) {\n out.warn(`${skippedCount} corrupted lesson(s) skipped.`);\n }\n return;\n }\n\n const toShow = lessons.slice(0, limit);\n\n // Show summary unless quiet mode\n if (!quiet) {\n out.info(`Showing ${toShow.length} of ${lessons.length} lesson(s):\\n`);\n }\n\n for (const lesson of toShow) {\n console.log(`[${chalk.cyan(lesson.id)}] ${lesson.insight}`);\n if (verbose) {\n console.log(` Type: ${lesson.type} | Source: ${lesson.source}`);\n console.log(` Created: ${lesson.created}`);\n if (lesson.context) {\n console.log(` Context: ${lesson.context.tool} - ${lesson.context.intent}`);\n }\n } else {\n console.log(` Type: ${lesson.type} | Source: ${lesson.source}`);\n }\n if (lesson.tags.length > 0) {\n console.log(` Tags: ${lesson.tags.join(', ')}`);\n }\n console.log();\n }\n\n if (skippedCount > 0) {\n out.warn(`${skippedCount} corrupted lesson(s) skipped.`);\n }\n });\n\nprogram\n .command('rebuild')\n .description('Rebuild SQLite index from JSONL')\n .option('-f, --force', 'Force rebuild even if unchanged')\n .action(async (options: { force?: boolean }) => {\n const repoRoot = getRepoRoot();\n if (options.force) {\n console.log('Forcing index rebuild...');\n await rebuildIndex(repoRoot);\n console.log('Index rebuilt.');\n } else {\n const rebuilt = await syncIfNeeded(repoRoot);\n if (rebuilt) {\n console.log('Index rebuilt (JSONL changed).');\n } else {\n console.log('Index is up to date.');\n }\n }\n });\n\nprogram\n .command('detect')\n .description('Detect learning triggers from input')\n .requiredOption('--input <file>', 'Path to JSON input file')\n .option('--save', 'Save proposed lesson (requires --yes)')\n .option('-y, --yes', 'Confirm save (required with --save)')\n .option('--json', 'Output result as JSON')\n .action(\n async (options: { input: string; save?: boolean; yes?: boolean; json?: boolean }) => {\n const repoRoot = getRepoRoot();\n\n // --save requires --yes\n if (options.save && !options.yes) {\n if (options.json) {\n console.log(JSON.stringify({ error: '--save requires --yes flag for confirmation' }));\n } else {\n out.error('--save requires --yes flag for confirmation');\n console.log('Use: detect --input <file> --save --yes');\n }\n process.exit(1);\n }\n\n const input = await parseInputFile(options.input);\n const result = await detectAndPropose(repoRoot, input);\n\n if (!result) {\n if (options.json) {\n console.log(JSON.stringify({ detected: false }));\n } else {\n console.log('No learning trigger detected.');\n }\n return;\n }\n\n if (options.json) {\n console.log(JSON.stringify({ detected: true, ...result }));\n return;\n }\n\n console.log('Learning trigger detected!');\n console.log(` Trigger: ${result.trigger}`);\n console.log(` Source: ${result.source}`);\n console.log(` Proposed: ${result.proposedInsight}`);\n\n if (options.save && options.yes) {\n const lesson: Lesson = {\n id: generateId(result.proposedInsight),\n type: 'quick',\n trigger: result.trigger,\n insight: result.proposedInsight,\n tags: [],\n source: result.source,\n context: { tool: 'detect', intent: 'auto-capture' },\n created: new Date().toISOString(),\n confirmed: true, // --yes confirms the lesson\n supersedes: [],\n related: [],\n };\n\n await appendLesson(repoRoot, lesson);\n console.log(`\\nSaved as lesson: ${lesson.id}`);\n }\n }\n );\n\n/**\n * Capture command - Capture a lesson from trigger/insight or input file.\n *\n * Modes:\n * - Explicit: --trigger \"what happened\" --insight \"what to do\"\n * - From file: 
--input conversation.json (auto-detect trigger)\n *\n * @example npx learning-agent capture --trigger \"Wrong API\" --insight \"Use v2\" --yes\n * @example npx learning-agent capture --input session.json --json\n */\nprogram\n .command('capture')\n .description('Capture a lesson from trigger/insight or input file')\n .option('-t, --trigger <text>', 'What triggered this lesson')\n .option('-i, --insight <text>', 'The insight or lesson learned')\n .option('--input <file>', 'Path to JSON input file (alternative to trigger/insight)')\n .option('--json', 'Output result as JSON')\n .option('-y, --yes', 'Skip confirmation and save immediately')\n .action(async function (this: Command, options: CaptureOptions) {\n const repoRoot = getRepoRoot();\n const { verbose } = getGlobalOpts(this);\n let lesson: Lesson | undefined;\n\n // Mode 1: From --input file\n if (options.input) {\n const input = await parseInputFile(options.input);\n const result = await detectAndPropose(repoRoot, input);\n if (!result) {\n options.json\n ? console.log(JSON.stringify({ detected: false, saved: false }))\n : console.log('No learning trigger detected.');\n return;\n }\n lesson = createLessonFromInputFile(result, options.yes ?? false);\n } else if (options.trigger && options.insight) {\n // Mode 2: From explicit flags\n lesson = createLessonFromFlags(options.trigger, options.insight, options.yes ?? false);\n } else {\n // Missing required options\n const msg = 'Provide either --trigger and --insight, or --input file.';\n options.json ? console.log(JSON.stringify({ error: msg, saved: false })) : out.error(msg);\n process.exit(1);\n }\n\n // In non-interactive mode, --yes is required\n if (!options.yes && !process.stdin.isTTY) {\n if (options.json) {\n console.log(JSON.stringify({ error: '--yes required in non-interactive mode', saved: false }));\n } else {\n out.error('--yes required in non-interactive mode');\n console.log('Use: capture --trigger \"...\" --insight \"...\" --yes');\n }\n process.exit(1);\n }\n\n // Output and optionally save\n if (options.json) {\n if (options.yes) await appendLesson(repoRoot, lesson);\n outputCaptureJson(lesson, options.yes ?? false);\n } else if (options.yes) {\n await appendLesson(repoRoot, lesson);\n out.success(`Lesson saved: ${lesson.id}`);\n if (verbose) console.log(` Type: ${lesson.type} | Trigger: ${lesson.trigger}`);\n } else {\n // Interactive mode - show preview (TTY only)\n outputCapturePreview(lesson);\n }\n });\n\nprogram\n .command('compact')\n .description('Compact lessons: archive old lessons and remove tombstones')\n .option('-f, --force', 'Run compaction even if below threshold')\n .option('--dry-run', 'Show what would be done without making changes')\n .action(async (options: { force?: boolean; dryRun?: boolean }) => {\n const repoRoot = getRepoRoot();\n\n const tombstones = await countTombstones(repoRoot);\n const needs = await needsCompaction(repoRoot);\n\n if (options.dryRun) {\n console.log('Dry run - no changes will be made.\\n');\n console.log(`Tombstones found: ${tombstones}`);\n console.log(`Compaction needed: ${needs ? 
'yes' : 'no'}`);\n return;\n }\n\n if (!needs && !options.force) {\n console.log(`Compaction not needed (${tombstones} tombstones, threshold is ${TOMBSTONE_THRESHOLD}).`);\n console.log('Use --force to compact anyway.');\n return;\n }\n\n console.log('Running compaction...');\n const result = await compact(repoRoot);\n\n console.log('\\nCompaction complete:');\n console.log(` Archived: ${result.archived} lesson(s)`);\n console.log(` Tombstones removed: ${result.tombstonesRemoved}`);\n console.log(` Lessons remaining: ${result.lessonsRemaining}`);\n\n // Rebuild SQLite index after compaction\n await rebuildIndex(repoRoot);\n console.log(' Index rebuilt.');\n });\n\nprogram\n .command('export')\n .description('Export lessons as JSON to stdout')\n .option('--since <date>', 'Only include lessons created after this date (ISO8601)')\n .option('--tags <tags>', 'Filter by tags (comma-separated, OR logic)')\n .action(async (options: { since?: string; tags?: string }) => {\n const repoRoot = getRepoRoot();\n\n const { lessons } = await readLessons(repoRoot);\n\n let filtered = lessons;\n\n // Filter by date if --since provided\n if (options.since) {\n const sinceDate = new Date(options.since);\n if (Number.isNaN(sinceDate.getTime())) {\n console.error(`Invalid date format: ${options.since}. Use ISO8601 format (e.g., 2024-01-15).`);\n process.exit(1);\n }\n filtered = filtered.filter((lesson) => new Date(lesson.created) >= sinceDate);\n }\n\n // Filter by tags if --tags provided (OR logic)\n if (options.tags) {\n const filterTags = options.tags.split(',').map((t) => t.trim());\n filtered = filtered.filter((lesson) => lesson.tags.some((tag) => filterTags.includes(tag)));\n }\n\n // Output JSON to stdout (portable format for sharing)\n console.log(JSON.stringify(filtered, null, JSON_INDENT_SPACES));\n });\n\nprogram\n .command('import <file>')\n .description('Import lessons from a JSONL file')\n .action(async (file: string) => {\n const repoRoot = getRepoRoot();\n\n // Read input file\n let content: string;\n try {\n const { readFile } = await import('node:fs/promises');\n content = await readFile(file, 'utf-8');\n } catch (err) {\n const code = (err as NodeJS.ErrnoException).code;\n if (code === 'ENOENT') {\n console.error(`Error: File not found: ${file}`);\n } else {\n console.error(`Error reading file: ${(err as Error).message}`);\n }\n process.exit(1);\n }\n\n // Get existing lesson IDs\n const { lessons: existingLessons } = await readLessons(repoRoot);\n const existingIds = new Set(existingLessons.map((l) => l.id));\n\n // Parse and validate each line\n const lines = content.split('\\n');\n let imported = 0;\n let skipped = 0;\n let invalid = 0;\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed) continue;\n\n // Parse JSON\n let parsed: unknown;\n try {\n parsed = JSON.parse(trimmed);\n } catch {\n invalid++;\n continue;\n }\n\n // Validate schema\n const result = LessonSchema.safeParse(parsed);\n if (!result.success) {\n invalid++;\n continue;\n }\n\n const lesson: Lesson = result.data;\n\n // Skip if ID already exists\n if (existingIds.has(lesson.id)) {\n skipped++;\n continue;\n }\n\n // Append lesson\n await appendLesson(repoRoot, lesson);\n existingIds.add(lesson.id);\n imported++;\n }\n\n // Format summary\n const lessonWord = imported === 1 ? 
'lesson' : 'lessons';\n const parts: string[] = [];\n if (skipped > 0) parts.push(`${skipped} skipped`);\n if (invalid > 0) parts.push(`${invalid} invalid`);\n\n if (parts.length > 0) {\n console.log(`Imported ${imported} ${lessonWord} (${parts.join(', ')})`);\n } else {\n console.log(`Imported ${imported} ${lessonWord}`);\n }\n });\n\nprogram\n .command('stats')\n .description('Show database health and statistics')\n .action(async () => {\n const repoRoot = getRepoRoot();\n\n // Sync index to ensure accurate stats\n await syncIfNeeded(repoRoot);\n\n // Read lessons from JSONL to get accurate counts\n const { lessons } = await readLessons(repoRoot);\n const deletedCount = await countTombstones(repoRoot);\n const totalLessons = lessons.length;\n\n // Get retrieval stats from SQLite\n const retrievalStats = getRetrievalStats(repoRoot);\n const totalRetrievals = retrievalStats.reduce((sum, s) => sum + s.count, 0);\n const avgRetrievals = totalLessons > 0 ? (totalRetrievals / totalLessons).toFixed(AVG_DECIMAL_PLACES) : '0.0';\n\n // Get storage sizes\n const jsonlPath = join(repoRoot, LESSONS_PATH);\n const dbPath = join(repoRoot, DB_PATH);\n\n let dataSize = 0;\n let indexSize = 0;\n\n try {\n dataSize = statSync(jsonlPath).size;\n } catch {\n // File doesn't exist\n }\n\n try {\n indexSize = statSync(dbPath).size;\n } catch {\n // File doesn't exist\n }\n\n const totalSize = dataSize + indexSize;\n\n // Format output\n const deletedInfo = deletedCount > 0 ? ` (${deletedCount} deleted)` : '';\n console.log(`Lessons: ${totalLessons} total${deletedInfo}`);\n console.log(`Retrievals: ${totalRetrievals} total, ${avgRetrievals} avg per lesson`);\n console.log(`Storage: ${formatBytes(totalSize)} (index: ${formatBytes(indexSize)}, data: ${formatBytes(dataSize)})`);\n });\n\n/**\n * Load-session command - Load high-severity lessons for session startup.\n *\n * Used by Claude Code hooks to inject critical lessons at session start.\n * Returns lessons sorted by severity/recency for immediate context.\n *\n * @example npx learning-agent load-session --json\n */\nprogram\n .command('load-session')\n .description('Load high-severity lessons for session context')\n .option('--json', 'Output as JSON')\n .action(async function (this: Command, options: { json?: boolean }) {\n const repoRoot = getRepoRoot();\n const { quiet } = getGlobalOpts(this);\n const lessons = await loadSessionLessons(repoRoot);\n\n if (options.json) {\n console.log(JSON.stringify({ lessons, count: lessons.length }));\n return;\n }\n\n if (lessons.length === 0) {\n console.log('No high-severity lessons found.');\n return;\n }\n\n outputSessionLessonsHuman(lessons, quiet);\n });\n\n/**\n * Check-plan command - Check a plan against relevant lessons.\n *\n * Used by Claude Code hooks during plan mode to retrieve lessons\n * that are semantically relevant to the proposed implementation.\n *\n * @example echo \"Add authentication\" | npx learning-agent check-plan --json\n * @example npx learning-agent check-plan --plan \"Refactor the API\"\n */\nprogram\n .command('check-plan')\n .description('Check plan against relevant lessons')\n .option('--plan <text>', 'Plan text to check')\n .option('--json', 'Output as JSON')\n .option('-n, --limit <number>', 'Maximum results', DEFAULT_CHECK_PLAN_LIMIT)\n .action(async function (this: Command, options: { plan?: string; json?: boolean; limit: string }) {\n const repoRoot = getRepoRoot();\n const limit = parseLimit(options.limit, 'limit');\n const { quiet } = getGlobalOpts(this);\n\n // Get plan text 
from --plan flag or stdin\n const planText = options.plan ?? (await readPlanFromStdin());\n\n if (!planText) {\n out.error('No plan provided. Use --plan <text> or pipe text to stdin.');\n process.exit(1);\n }\n\n // Check model availability - hard fail if not available\n if (!isModelAvailable()) {\n if (options.json) {\n console.log(JSON.stringify({\n error: 'Embedding model not available',\n action: 'Run: npx learning-agent download-model',\n }));\n } else {\n out.error('Embedding model not available');\n console.log('');\n console.log('Run: npx learning-agent download-model');\n }\n process.exit(1);\n }\n\n try {\n const result = await retrieveForPlan(repoRoot, planText, limit);\n\n if (options.json) {\n outputCheckPlanJson(result.lessons);\n return;\n }\n\n if (result.lessons.length === 0) {\n console.log('No relevant lessons found for this plan.');\n return;\n }\n\n outputCheckPlanHuman(result.lessons, quiet);\n } catch (err) {\n // Don't mask errors - surface them clearly\n const message = err instanceof Error ? err.message : 'Unknown error';\n if (options.json) {\n console.log(JSON.stringify({ error: message }));\n } else {\n out.error(`Failed to check plan: ${message}`);\n }\n process.exit(1);\n }\n });\n\nprogram.parse();\n"]}