compound-agent 1.8.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +30 -1
- package/README.md +30 -47
- package/bin/ca +32 -0
- package/package.json +19 -78
- package/scripts/postinstall.cjs +221 -0
- package/dist/cli.d.ts +0 -1
- package/dist/cli.js +0 -13655
- package/dist/cli.js.map +0 -1
- package/dist/index.d.ts +0 -3730
- package/dist/index.js +0 -3251
- package/dist/index.js.map +0 -1
- package/docs/research/AgenticAiCodebaseGuide.md +0 -1206
- package/docs/research/BuildingACCompilerAnthropic.md +0 -116
- package/docs/research/HarnessEngineeringOpenAi.md +0 -220
- package/docs/research/code-review/systematic-review-methodology.md +0 -409
- package/docs/research/index.md +0 -76
- package/docs/research/learning-systems/knowledge-compounding-for-agents.md +0 -695
- package/docs/research/property-testing/property-based-testing-and-invariants.md +0 -742
- package/docs/research/scenario-testing/advanced-and-emerging.md +0 -470
- package/docs/research/scenario-testing/core-foundations.md +0 -507
- package/docs/research/scenario-testing/domain-specific-and-human-factors.md +0 -474
- package/docs/research/security/auth-patterns.md +0 -138
- package/docs/research/security/data-exposure.md +0 -185
- package/docs/research/security/dependency-security.md +0 -91
- package/docs/research/security/injection-patterns.md +0 -249
- package/docs/research/security/overview.md +0 -81
- package/docs/research/security/secrets-checklist.md +0 -92
- package/docs/research/security/secure-coding-failure.md +0 -297
- package/docs/research/software_architecture/01-science-of-decomposition.md +0 -615
- package/docs/research/software_architecture/02-architecture-under-uncertainty.md +0 -649
- package/docs/research/software_architecture/03-emergent-behavior-in-composed-systems.md +0 -644
- package/docs/research/spec_design/decision_theory_specifications_and_multi_criteria_tradeoffs.md +0 -0
- package/docs/research/spec_design/design_by_contract.md +0 -251
- package/docs/research/spec_design/domain_driven_design_strategic_modeling.md +0 -183
- package/docs/research/spec_design/formal_specification_methods.md +0 -161
- package/docs/research/spec_design/logic_and_proof_theory_under_the_curry_howard_correspondence.md +0 -250
- package/docs/research/spec_design/natural_language_formal_semantics_abuguity_in_specifications.md +0 -259
- package/docs/research/spec_design/requirements_engineering.md +0 -234
- package/docs/research/spec_design/systems_engineering_specifications_emergent_behavior_interface_contracts.md +0 -149
- package/docs/research/spec_design/what_is_this_about.md +0 -305
- package/docs/research/tdd/test-driven-development-methodology.md +0 -547
- package/docs/research/test-optimization-strategies.md +0 -401
- package/scripts/postinstall.mjs +0 -102
package/dist/index.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/memory/types.ts","../src/memory/storage/jsonl.ts","../src/memory/storage/sqlite/availability.ts","../src/memory/storage/sqlite/schema.ts","../src/memory/storage/sqlite/connection.ts","../src/memory/storage/sqlite/cache.ts","../src/memory/storage/sqlite/sync.ts","../src/memory/search/hybrid.ts","../src/memory/storage/sqlite/search.ts","../src/memory/storage/sqlite/index.ts","../src/memory/storage/compact.ts","../src/memory/storage/index.ts","../src/memory/embeddings/model.ts","../src/memory/embeddings/nomic.ts","../src/memory/embeddings/index.ts","../src/compound/clustering.ts","../src/compound/types.ts","../src/compound/io.ts","../src/compound/synthesis.ts","../src/compound/index.ts","../src/memory/search/vector.ts","../src/utils.ts","../src/memory/search/ranking.ts","../src/memory/search/prewarm.ts","../src/memory/search/index.ts","../src/memory/storage/sqlite-knowledge/schema.ts","../src/memory/storage/sqlite-knowledge/connection.ts","../src/memory/knowledge/types.ts","../src/memory/storage/sqlite-knowledge/cache.ts","../src/memory/storage/sqlite-knowledge/search.ts","../src/memory/storage/sqlite-knowledge/sync.ts","../src/memory/storage/sqlite-knowledge/index.ts","../src/memory/knowledge/chunking.ts","../src/memory/knowledge/embed-chunks.ts","../src/memory/knowledge/indexing.ts","../src/memory/knowledge/embed-lock.ts","../src/memory/knowledge/embed-status.ts","../src/memory/knowledge/embed-background.ts","../src/version.ts","../src/index.ts","../src/memory/capture/quality.ts","../src/memory/capture/triggers.ts","../src/memory/capture/integration.ts","../src/memory/retrieval/session.ts","../src/memory/retrieval/plan.ts","../src/memory/knowledge/index.ts","../src/memory/knowledge/search.ts","../src/cli-utils.ts","../src/setup/all.ts","../src/setup/display-utils.ts","../src/commands/phase-check.ts","../src/setup/primitives.ts","../src/setup/download-model.ts","../src/setup/init.ts","../src/commands/management-crud.ts","../src/memory/index.ts","../src/commands/shared.ts","../src/commands/doctor.ts","../src/commands/management-invalidation.ts","../src/commands/management-io.ts","../src/commands/management-maintenance.ts","../src/commands/management-prime.ts","../src/update-check.ts","../src/audit/checks/lessons.ts","../src/audit/checks/patterns.ts","../src/rules/types.ts","../src/rules/checks/glob-utils.ts","../src/rules/checks/file-pattern.ts","../src/rules/checks/file-size.ts","../src/rules/checks/script.ts","../src/rules/engine.ts","../src/audit/checks/rules.ts","../src/audit/engine.ts","../src/audit/types.ts","../src/commands/knowledge.ts","../src/commands/knowledge-index.ts","../src/commands/clean-lessons.ts","../src/commands/retrieval.ts","../src/lint/detect.ts"],"names":["require","createRequire","join","dirname","createHash","mtime","getLlama","LlamaLogLevel","init_search","init_types","z","readFile","mkdir","appendFile","DEFAULT_THRESHOLD","SCHEMA_SQL","init_schema","mkdirSync","unlinkSync","init_connection","init_cache","init_sync","overlapLines","extname","isModelUsable","embedChunks","existsSync","writeFileSync","readFileSync","openKnowledgeDb","indexDocs","DEFAULT_LIMIT","keywordResults","statSync","SeveritySchema","relative","execSync"],"mappings":";;;;;;;;;;;;;;;;;;;;;AA6QO,SAAS,UAAA,CAAW,SAAiB,IAAA,EAA+B;AACzE,EAAA,MAAM,MAAA,GAAS,aAAA,CAAc,IAAA,IAAQ,QAAQ,CAAA;AAC7C,EAAA,MAAM,IAAA,GAAO,WAAW,QAAQ,CAAA,CAAE,OAAO,OAAO,CAAA,CAAE,OAAO,KAAK,CAAA;AAC9D,EAAA,OAAO,GAAG,MAAM,CAAA,EAAG,KAAK,KAAA,CAAM,CAAA,EAAG,EAAE,CAAC,CAAA,CAAA;AACtC;AAjRA,IAoBa,cAQA,aAAA
,CAAA,CAMA,aAAA,CAAA,CAMA,gBAOA,cAAA,CAAA,CAGA,qBAAA,CAAA,CAOA,kBAGA,oBAAA,CAAA,CAMP,UAAA,CAAA,CAgDO,gBAAA,CAAA,CAWA,kBAAA,CAAA,CAUA,mBAWA,oBAAA,CAAA,CAcA,gBAAA,CAAA,CAeA,oBAcA,YAAA,CAAA,CAMA,qBAAA,CAAA,CAcA,oBAUA,sBAAA,CAAA,CAmCP;AA9PN,IAAA,UAAA,GAAA,KAAA,CAAA;AAAA,EAAA,qBAAA,GAAA;AAoBO,IAAM,YAAA,GAAe,EAAE,IAAA,CAAK;AAAA,MACjC,iBAAA;AAAA,MACA,iBAAA;AAAA,MACA,cAAA;AAAA,MACA;AAAA,KACD,CAAA;AAGM,IAAM,aAAA,GAAgB,EAAE,MAAA,CAAO;AAAA,MACpC,IAAA,EAAM,EAAE,MAAA,EAAO;AAAA,MACf,MAAA,EAAQ,EAAE,MAAA;AAAO,KAClB,CAAA;AAGM,IAAM,aAAA,GAAgB,EAAE,MAAA,CAAO;AAAA,MACpC,GAAA,EAAK,EAAE,MAAA,EAAO;AAAA,MACd,IAAA,EAAM,EAAE,MAAA;AAAO,KAChB,CAAA;AAGM,IAAM,cAAA,GAAiB,EAAE,MAAA,CAAO;AAAA,MACrC,IAAA,EAAM,CAAA,CAAE,MAAA,EAAO,CAAE,IAAI,CAAC,CAAA;AAAA;AAAA,MACtB,IAAA,EAAM,EAAE,MAAA,EAAO,CAAE,KAAI,CAAE,QAAA,GAAW,QAAA,EAAS;AAAA;AAAA,MAC3C,MAAA,EAAQ,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAS;AAAA,KAC7B,CAAA;AAGM,IAAM,iBAAiB,CAAA,CAAE,IAAA,CAAK,CAAC,MAAA,EAAQ,QAAA,EAAU,KAAK,CAAC,CAAA;AAGvD,IAAM,qBAAA,GAAwB,EAAE,KAAA,CAAM;AAAA,MAC3C,CAAA,CAAE,QAAQ,CAAC,CAAA;AAAA;AAAA,MACX,CAAA,CAAE,QAAQ,CAAC,CAAA;AAAA;AAAA,MACX,CAAA,CAAE,QAAQ,CAAC;AAAA;AAAA,KACZ,CAAA;AAGM,IAAM,mBAAmB,CAAA,CAAE,IAAA,CAAK,CAAC,OAAA,EAAS,MAAM,CAAC,CAAA;AAGjD,IAAM,oBAAA,GAAuB,EAAE,IAAA,CAAK,CAAC,UAAU,UAAA,EAAY,SAAA,EAAW,YAAY,CAAC,CAAA;AAM1F,IAAM,UAAA,GAAa;AAAA;AAAA,MAEjB,EAAA,EAAI,EAAE,MAAA,EAAO;AAAA,MACb,OAAA,EAAS,EAAE,MAAA,EAAO;AAAA,MAClB,OAAA,EAAS,EAAE,MAAA,EAAO;AAAA;AAAA,MAGlB,IAAA,EAAM,CAAA,CAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA;AAAA,MACxB,MAAA,EAAQ,YAAA;AAAA,MACR,OAAA,EAAS,aAAA;AAAA,MACT,OAAA,EAAS,EAAE,MAAA,EAAO;AAAA;AAAA,MAClB,SAAA,EAAW,EAAE,OAAA,EAAQ;AAAA;AAAA,MAGrB,UAAA,EAAY,CAAA,CAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA;AAAA,MAC9B,OAAA,EAAS,CAAA,CAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA;AAAA;AAAA,MAG3B,QAAA,EAAU,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA,MAC9B,QAAA,EAAU,eAAe,QAAA,EAAS;AAAA;AAAA,MAGlC,OAAA,EAAS,CAAA,CAAE,OAAA,EAAQ,CAAE,QAAA,EAAS;AAAA,MAC9B,SAAA,EAAW,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA,MAC/B,cAAA,EAAgB,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA;AAAA,MAGpC,QAAA,EAAU,eAAe,QAAA,EAAS;AAAA;AAAA,MAGlC,eAAA,EAAiB,sBAAsB,QAAA,EAAS;AAAA,MAChD,WAAA,EAAa,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA,MACjC,aAAA,EAAe,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA;AAAA,MAGnC,aAAA,EAAe,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA,MACnC,kBAAA,EAAoB,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAS,KAC1C;AAWO,IAAM,gBAAA,GAAmB,EAAE,MAAA,CAAO;AAAA,MACvC,GAAG,UAAA;AAAA,MACH,IAAA,EAAM,CAAA,CAAE,OAAA,CAAQ,QAAQ,CAAA;AAAA,MACxB,OAAA,EAAS,cAAc,QAAA;AAAS,KACjC,CAAA;AAOM,IAAM,kBAAA,GAAqB,EAAE,MAAA,CAAO;AAAA,MACzC,GAAG,UAAA;AAAA,MACH,IAAA,EAAM,CAAA,CAAE,OAAA,CAAQ,UAAU,CAAA;AAAA,MAC1B,OAAA,EAAS,cAAc,QAAA;AAAS,KACjC,CAAA;AAMM,IAAM,iBAAA,GAAoB,EAAE,MAAA,CAAO;AAAA,MACxC,GAAG,UAAA;AAAA,MACH,IAAA,EAAM,CAAA,CAAE,OAAA,CAAQ,SAAS,CAAA;AAAA,MACzB,OAAA,EAAS;AAAA,KACV,CAAA;AAOM,IAAM,oBAAA,GAAuB,EAAE,MAAA,CAAO;AAAA,MAC3C,GAAG,UAAA;AAAA,MACH,IAAA,EAAM,CAAA,CAAE,OAAA,CAAQ,YAAY,CAAA;AAAA,MAC5B,OAAA,EAAS,cAAc,QAAA;AAAS,KACjC,CAAA;AAUM,IAAM,gBAAA,GAAmB,CAAA,CAAE,kBAAA,CAAmB,MAAA,EAAQ;AAAA,MAC3D,gBAAA;AAAA,MACA,kBAAA;AAAA,MACA,iBAAA;AAAA,MACA;AAAA,KACD,CAAA;AAUM,IAAM,kBAAA,GAAqB,EAAE,MAAA,CAAO;AAAA,MACzC,GAAG,UAAA;AAAA,MACH,IAAA,EAAM,gBAAA;AAAA,MACN,OAAA,EAAS,cAAc,QAAA;AAAS,KACjC,CAAA;AAUM,IAAM,YAAA,GAAe,gBAAA;AAMrB,IAAM,qBAAA,GAAwB,EAAE,MAAA,CAAO;AAAA,MAC5C,EAAA,EAAI,EAAE,MAAA,EAAO;AAAA,MACb,OAAA,EAAS,CAAA,CAAE,OAAA,CAAQ,IAAI,CAAA;AAAA,MACvB,SAAA,EAAW,EAAE,MAAA;AAAO;AAAA,KACrB,CAAA;AAUM,IAAM,kBAAA,GAAqB,EAAE,KAAA,CAAM;AAAA,MACxC,gBAAA;AAAA,MACA,kBAAA;AAAA,MACA;AAAA,KACD,CAAA;AAMM,IAAM,sBAAA,GAAyB,kBAAA;AAmCtC,IAAM,aAAA,GAAgD;AAAA,MACpD,MAAA,EAAQ
,GAAA;AAAA,MACR,QAAA,EAAU,GAAA;AAAA,MACV,OAAA,EAAS,GAAA;AAAA,MACT,UAAA,EAAY;AAAA,KACd;AAAA,EAAA;AAAA,CAAA,CAAA;ACvLA,eAAsB,gBAAA,CAAiB,UAAkB,IAAA,EAAiC;AACxF,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC5C,EAAA,MAAM,MAAM,OAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AAElD,EAAA,MAAM,IAAA,GAAO,IAAA,CAAK,SAAA,CAAU,IAAI,CAAA,GAAI,IAAA;AACpC,EAAA,MAAM,UAAA,CAAW,QAAA,EAAU,IAAA,EAAM,OAAO,CAAA;AAC1C;AASA,eAAsB,YAAA,CAAa,UAAkB,MAAA,EAA+B;AAClF,EAAA,OAAO,gBAAA,CAAiB,UAAU,MAAM,CAAA;AAC1C;AAaA,SAAS,aAAA,CACP,IAAA,EACA,UAAA,EACA,MAAA,EACA,YAAA,EACqB;AAErB,EAAA,IAAI,MAAA;AACJ,EAAA,IAAI;AACF,IAAA,MAAA,GAAS,IAAA,CAAK,MAAM,IAAI,CAAA;AAAA,EAC1B,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,UAAA,GAAyB;AAAA,MAC7B,IAAA,EAAM,UAAA;AAAA,MACN,OAAA,EAAS,CAAA,cAAA,EAAkB,GAAA,CAAc,OAAO,CAAA,CAAA;AAAA,MAChD,KAAA,EAAO;AAAA,KACT;AACA,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,oBAAA,EAAuB,UAAU,CAAA,EAAA,EAAK,UAAA,CAAW,OAAO,CAAA,CAAE,CAAA;AAAA,IAC5E;AACA,IAAA,YAAA,GAAe,UAAU,CAAA;AACzB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,MAAM,MAAA,GAAS,sBAAA,CAAuB,SAAA,CAAU,MAAM,CAAA;AACtD,EAAA,IAAI,CAAC,OAAO,OAAA,EAAS;AACnB,IAAA,MAAM,UAAA,GAAyB;AAAA,MAC7B,IAAA,EAAM,UAAA;AAAA,MACN,OAAA,EAAS,CAAA,0BAAA,EAA6B,MAAA,CAAO,KAAA,CAAM,OAAO,CAAA,CAAA;AAAA,MAC1D,OAAO,MAAA,CAAO;AAAA,KAChB;AACA,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,oBAAA,EAAuB,UAAU,CAAA,EAAA,EAAK,UAAA,CAAW,OAAO,CAAA,CAAE,CAAA;AAAA,IAC5E;AACA,IAAA,YAAA,GAAe,UAAU,CAAA;AACzB,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,MAAA,CAAO,IAAA;AAChB;AAOA,SAAS,aAAa,MAAA,EAAyC;AAE7D,EAAA,IAAI,MAAA,CAAO,OAAA,KAAY,IAAA,EAAM,OAAO,IAAA;AAGpC,EAAA,IAAI,MAAA,CAAO,IAAA,KAAS,OAAA,IAAW,MAAA,CAAO,SAAS,MAAA,EAAQ;AACrD,IAAA,OAAO,EAAE,GAAG,MAAA,EAAQ,IAAA,EAAM,QAAA,EAAS;AAAA,EACrC;AAGA,EAAA,OAAO,MAAA;AACT;AAiBA,eAAsB,eAAA,CACpB,QAAA,EACA,OAAA,GAA8B,EAAC,EACC;AAChC,EAAA,MAAM,EAAE,MAAA,GAAS,KAAA,EAAO,YAAA,EAAa,GAAI,OAAA;AACzC,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAE5C,EAAA,IAAI,OAAA;AACJ,EAAA,IAAI;AACF,IAAA,OAAA,GAAU,MAAM,QAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AAAA,EAC5C,SAAS,GAAA,EAAK;AACZ,IAAA,IAAK,GAAA,CAA8B,SAAS,QAAA,EAAU;AACpD,MAAA,OAAO,EAAE,OAAO,EAAC,EAAG,4BAAY,IAAI,GAAA,EAAY,EAAG,YAAA,EAAc,CAAA,EAAE;AAAA,IACrE;AACA,IAAA,MAAM,GAAA;AAAA,EACR;AAEA,EAAA,MAAM,KAAA,uBAAY,GAAA,EAAwB;AAC1C,EAAA,MAAM,UAAA,uBAAiB,GAAA,EAAY;AACnC,EAAA,IAAI,YAAA,GAAe,CAAA;AAEnB,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA;AAChC,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,KAAA,CAAM,QAAQ,CAAA,EAAA,EAAK;AACrC,IAAA,MAAM,OAAA,GAAU,KAAA,CAAM,CAAC,CAAA,CAAG,IAAA,EAAK;AAC/B,IAAA,IAAI,CAAC,OAAA,EAAS;AAEd,IAAA,MAAM,SAAS,aAAA,CAAc,OAAA,EAAS,CAAA,GAAI,CAAA,EAAG,QAAQ,YAAY,CAAA;AACjE,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,YAAA,EAAA;AACA,MAAA;AAAA,IACF;AAGA,IAAA,IAAI,MAAA,CAAO,YAAY,IAAA,EAAM;AAC3B,MAAA,KAAA,CAAM,MAAA,CAAO,OAAO,EAAE,CAAA;AACtB,MAAA,UAAA,CAAW,GAAA,CAAI,OAAO,EAAE,CAAA;AAAA,IAC1B,CAAA,MAAO;AACL,MAAA,MAAM,IAAA,GAAO,aAAa,MAAM,CAAA;AAChC,MAAA,IAAI,IAAA,EAAM;AACR,QAAA,KAAA,CAAM,GAAA,CAAI,MAAA,CAAO,EAAA,EAAI,IAAI,CAAA;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,OAAO,KAAA,CAAM,IAAA,CAAK,MAAM,MAAA,EAAQ,CAAA,EAAG,UAAA,EAAY,YAAA,EAAa;AACvE;AAUA,eAAsB,WAAA,CACpB,QAAA,EACA,OAAA,GAA8B,EAAC,EACH;AAC5B,EAAA,MAAM,MAAA,GAAS,MAAM,eAAA,CAAgB,QAAA,EAAU,OAAO,CAAA;AAGtD,EAAA,MAAM,OAAA,GAAU,OAAO,KAAA,CAAM,MAAA,CAAO,CAAC,IAAA,KAAyB,IAAA,CAAK,SAAS,QAAQ,CAAA;AAEpF,EAAA,OAAO,EAAE,OAAA,EAAS,YAAA,EAAc,MAAA,CAAO,YAAA,EAAa;AACtD;AArPA,IA6Ba;AA7Bb,IAAA,UAAA,GAAA,KAAA,CAAA;AAAA,EAAA,6BAAA,GAAA;AAqBA,IAAA,UAAA,EAAA;AAQO,IAAM,YAAA,GAAe,6BAAA;AAAA,EAAA;AAAA,CAAA,CAAA;ACRrB,SAAS,qBAAA,GAA8B;AAC5C,EAAA,IAAI,OAAA,EAAS;AAEb,EAAA,IAAI;AACF,IAAA,MAAM,MA
AA,GAASA,SAAQ,gBAAgB,CAAA;AACvC,IAAA,MAAM,WAAA,GAAc,OAAO,OAAA,IAAW,MAAA;AACtC,IAAA,MAAM,MAAA,GAAS,IAAI,WAAA,CAAY,UAAU,CAAA;AACzC,IAAA,MAAA,CAAO,KAAA,EAAM;AACb,IAAA,mBAAA,GAAsB,WAAA;AACtB,IAAA,OAAA,GAAU,IAAA;AAAA,EACZ,SAAS,KAAA,EAAO;AACd,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,gVAAA;AAAA,MAOA,EAAE,KAAA;AAAM,KACV;AAAA,EACF;AACF;AAMO,SAAS,sBAAA,GAA6D;AAC3E,EAAA,qBAAA,EAAsB;AACtB,EAAA,OAAO,mBAAA;AACT;AApDA,IAWMA,UAGF,OAAA,EACA,mBAAA;AAfJ,IAAA,iBAAA,GAAA,KAAA,CAAA;AAAA,EAAA,2CAAA,GAAA;AAWA,IAAMA,QAAAA,GAAUC,aAAAA,CAAc,MAAA,CAAA,IAAA,CAAY,GAAG,CAAA;AAG7C,IAAI,OAAA,GAAU,KAAA;AACd,IAAI,mBAAA,GAAmE,IAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACwEhE,SAAS,aAAa,QAAA,EAA8B;AACzD,EAAA,QAAA,CAAS,KAAK,UAAU,CAAA;AACxB,EAAA,MAAM,UAAU,QAAA,CAAS,MAAA,CAAO,gBAAgB,EAAE,MAAA,EAAQ,MAAM,CAAA;AAChE,EAAA,IAAI,YAAY,cAAA,EAAgB;AAC9B,IAAA,QAAA,CAAS,MAAA,CAAO,CAAA,eAAA,EAAkB,cAAc,CAAA,CAAE,CAAA;AAAA,EACpD;AACF;AA7FA,IAca,cAAA,EAGP,UAAA;AAjBN,IAAA,WAAA,GAAA,KAAA,CAAA;AAAA,EAAA,qCAAA,GAAA;AAcO,IAAM,cAAA,GAAiB,CAAA;AAG9B,IAAM,UAAA,GAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA,CAAA;AAAA,EAAA;AAAA,CAAA,CAAA;ACMnB,SAAS,mBAAmB,QAAA,EAAiC;AAC3D,EAAA,MAAM,MAAM,QAAA,CAAS,MAAA,CAAO,gBAAgB,EAAE,MAAA,EAAQ,MAAM,CAAA;AAC5D,EAAA,OAAO,GAAA,KAAQ,cAAA;AACjB;AAUO,SAAS,MAAA,CAAO,QAAA,EAAkB,OAAA,GAAqB,EAAC,EAAiB;AAC9E,EAAA,MAAM,EAAE,QAAA,GAAW,KAAA,EAAM,GAAI,OAAA;AAG7B,EAAA,MAAM,MAAM,QAAA,GAAW,CAAA,QAAA,EAAW,QAAQ,CAAA,CAAA,GAAKC,IAAAA,CAAK,UAAU,OAAO,CAAA;AAErE,EAAA,MAAM,MAAA,GAAS,KAAA,CAAM,GAAA,CAAI,GAAG,CAAA;AAC5B,EAAA,IAAI,MAAA,EAAQ;AACV,IAAA,OAAO,MAAA;AAAA,EACT;AAEA,EAAA,MAAM,WAAW,sBAAA,EAAuB;AACxC,EAAA,IAAI,QAAA;AAEJ,EAAA,IAAI,QAAA,EAAU;AACZ,IAAA,QAAA,GAAW,IAAI,SAAS,UAAU,CAAA;AAAA,EACpC,CAAA,MAAO;AACL,IAAA,MAAM,GAAA,GAAMC,QAAQ,GAAG,CAAA;AACvB,IAAA,SAAA,CAAU,GAAA,EAAK,EAAE,SAAA,EAAW,IAAA,EAAM,CAAA;AAClC,IAAA,QAAA,GAAW,IAAI,SAAS,GAAG,CAAA;AAE3B,IAAA,IAAI,CAAC,kBAAA,CAAmB,QAAQ,CAAA,EAAG;AACjC,MAAA,QAAA,CAAS,KAAA,EAAM;AACf,MAAA,IAAI;AAAE,QAAA,UAAA,CAAW,GAAG,CAAA;AAAA,MAAG,CAAA,CAAA,MAAQ;AAAA,MAAuB;AACtD,MAAA,QAAA,GAAW,IAAI,SAAS,GAAG,CAAA;AAAA,IAC7B;AAEA,IAAA,QAAA,CAAS,OAAO,oBAAoB,CAAA;AAAA,EACtC;AAEA,EAAA,YAAA,CAAa,QAAQ,CAAA;AACrB,EAAA,KAAA,CAAM,GAAA,CAAI,KAAK,QAAQ,CAAA;AACvB,EAAA,OAAO,QAAA;AACT;AAKO,SAAS,OAAA,GAAgB;AAC9B,EAAA,KAAA,MAAW,QAAA,IAAY,KAAA,CAAM,MAAA,EAAO,EAAG;AACrC,IAAA,QAAA,CAAS,KAAA,EAAM;AAAA,EACjB;AACA,EAAA,KAAA,CAAM,KAAA,EAAM;AACd;AA/EA,IAaa,OAAA,CAAA,CAGP;AAhBN,IAAA,eAAA,GAAA,KAAA,CAAA;AAAA,EAAA,yCAAA,GAAA;AASA,IAAA,iBAAA,EAAA;AACA,IAAA,WAAA,EAAA;AAGO,IAAM,OAAA,GAAU,+BAAA;AAGvB,IAAM,KAAA,uBAAY,GAAA,EAA0B;AAAA,EAAA;AAAA,CAAA,CAAA;ACCrC,SAAS,WAAA,CAAY,SAAiB,OAAA,EAAyB;AACpE,EAAA,OAAOC,UAAAA,CAAW,QAAQ,CAAA,CAAE,MAAA,CAAO,CAAA,EAAG,OAAO,CAAA,CAAA,EAAI,OAAO,CAAA,CAAE,CAAA,CAAE,MAAA,CAAO,KAAK,CAAA;AAC1E;AAiDO,SAAS,kBAAA,CACd,QAAA,EACA,QAAA,EACA,SAAA,EACA,IAAA,EACM;AACN,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAEhC,EAAA,MAAM,UAAU,SAAA,YAAqB,YAAA,GAAe,SAAA,GAAY,IAAI,aAAa,SAAS,CAAA;AAC1F,EAAA,MAAM,MAAA,GAAS,OAAO,IAAA,CAAK,OAAA,CAAQ,QAAQ,OAAA,CAAQ,UAAA,EAAY,QAAQ,UAAU,CAAA;AAEjF,EAAA,QAAA,CACG,QAAQ,iEAAiE,CAAA,CACzE,GAAA,CAAI,MAAA,EAAQ,MAAM,QAAQ,CAAA;AAC/B;AAcO,SAAS,wBAAwB,QAAA,EAAqD;AAC3F,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAChC,EAAA,MAAM,IAAA,GAAO,QAAA,CACV,OAAA,CAAQ,6EAA6E,EACrF,GAAA,EAAI;AAEP,EAAA,MAAM,MAAA,uBAAa,GAAA,EAAkC;AACrD,EAAA,KAAA,MAAW,OAAO,IAAA,EAAM;AACtB,IAAA,IAAI,CAAC,IAAI,YAAA,EAAc;AACvB,IAAA,MAAM
,UAAU,IAAI,YAAA;AAAA,MAClB,IAAI,SAAA,CAAU,MAAA;AAAA,MACd,IAAI,SAAA,CAAU,UAAA;AAAA,MACd,GAAA,CAAI,UAAU,UAAA,GAAa;AAAA,KAC7B;AACA,IAAA,MAAA,CAAO,GAAA,CAAI,IAAI,EAAA,EAAI,EAAE,QAAQ,OAAA,EAAS,IAAA,EAAM,GAAA,CAAI,YAAA,EAAc,CAAA;AAAA,EAChE;AACA,EAAA,OAAO,MAAA;AACT;AAMO,SAAS,yBAAA,CACd,QAAA,EACA,QAAA,EACA,YAAA,EACqB;AACrB,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAEhC,EAAA,MAAM,MAAM,QAAA,CACT,OAAA,CAAQ,0EAA0E,CAAA,CAClF,IAAI,QAAQ,CAAA;AAEf,EAAA,IAAI,CAAC,GAAA,IAAO,CAAC,IAAI,iBAAA,IAAqB,CAAC,IAAI,oBAAA,EAAsB;AAC/D,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,IAAI,YAAA,IAAgB,GAAA,CAAI,oBAAA,KAAyB,YAAA,EAAc;AAC7D,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,IAAI,YAAA;AAAA,IACT,IAAI,iBAAA,CAAkB,MAAA;AAAA,IACtB,IAAI,iBAAA,CAAkB,UAAA;AAAA,IACtB,GAAA,CAAI,kBAAkB,UAAA,GAAa;AAAA,GACrC;AACF;AAMO,SAAS,yBAAA,CACd,QAAA,EACA,QAAA,EACA,SAAA,EACA,IAAA,EACM;AACN,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAEhC,EAAA,MAAM,UAAU,SAAA,YAAqB,YAAA,GAAe,SAAA,GAAY,IAAI,aAAa,SAAS,CAAA;AAC1F,EAAA,MAAM,MAAA,GAAS,OAAO,IAAA,CAAK,OAAA,CAAQ,QAAQ,OAAA,CAAQ,UAAA,EAAY,QAAQ,UAAU,CAAA;AAEjF,EAAA,QAAA,CACG,QAAQ,iFAAiF,CAAA,CACzF,GAAA,CAAI,MAAA,EAAQ,MAAM,QAAQ,CAAA;AAC/B;AAQO,SAAS,wBAAwB,QAAA,EAA0D;AAChG,EAAA,MAAM,KAAA,uBAAY,GAAA,EAAiC;AACnD,EAAA,MAAM,IAAA,GAAO,QAAA,CACV,OAAA,CAAQ,uJAAuJ,EAC/J,GAAA,EAAI;AAEP,EAAA,KAAA,MAAW,OAAO,IAAA,EAAM;AACtB,IAAA,IAAI,GAAA,CAAI,SAAA,IAAa,GAAA,CAAI,YAAA,EAAc;AACrC,MAAA,KAAA,CAAM,GAAA,CAAI,IAAI,EAAA,EAAI;AAAA,QAChB,WAAW,GAAA,CAAI,SAAA;AAAA,QACf,aAAa,GAAA,CAAI,YAAA;AAAA,QACjB,kBAAkB,GAAA,CAAI,iBAAA;AAAA,QACtB,oBAAoB,GAAA,CAAI;AAAA,OACzB,CAAA;AAAA,IACH,CAAA,MAAA,IAAW,GAAA,CAAI,iBAAA,IAAqB,GAAA,CAAI,oBAAA,EAAsB;AAE5D,MAAA,KAAA,CAAM,GAAA,CAAI,IAAI,EAAA,EAAI;AAAA,QAChB,WAAW,GAAA,CAAI,iBAAA;AAAA;AAAA,QACf,WAAA,EAAa,EAAA;AAAA,QACb,kBAAkB,GAAA,CAAI,iBAAA;AAAA,QACtB,oBAAoB,GAAA,CAAI;AAAA,OACzB,CAAA;AAAA,IACH;AAAA,EACF;AACA,EAAA,OAAO,KAAA;AACT;AApMA,IAAA,UAAA,GAAA,KAAA,CAAA;AAAA,EAAA,oCAAA,GAAA;AAQA,IAAA,eAAA,EAAA;AAAA,EAAA;AAAA,CAAA,CAAA;ACkBA,SAAS,cAAc,QAAA,EAAiC;AACtD,EAAA,MAAM,SAAA,GAAYF,IAAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAC7C,EAAA,IAAI;AACF,IAAA,MAAM,IAAA,GAAO,SAAS,SAAS,CAAA;AAC/B,IAAA,OAAO,IAAA,CAAK,OAAA;AAAA,EACd,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF;AAOA,SAAS,iBAAiB,QAAA,EAAuC;AAC/D,EAAA,MAAM,MAAM,QAAA,CACT,OAAA,CAAQ,0CAA0C,CAAA,CAClD,IAAI,iBAAiB,CAAA;AACxB,EAAA,OAAO,GAAA,GAAM,UAAA,CAAW,GAAA,CAAI,KAAK,CAAA,GAAI,IAAA;AACvC;AAOA,SAAS,gBAAA,CAAiB,UAAwB,KAAA,EAAqB;AACrE,EAAA,QAAA,CACG,QAAQ,4DAA4D,CAAA,CACpE,IAAI,iBAAA,EAAmB,KAAA,CAAM,UAAU,CAAA;AAC5C;AAOA,eAAsB,aAAa,QAAA,EAAiC;AAClE,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAEhC,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,MAAM,gBAAgB,QAAQ,CAAA;AAChD,EAAA,MAAM,gBAAA,GAAmB,wBAAwB,QAAQ,CAAA;AACzD,EAAA,QAAA,CAAS,KAAK,qBAAqB,CAAA;AAEnC,EAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG;AACtB,IAAA,MAAMG,MAAAA,GAAQ,cAAc,QAAQ,CAAA;AACpC,IAAA,IAAIA,WAAU,IAAA,EAAM;AAClB,MAAA,gBAAA,CAAiB,UAAUA,MAAK,CAAA;AAAA,IAClC;AACA,IAAA;AAAA,EACF;AAEA,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,OAAA,CAAQ,iBAAiB,CAAA;AACjD,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,WAAA,CAAY,CAAC,WAAA,KAA8B;AACrE,IAAA,KAAA,MAAW,QAAQ,WAAA,EAAa;AAC9B,MAAA,MAAM,OAAA,GAAU,WAAA,CAAY,IAAA,CAAK,OAAA,EAAS,KAAK,OAAO,CAAA;AACtD,MAAA,MAAM,WAAA,GAAc,WAAA,CAAY,IAAA,CAAK,OAAA,EAAS,EAAE,CAAA;AAChD,MAAA,MAAM,MAAA,GAAS,gBAAA,CAAiB,GAAA,CAAI,IAAA,CAAK,EAAE,CAAA;AAC3C,MAAA,MAAM,aAAA,GAAgB,MAAA,IAAU,MAAA,CAAO,WAAA,KAAgB,OAAA;AACvD,MAAA,MAAM,oBAAA,GAAuB,MAAA,IAAU,MAAA,CAAO,kBAAA,KAAuB,WAAA;AAErE,MAAA,MAAA,CAAO,GAAA,CAAI;AAAA,QACT,IAAI,IAAA,CAAK,EAAA;AAAA,QACT,MAAM,IAAA,CAAK,IAAA;AAAA,QACX,SAAS,IAAA,CAAK,OAAA;AAAA,QACd,SAAS,IAAA,CAAK,OAAA;AAAA,QACd,QAAA,EAAU,KAAK,QAAA,IAAY,IAAA;AAAA,QAC3B,QAAA,EAAU,KAAK,Q
AAA,IAAY,IAAA;AAAA,QAC3B,IAAA,EAAM,IAAA,CAAK,IAAA,CAAK,IAAA,CAAK,GAAG,CAAA;AAAA,QACxB,QAAQ,IAAA,CAAK,MAAA;AAAA,QACb,OAAA,EAAS,IAAA,CAAK,SAAA,CAAU,IAAA,CAAK,OAAO,CAAA;AAAA,QACpC,UAAA,EAAY,IAAA,CAAK,SAAA,CAAU,IAAA,CAAK,UAAU,CAAA;AAAA,QAC1C,OAAA,EAAS,IAAA,CAAK,SAAA,CAAU,IAAA,CAAK,OAAO,CAAA;AAAA,QACpC,SAAS,IAAA,CAAK,OAAA;AAAA,QACd,SAAA,EAAW,IAAA,CAAK,SAAA,GAAY,CAAA,GAAI,CAAA;AAAA,QAChC,OAAA,EAAS,IAAA,CAAK,OAAA,GAAU,CAAA,GAAI,CAAA;AAAA,QAC5B,eAAA,EAAiB,KAAK,cAAA,IAAkB,CAAA;AAAA,QACxC,cAAA,EAAgB,KAAK,aAAA,IAAiB,IAAA;AAAA,QACtC,SAAA,EAAW,aAAA,GAAgB,MAAA,CAAO,SAAA,GAAY,IAAA;AAAA,QAC9C,YAAA,EAAc,aAAA,GAAgB,MAAA,CAAO,WAAA,GAAc,IAAA;AAAA,QACnD,iBAAA,EAAmB,oBAAA,GAAuB,MAAA,CAAO,gBAAA,GAAmB,IAAA;AAAA,QACpE,oBAAA,EAAsB,oBAAA,GAAuB,MAAA,CAAO,kBAAA,GAAqB,IAAA;AAAA,QACzE,cAAA,EAAgB,KAAK,aAAA,IAAiB,IAAA;AAAA,QACtC,mBAAA,EAAqB,KAAK,kBAAA,IAAsB,IAAA;AAAA,QAChD,aAAA,EAAe,IAAA,CAAK,QAAA,EAAU,IAAA,IAAQ,IAAA;AAAA,QACtC,aAAA,EAAe,IAAA,CAAK,QAAA,EAAU,IAAA,IAAQ,IAAA;AAAA,QACtC,eAAA,EAAiB,IAAA,CAAK,QAAA,EAAU,MAAA,IAAU,IAAA;AAAA,QAC1C,gBAAA,EAAkB,KAAK,eAAA,IAAmB,CAAA;AAAA,QAC1C,YAAA,EAAc,KAAK,WAAA,IAAe,IAAA;AAAA,QAClC,WAAA,EAAa,IAAA,CAAK,OAAA,EAAS,GAAA,IAAO,IAAA;AAAA,QAClC,YAAA,EAAc,IAAA,CAAK,OAAA,EAAS,IAAA,IAAQ;AAAA,OACrC,CAAA;AAAA,IACH;AAAA,EACF,CAAC,CAAA;AAED,EAAA,UAAA,CAAW,KAAK,CAAA;AAEhB,EAAA,MAAM,KAAA,GAAQ,cAAc,QAAQ,CAAA;AACpC,EAAA,IAAI,UAAU,IAAA,EAAM;AAClB,IAAA,gBAAA,CAAiB,UAAU,KAAK,CAAA;AAAA,EAClC;AACF;AAQA,eAAsB,YAAA,CACpB,QAAA,EACA,OAAA,GAAuB,EAAC,EACN;AAClB,EAAA,MAAM,EAAE,KAAA,GAAQ,KAAA,EAAM,GAAI,OAAA;AAC1B,EAAA,MAAM,UAAA,GAAa,cAAc,QAAQ,CAAA;AACzC,EAAA,IAAI,UAAA,KAAe,IAAA,IAAQ,CAAC,KAAA,EAAO;AACjC,IAAA,OAAO,KAAA;AAAA,EACT;AAEA,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAEhC,EAAA,MAAM,aAAA,GAAgB,iBAAiB,QAAQ,CAAA;AAC/C,EAAA,MAAM,eAAe,KAAA,IAAS,aAAA,KAAkB,IAAA,IAAS,UAAA,KAAe,QAAQ,UAAA,GAAa,aAAA;AAE7F,EAAA,IAAI,YAAA,EAAc;AAChB,IAAA,MAAM,aAAa,QAAQ,CAAA;AAC3B,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,KAAA;AACT;AA7JA,IAgBM,iBAAA;AAhBN,IAAA,SAAA,GAAA,KAAA,CAAA;AAAA,EAAA,mCAAA,GAAA;AASA,IAAA,UAAA,EAAA;AAGA,IAAA,eAAA,EAAA;AACA,IAAA,UAAA,EAAA;AAGA,IAAM,iBAAA,GAAoB;AAAA;AAAA;AAAA,CAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;AC+BnB,SAAS,kBAAkB,IAAA,EAAsB;AACtD,EAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,IAAI,GAAG,OAAO,CAAA;AACnC,EAAA,MAAM,GAAA,GAAM,IAAA,CAAK,GAAA,CAAI,IAAI,CAAA;AACzB,EAAA,OAAO,OAAO,CAAA,GAAI,GAAA,CAAA;AACpB;AAYO,SAAS,iBAAA,CACd,aAAA,EACA,cAAA,EACA,KAAA,EACA,OAAA,EACwB;AACxB,EAAA,IAAI,cAAc,MAAA,KAAW,CAAA,IAAK,eAAe,MAAA,KAAW,CAAA,SAAU,EAAC;AAEvE,EAAA,MAAM,OAAA,GAAU,SAAS,YAAA,IAAgB,qBAAA;AACzC,EAAA,MAAM,OAAA,GAAU,SAAS,UAAA,IAAc,mBAAA;AACvC,EAAA,MAAM,QAAQ,OAAA,GAAU,OAAA;AACxB,EAAA,IAAI,KAAA,IAAS,CAAA,EAAG,OAAO,EAAC;AACxB,EAAA,MAAM,OAAO,OAAA,GAAU,KAAA;AACvB,EAAA,MAAM,OAAO,OAAA,GAAU,KAAA;AACvB,EAAA,MAAM,QAAQ,OAAA,EAAS,KAAA;AACvB,EAAA,MAAM,WAAW,OAAA,EAAS,QAAA;AAG1B,EAAA,MAAM,MAAA,uBAAa,GAAA,EAA6D;AAEhF,EAAA,KAAA,MAAW,KAAK,aAAA,EAAe;AAC7B,IAAA,MAAA,CAAO,GAAA,CAAI,KAAA,CAAM,CAAA,CAAE,IAAI,GAAG,EAAE,IAAA,EAAM,CAAA,CAAE,IAAA,EAAM,QAAA,EAAU,CAAA,CAAE,KAAA,EAAO,QAAA,EAAU,GAAG,CAAA;AAAA,EAC5E;AAEA,EAAA,KAAA,MAAW,KAAK,cAAA,EAAgB;AAC9B,IAAA,MAAM,EAAA,GAAK,KAAA,CAAM,CAAA,CAAE,IAAI,CAAA;AACvB,IAAA,MAAM,QAAA,GAAW,MAAA,CAAO,GAAA,CAAI,EAAE,CAAA;AAC9B,IAAA,IAAI,QAAA,EAAU;AACZ,MAAA,QAAA,CAAS,WAAW,CAAA,CAAE,KAAA;AAAA,IACxB,CAAA,MAAO;AACL,MAAA,MAAA,CAAO,GAAA,CAAI,EAAA,EAAI,EAAE,IAAA,EAAM,CAAA,CAAE,IAAA,EAAM,QAAA,EAAU,CAAA,EAAG,QAAA,EAAU,CAAA,CAAE,KAAA,EAAO,CAAA;AAAA,IACjE;AAAA,EACF;AAGA,EAAA,MAAM,UAAkC,EAAC;AACzC,EAAA,KAAA,MAAW,KAAA,IAAS,MAAA,CAAO,MAAA,EAAO,EAAG;AACnC,IAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,MACX,MAAM,KAAA,CAAM,IAAA;AAAA,MACZ,KAAA,EAAO,IAAA,GAAO,KAAA,CA
AM,QAAA,GAAW,OAAO,KAAA,CAAM;AAAA,KAC7C,CAAA;AAAA,EACH;AAEA,EAAA,OAAA,CAAQ,KAAK,CAAC,CAAA,EAAG,MAAM,CAAA,CAAE,KAAA,GAAQ,EAAE,KAAK,CAAA;AAExC,EAAA,MAAM,QAAA,GAAW,QAAA,KAAa,MAAA,GAAY,OAAA,CAAQ,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,KAAA,IAAS,QAAQ,CAAA,GAAI,OAAA;AACvF,EAAA,OAAO,UAAU,MAAA,GAAY,QAAA,CAAS,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA,GAAI,QAAA;AAC1D;AAMO,SAAS,kBAAA,CACd,aAAA,EACA,cAAA,EACA,OAAA,EACgB;AAChB,EAAA,MAAM,UAAA,GAAa,aAAA,CAAc,GAAA,CAAI,CAAC,CAAA,MAAO,EAAE,IAAA,EAAM,CAAA,CAAE,MAAA,EAAQ,KAAA,EAAO,CAAA,CAAE,KAAA,EAAM,CAAE,CAAA;AAChF,EAAA,MAAM,SAAA,GAAY,cAAA,CAAe,GAAA,CAAI,CAAC,CAAA,MAAO,EAAE,IAAA,EAAM,CAAA,CAAE,MAAA,EAAQ,KAAA,EAAO,CAAA,CAAE,KAAA,EAAM,CAAE,CAAA;AAChF,EAAA,MAAM,MAAA,GAAS,kBAAkB,UAAA,EAAY,SAAA,EAAW,CAAC,IAAA,KAAS,IAAA,CAAK,IAAI,OAAO,CAAA;AAClF,EAAA,OAAO,MAAA,CAAO,GAAA,CAAI,CAAC,CAAA,MAAO,EAAE,MAAA,EAAQ,CAAA,CAAE,IAAA,EAAM,KAAA,EAAO,CAAA,CAAE,KAAA,EAAM,CAAE,CAAA;AAC/D;AA7HA,IAgCa,qBAAA,CAAA,CACA,qBACA,oBAAA,CAAA,CACA;AAnCb,IAAA,WAAA,GAAA,KAAA,CAAA;AAAA,EAAA,6BAAA,GAAA;AAgCO,IAAM,qBAAA,GAAwB,GAAA;AAC9B,IAAM,mBAAA,GAAsB,GAAA;AAC5B,IAAM,oBAAA,GAAuB,CAAA;AAC7B,IAAM,gBAAA,GAAmB,IAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACnBhC,SAAS,aAAA,CAAiB,OAAe,QAAA,EAAgB;AACvD,EAAA,IAAI;AACF,IAAA,OAAO,IAAA,CAAK,MAAM,KAAK,CAAA;AAAA,EACzB,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,QAAA;AAAA,EACT;AACF;AAEA,SAAS,gBAAgB,GAAA,EAAuC;AAC9D,EAAA,MAAM,IAAA,GAAO;AAAA,IACX,IAAI,GAAA,CAAI,EAAA;AAAA,IACR,MAAM,GAAA,CAAI,IAAA;AAAA,IACV,SAAS,GAAA,CAAI,OAAA;AAAA,IACb,SAAS,GAAA,CAAI,OAAA;AAAA,IACb,IAAA,EAAM,GAAA,CAAI,IAAA,GAAO,GAAA,CAAI,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,MAAA,CAAO,OAAO,CAAA,GAAI,EAAC;AAAA,IACxD,QAAQ,GAAA,CAAI,MAAA;AAAA,IACZ,OAAA,EAAS,aAAA,CAAc,GAAA,CAAI,OAAA,EAAS,EAAE,CAAA;AAAA,IACtC,UAAA,EAAY,aAAA,CAAc,GAAA,CAAI,UAAA,EAAY,EAAE,CAAA;AAAA,IAC5C,OAAA,EAAS,aAAA,CAAc,GAAA,CAAI,OAAA,EAAS,EAAE,CAAA;AAAA,IACtC,SAAS,GAAA,CAAI,OAAA;AAAA,IACb,SAAA,EAAW,IAAI,SAAA,KAAc;AAAA,GAC/B;AAEA,EAAA,IAAI,GAAA,CAAI,QAAA,KAAa,IAAA,EAAM,IAAA,CAAK,WAAW,GAAA,CAAI,QAAA;AAC/C,EAAA,IAAI,GAAA,CAAI,QAAA,KAAa,IAAA,EAAM,IAAA,CAAK,WAAW,GAAA,CAAI,QAAA;AAC/C,EAAA,IAAI,GAAA,CAAI,OAAA,KAAY,CAAA,EAAG,IAAA,CAAK,OAAA,GAAU,IAAA;AACtC,EAAA,IAAI,GAAA,CAAI,eAAA,GAAkB,CAAA,EAAG,IAAA,CAAK,iBAAiB,GAAA,CAAI,eAAA;AACvD,EAAA,IAAI,GAAA,CAAI,cAAA,KAAmB,IAAA,EAAM,IAAA,CAAK,gBAAgB,GAAA,CAAI,cAAA;AAC1D,EAAA,IAAI,GAAA,CAAI,mBAAA,KAAwB,IAAA,EAAM,IAAA,CAAK,qBAAqB,GAAA,CAAI,mBAAA;AACpE,EAAA,IAAI,GAAA,CAAI,kBAAkB,IAAA,EAAM;AAC9B,IAAA,IAAA,CAAK,QAAA,GAAW;AAAA,MACd,MAAM,GAAA,CAAI,aAAA;AAAA,MACV,GAAI,GAAA,CAAI,aAAA,KAAkB,QAAQ,EAAE,IAAA,EAAM,IAAI,aAAA,EAAc;AAAA,MAC5D,GAAI,GAAA,CAAI,eAAA,KAAoB,QAAQ,EAAE,MAAA,EAAQ,IAAI,eAAA;AAAgB,KACpE;AAAA,EACF;AACA,EAAA,IAAI,GAAA,CAAI,gBAAA,KAAqB,IAAA,IAAQ,GAAA,CAAI,qBAAqB,CAAA,EAAG;AAC/D,IAAA,IAAA,CAAK,kBAAkB,GAAA,CAAI,gBAAA;AAAA,EAC7B;AACA,EAAA,IAAI,GAAA,CAAI,YAAA,KAAiB,IAAA,EAAM,IAAA,CAAK,cAAc,GAAA,CAAI,YAAA;AACtD,EAAA,IAAI,GAAA,CAAI,cAAA,KAAmB,IAAA,EAAM,IAAA,CAAK,gBAAgB,GAAA,CAAI,cAAA;AAC1D,EAAA,IAAI,GAAA,CAAI,WAAA,KAAgB,IAAA,IAAQ,GAAA,CAAI,iBAAiB,IAAA,EAAM;AACzD,IAAA,IAAA,CAAK,UAAU,EAAE,GAAA,EAAK,IAAI,WAAA,EAAa,IAAA,EAAM,IAAI,YAAA,EAAa;AAAA,EAChE;AAEA,EAAA,MAAM,MAAA,GAAS,gBAAA,CAAiB,SAAA,CAAU,IAAI,CAAA;AAC9C,EAAA,IAAI,CAAC,MAAA,CAAO,OAAA,EAAS,OAAO,IAAA;AAC5B,EAAA,OAAO,MAAA,CAAO,IAAA;AAChB;AAWO,SAAS,kBAAkB,QAAA,EAAgC;AAChE,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAEhC,EAAA,MAAM,IAAA,GAAO,QAAA,CACV,OAAA,CAAQ,oDAAoD,EAC5D,GAAA,EAAI;AAEP,EAAA,OAAO,IAAA,CAAK,IAAI,eAAe,CAAA,CAAE,OAAO,CAAC,CAAA,KAAuB,MAAM,IAAI,CAAA;AAC5E;AAWO,SAAS,iBAAiB,KAAA,EAAuB;AAEtD,EAAA,MAAM,QAAA,GAAW,KAAA,CAAM,OAAA,CAAQ,gBAAA,EAAkB,EAAE,CAAA;AAEnD,EAAA,MAAM,MAAA,GAAS,QAAA,CACZ,KAAA,CAAM,K
AAK,EACX,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,SAAS,CAAA,IAAK,CAAC,aAAA,CAAc,GAAA,CAAI,CAAC,CAAC,CAAA;AACtD,EAAA,OAAO,MAAA,CAAO,KAAK,GAAG,CAAA;AACxB;AAOO,SAAS,uBAAA,CAAwB,UAAkB,SAAA,EAA2B;AACnF,EAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAE5B,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAEhC,EAAA,MAAM,GAAA,GAAA,iBAAM,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAEnC,EAAA,MAAM,MAAA,GAAS,SAAS,OAAA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAAA,CAK/B,CAAA;AAED,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,WAAA,CAAY,CAAC,GAAA,KAAkB;AACzD,IAAA,KAAA,MAAW,MAAM,GAAA,EAAK;AACpB,MAAA,MAAA,CAAO,GAAA,CAAI,KAAK,EAAE,CAAA;AAAA,IACpB;AAAA,EACF,CAAC,CAAA;AAED,EAAA,UAAA,CAAW,SAAS,CAAA;AACtB;AAaA,SAAS,eAAA,CACP,QAAA,EACA,KAAA,EACA,KAAA,EACA,OAAA,EACa;AACb,EAAA,MAAM,QAAA,GAAW,OAAO,QAAQ,CAAA;AAEhC,EAAA,MAAM,SAAA,GAAY,iBAAiB,KAAK,CAAA;AACxC,EAAA,IAAI,SAAA,KAAc,EAAA,EAAI,OAAO,EAAC;AAE9B,EAAA,MAAM,UAAA,GAAa,OAAA,CAAQ,WAAA,GAAc,eAAA,GAAkB,KAAA;AAC3D,EAAA,MAAM,WAAA,GAAc,OAAA,CAAQ,WAAA,GAAc,mBAAA,GAAsB,EAAA;AAChE,EAAA,MAAM,UAAA,GAAa,OAAA,CAAQ,UAAA,GAAa,gBAAA,GAAmB,EAAA;AAE3D,EAAA,MAAM,GAAA,GAAM;AAAA,WAAA,EACD,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,MAAA,EAKf,UAAU;AAAA,IAAA,EACZ,WAAW;AAAA;AAAA,EAAA,CAAA;AAIf,EAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,UAAA,GACnB,CAAC,SAAA,EAAW,OAAA,CAAQ,UAAA,EAAY,KAAK,CAAA,GACrC,CAAC,SAAA,EAAW,KAAK,CAAA;AAErB,EAAA,IAAI;AACF,IAAA,OAAO,SAAS,OAAA,CAAQ,GAAG,CAAA,CAAE,GAAA,CAAI,GAAG,MAAM,CAAA;AAAA,EAC5C,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,OAAA,GAAU,GAAA,YAAe,KAAA,GAAQ,GAAA,CAAI,OAAA,GAAU,oBAAA;AACrD,IAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,+BAAA,EAAkC,OAAO,CAAA,CAAE,CAAA;AACzD,IAAA,OAAO,EAAC;AAAA,EACV;AACF;AAUA,eAAsB,aAAA,CACpB,QAAA,EACA,KAAA,EACA,KAAA,EACA,UAAA,EACuB;AACvB,EAAA,MAAM,IAAA,GAAO,gBAAgB,QAAA,EAAU,KAAA,EAAO,OAAO,EAAE,WAAA,EAAa,KAAA,EAAO,UAAA,EAAY,CAAA;AACvF,EAAA,OAAO,IAAA,CAAK,IAAI,eAAe,CAAA,CAAE,OAAO,CAAC,CAAA,KAAuB,MAAM,IAAI,CAAA;AAC5E;AAWA,eAAsB,mBAAA,CACpB,QAAA,EACA,KAAA,EACA,KAAA,EACA,UAAA,EACgC;AAChC,EAAA,MAAM,IAAA,GAAO,gBAAgB,QAAA,EAAU,KAAA,EAAO,OAAO,EAAE,WAAA,EAAa,IAAA,EAAM,UAAA,EAAY,CAAA;AACtF,EAAA,MAAM,UAAiC,EAAC;AACxC,EAAA,KAAA,MAAW,OAAO,IAAA,EAAM;AACtB,IAAA,MAAM,MAAA,GAAS,gBAAgB,GAAG,CAAA;AAClC,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,OAAA,CAAQ,IAAA,CAAK,EAAE,MAAA,EAAQ,KAAA,EAAO,kBAAkB,GAAA,CAAI,IAAI,GAAG,CAAA;AAAA,IAC7D;AAAA,EACF;AACA,EAAA,OAAO,OAAA;AACT;AAhOA,IAsFM,aAAA;AAtFN,IAAA,WAAA,GAAA,KAAA,CAAA;AAAA,EAAA,qCAAA,GAAA;AAIA,IAAA,UAAA,EAAA;AAEA,IAAA,WAAA,EAAA;AAGA,IAAA,eAAA,EAAA;AA6EA,IAAM,aAAA,uBAAoB,GAAA,CAAI,CAAC,OAAO,IAAA,EAAM,KAAA,EAAO,MAAM,CAAC,CAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACtF1D,IAAA,WAAA,GAAA,KAAA,CAAA;AAAA,EAAA,oCAAA,GAAA;AAWA,IAAA,eAAA,EAAA;AAGA,IAAA,UAAA,EAAA;AAWA,IAAA,SAAA,EAAA;AAGA,IAAA,iBAAA,EAAA;AAGA,IAAA,WAAA,EAAA;AAAA,EAAA;AAAA,CAAA,CAAA;AC/BA,IAAA,YAAA,GAAA,KAAA,CAAA;AAAA,EAAA,+BAAA,GAAA;AAWA,IAAA,UAAA,EAAA;AAGA,IAAA,UAAA,EAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACdA,IAAA,YAAA,GAAA,KAAA,CAAA;AAAA,EAAA,6BAAA,GAAA;AAQA,IAAA,UAAA,EAAA;AAIA,IAAA,WAAA,EAAA;AAuBA,IAAA,YAAA,EAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACnCA,IAAA,aAAA,GAAA,EAAA;AAAA,QAAA,CAAA,aAAA,EAAA;AAAA,EAAA,cAAA,EAAA,MAAA,cAAA;AAAA,EAAA,SAAA,EAAA,MAAA,SAAA;AAAA,EAAA,mBAAA,EAAA,MAAA,mBAAA;AAAA,EAAA,gBAAA,EAAA,MAAA,gBAAA;AAAA,EAAA,aAAA,EAAA,MAAA,aAAA;AAAA,EAAA,YAAA,EAAA,MAAA;AAAA,CAAA,CAAA;AA0CO,SAAS,gBAAA,GAA4B;AAC1C,EAAA,OAAO,UAAA,CAAWH,IAAAA,CAAK,iBAAA,EAAmB,cAAc,CAAC,CAAA;AAC3D;AA+BA,eAAsB,aAAA,GAA0C;AAE9D,EAAA,IAAI,oBAAoB,IAAA,EAAM;AAC5B,IAAA,OAAO,eAAA;AAAA,EACT;AAGA,EAAA,IAAI,CAAC,kBAAiB,EAAG;AACvB,IAAA,eAAA,GAAkB;AAAA,MAChB,MAAA,EAAQ,KAAA;AAAA,MACR,MAAA,EAAQ,gCAAA;AAAA,MACR,MAAA,EAAQ;AAAA,KACV;AACA,IAAA,OAAO,eAAA;AAAA,EACT;AAGA,EAAA,IAAI,KAAA,GAAQ,IAAA;AACZ,EAAA,IAAI,KAAA,GAAQ,IAAA;AACZ,EAAA,IAAI,OAAA,GAA
U,IAAA;AAEd,EAAA,IAAI;AACF,IAAA,MAAM,SAAA,GAAYA,IAAAA,CAAK,iBAAA,EAAmB,cAAc,CAAA;AAGxD,IAAA,KAAA,GAAQ,MAAM,QAAA,CAAS;AAAA,MACrB,KAAA,EAAO,OAAA;AAAA;AAAA,MACP,YAAA,EAAc,KAAA;AAAA;AAAA,MACd,UAAU,aAAA,CAAc;AAAA;AAAA;AAAA,KAEzB,CAAA;AAGD,IAAA,KAAA,GAAQ,MAAM,KAAA,CAAM,SAAA,CAAU,EAAE,WAAW,CAAA;AAG3C,IAAA,OAAA,GAAU,MAAM,MAAM,sBAAA,EAAuB;AAG7C,IAAA,eAAA,GAAkB,EAAE,QAAQ,IAAA,EAAK;AACjC,IAAA,OAAO,eAAA;AAAA,EACT,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,OAAA,GAAU,GAAA,YAAe,KAAA,GAAQ,GAAA,CAAI,OAAA,GAAU,eAAA;AACrD,IAAA,eAAA,GAAkB;AAAA,MAChB,MAAA,EAAQ,KAAA;AAAA,MACR,MAAA,EAAQ,kDAAkD,OAAO,CAAA,CAAA;AAAA,MACjE,MAAA,EAAQ;AAAA,KACV;AACA,IAAA,OAAO,eAAA;AAAA,EACT,CAAA,SAAE;AAEA,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAI;AAAE,QAAA,MAAM,QAAQ,OAAA,EAAQ;AAAA,MAAG,CAAA,CAAA,MAAQ;AAAA,MAA8B;AAAA,IACvE;AACA,IAAA,IAAI,KAAA,EAAO;AACT,MAAA,IAAI;AAAE,QAAA,MAAM,MAAM,OAAA,EAAQ;AAAA,MAAG,CAAA,CAAA,MAAQ;AAAA,MAA8B;AAAA,IACrE;AACA,IAAA,IAAI,KAAA,EAAO;AACT,MAAA,IAAI;AAAE,QAAA,MAAM,MAAM,OAAA,EAAQ;AAAA,MAAG,CAAA,CAAA,MAAQ;AAAA,MAA8B;AAAA,IACrE;AAAA,EACF;AACF;AAQO,SAAS,mBAAA,GAA4B;AAC1C,EAAA,eAAA,GAAkB,IAAA;AACpB;AAkBA,eAAsB,YAAA,CAAa,OAAA,GAA6B,EAAC,EAAoB;AACnF,EAAA,MAAM,EAAE,GAAA,GAAM,IAAA,EAAK,GAAI,OAAA;AACvB,EAAA,OAAO,gBAAA,CAAiB,SAAA,EAAW,EAAE,GAAA,EAAK,CAAA;AAC5C;AAvKA,IAmBa,SAAA,CAAA,CAMA,gBAGP,iBAAA,CAAA,CAGF;AA/BJ,IAAA,UAAA,GAAA,KAAA,CAAA;AAAA,EAAA,gCAAA,GAAA;AAmBO,IAAM,SAAA,GAAY,iFAAA;AAMlB,IAAM,cAAA,GAAiB,+CAAA;AAG9B,IAAM,iBAAA,GAAoBA,IAAAA,CAAK,OAAA,EAAQ,EAAG,mBAAmB,QAAQ,CAAA;AAGrE,IAAI,eAAA,GAA0C,IAAA;AAAA,EAAA;AAAA,CAAA,CAAA;AC4B9C,eAAsB,YAAA,GAA+C;AACnE,EAAA,IAAI,kBAAkB,OAAO,gBAAA;AAC7B,EAAA,IAAI,aAAa,OAAO,WAAA;AAExB,EAAA,WAAA,GAAA,CAAe,YAAY;AACzB,IAAA,IAAI;AACF,MAAA,MAAM,YAAY,MAAM,YAAA,CAAa,EAAE,GAAA,EAAK,MAAM,CAAA;AAClD,MAAA,aAAA,GAAgB,MAAMI,QAAAA,CAAS;AAAA,QAC7B,KAAA,EAAO,OAAA;AAAA;AAAA,QACP,YAAA,EAAc,KAAA;AAAA;AAAA,QACd,UAAUC,aAAAA,CAAc;AAAA;AAAA;AAAA,OAEzB,CAAA;AACD,MAAA,aAAA,GAAgB,MAAM,aAAA,CAAc,SAAA,CAAU,EAAE,WAAW,CAAA;AAC3D,MAAA,gBAAA,GAAmB,MAAM,cAAc,sBAAA,EAAuB;AAC9D,MAAA,OAAO,gBAAA;AAAA,IACT,SAAS,GAAA,EAAK;AACZ,MAAA,WAAA,GAAc,IAAA;AACd,MAAA,MAAM,GAAA;AAAA,IACR;AAAA,EACF,CAAA,GAAG;AAEH,EAAA,OAAO,WAAA;AACT;AAQA,eAAsB,wBAAA,GAA0C;AAC9D,EAAA,MAAM,OAAA,GAAU,WAAA;AAChB,EAAA,IAAI,OAAA,EAAS;AACX,IAAA,IAAI;AACF,MAAA,MAAM,OAAA;AAAA,IACR,CAAA,CAAA,MAAQ;AAAA,IAER;AAAA,EACF;AAEA,EAAA,MAAM,OAAA,GAAU,gBAAA;AAChB,EAAA,MAAM,KAAA,GAAQ,aAAA;AACd,EAAA,MAAM,KAAA,GAAQ,aAAA;AAEd,EAAA,gBAAA,GAAmB,IAAA;AACnB,EAAA,aAAA,GAAgB,IAAA;AAChB,EAAA,aAAA,GAAgB,IAAA;AAChB,EAAA,WAAA,GAAc,IAAA;AAEd,EAAA,MAAM,YAAgC,EAAC;AAEvC,EAAA,IAAI,OAAA,EAAS;AACX,IAAA,SAAA,CAAU,IAAA,CAAK,OAAA,CAAQ,OAAA,EAAS,CAAA;AAAA,EAClC;AACA,EAAA,IAAI,KAAA,EAAO;AACT,IAAA,SAAA,CAAU,IAAA,CAAK,KAAA,CAAM,OAAA,EAAS,CAAA;AAAA,EAChC;AACA,EAAA,IAAI,KAAA,EAAO;AACT,IAAA,SAAA,CAAU,IAAA,CAAK,KAAA,CAAM,OAAA,EAAS,CAAA;AAAA,EAChC;AAEA,EAAA,IAAI,SAAA,CAAU,SAAS,CAAA,EAAG;AACxB,IAAA,MAAM,OAAA,CAAQ,WAAW,SAAS,CAAA;AAAA,EACpC;AACF;AA2CO,SAAS,eAAA,GAAwB;AACtC,EAAA,KAAK,wBAAA,EAAyB;AAChC;AAWA,eAAsB,cAAiB,EAAA,EAAkC;AACvE,EAAA,IAAI;AACF,IAAA,OAAO,MAAM,EAAA,EAAG;AAAA,EAClB,CAAA,SAAE;AACA,IAAA,MAAM,wBAAA,EAAyB;AAAA,EACjC;AACF;AAwBA,eAAsB,UAAU,IAAA,EAAqC;AACnE,EAAA,MAAM,GAAA,GAAM,MAAM,YAAA,EAAa;AAC/B,EAAA,MAAM,MAAA,GAAS,MAAM,GAAA,CAAI,eAAA,CAAgB,IAAI,CAAA;AAC7C,EAAA,OAAO,IAAI,YAAA,CAAa,MAAA,CAAO,MAAM,CAAA;AACvC;AA6BA,eAAsB,WAAW,KAAA,EAA0C;AACzE,EAAA,IAAI,KAAA,CAAM,MAAA,KAAW,CAAA,EAAG,OAAO,EAAC;AAEhC,EAAA,MAAM,GAAA,GAAM,MAAM,YAAA,EAAa;AAC/B,EAAA,MAAM,UAA0B,EAAC;AAEjC,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,MAAM,MAAA,GAAS,MAAM,GAAA,CAAI,eAAA,CAAgB,IAAI,CAAA;AAC7C,IAAA,OAAA,CAAQ,IAAA,CAAK,IAAI,YAAA,CAAa,MAAA,CAAO,MAA
M,CAAC,CAAA;AAAA,EAC9C;AAEA,EAAA,OAAO,OAAA;AACT;AA/PA,IAsBI,gBAAA,EAEA,aAEA,aAAA,EACA,aAAA;AA3BJ,IAAA,UAAA,GAAA,KAAA,CAAA;AAAA,EAAA,gCAAA,GAAA;AAmBA,IAAA,UAAA,EAAA;AAGA,IAAI,gBAAA,GAAiD,IAAA;AAErD,IAAI,WAAA,GAAqD,IAAA;AAEzD,IAAI,aAAA,GAA8B,IAAA;AAClC,IAAI,aAAA,GAAmC,IAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;AC3BvC,IAAA,eAAA,GAAA,KAAA,CAAA;AAAA,EAAA,gCAAA,GAAA;AAQA,IAAA,UAAA,EAAA;AAGA,IAAA,UAAA,EAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACQO,SAAS,sBAAsB,UAAA,EAA6C;AACjF,EAAA,MAAM,IAAI,UAAA,CAAW,MAAA;AACrB,EAAA,MAAM,MAAA,GAAqB,KAAA,CAAM,IAAA,CAAK,EAAE,QAAQ,CAAA,EAAE,EAAG,MAAM,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,IAAA,CAAK,CAAC,CAAC,CAAA;AAEvF,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,IAAA,MAAA,CAAO,CAAC,CAAA,CAAG,CAAC,CAAA,GAAI,CAAA;AAChB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,GAAG,CAAA,EAAA,EAAK;AAC9B,MAAA,MAAM,MAAM,gBAAA,CAAiB,UAAA,CAAW,CAAC,CAAA,EAAI,UAAA,CAAW,CAAC,CAAE,CAAA;AAC3D,MAAA,MAAA,CAAO,CAAC,CAAA,CAAG,CAAC,CAAA,GAAI,GAAA;AAChB,MAAA,MAAA,CAAO,CAAC,CAAA,CAAG,CAAC,CAAA,GAAI,GAAA;AAAA,IAClB;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAWO,SAAS,mBAAA,CACd,KAAA,EACA,UAAA,EACA,SAAA,GAAoB,iBAAA,EACL;AACf,EAAA,MAAM,IAAI,KAAA,CAAM,MAAA;AAChB,EAAA,IAAI,CAAA,KAAM,GAAG,OAAO,EAAE,UAAU,EAAC,EAAG,KAAA,EAAO,EAAC,EAAE;AAE9C,EAAA,MAAM,MAAA,GAAS,sBAAsB,UAAU,CAAA;AAG/C,EAAA,MAAM,MAAA,GAAS,KAAA,CAAM,IAAA,CAAK,EAAE,MAAA,EAAQ,GAAE,EAAG,CAAC,CAAA,EAAG,CAAA,KAAM,CAAC,CAAA;AAEpD,EAAA,SAAS,KAAK,CAAA,EAAmB;AAC/B,IAAA,OAAO,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,EAAG;AACtB,MAAA,MAAA,CAAO,CAAC,CAAA,GAAI,MAAA,CAAO,MAAA,CAAO,CAAC,CAAE,CAAA;AAC7B,MAAA,CAAA,GAAI,OAAO,CAAC,CAAA;AAAA,IACd;AACA,IAAA,OAAO,CAAA;AAAA,EACT;AAEA,EAAA,SAAS,KAAA,CAAM,GAAW,CAAA,EAAiB;AACzC,IAAA,MAAM,KAAA,GAAQ,KAAK,CAAC,CAAA;AACpB,IAAA,MAAM,KAAA,GAAQ,KAAK,CAAC,CAAA;AACpB,IAAA,IAAI,KAAA,KAAU,KAAA,EAAO,MAAA,CAAO,KAAK,CAAA,GAAI,KAAA;AAAA,EACvC;AAGA,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,GAAG,CAAA,EAAA,EAAK;AAC9B,MAAA,IAAI,MAAA,CAAO,CAAC,CAAA,CAAG,CAAC,KAAM,SAAA,EAAW;AAC/B,QAAA,KAAA,CAAM,GAAG,CAAC,CAAA;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAGA,EAAA,MAAM,MAAA,uBAAa,GAAA,EAA0B;AAC7C,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,IAAA,MAAM,IAAA,GAAO,KAAK,CAAC,CAAA;AACnB,IAAA,IAAI,KAAA,GAAQ,MAAA,CAAO,GAAA,CAAI,IAAI,CAAA;AAC3B,IAAA,IAAI,CAAC,KAAA,EAAO;AACV,MAAA,KAAA,GAAQ,EAAC;AACT,MAAA,MAAA,CAAO,GAAA,CAAI,MAAM,KAAK,CAAA;AAAA,IACxB;AACA,IAAA,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,CAAC,CAAE,CAAA;AAAA,EACtB;AAEA,EAAA,MAAM,WAA2B,EAAC;AAClC,EAAA,MAAM,QAAsB,EAAC;AAC7B,EAAA,KAAA,MAAW,KAAA,IAAS,MAAA,CAAO,MAAA,EAAO,EAAG;AACnC,IAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG;AACtB,MAAA,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,CAAC,CAAE,CAAA;AAAA,IACtB,CAAA,MAAO;AACL,MAAA,QAAA,CAAS,KAAK,KAAK,CAAA;AAAA,IACrB;AAAA,EACF;AACA,EAAA,OAAO,EAAE,UAAU,KAAA,EAAM;AAC3B;AAtGA,IAWM,iBAAA;AAXN,IAAA,eAAA,GAAA,KAAA,CAAA;AAAA,EAAA,4BAAA,GAAA;AAMA,IAAAC,YAAAA,EAAAA;AAKA,IAAM,iBAAA,GAAoB,IAAA;AAAA,EAAA;AAAA,CAAA,CAAA;AC+BnB,SAAS,cAAc,KAAA,EAAuB;AACnD,EAAA,MAAM,IAAA,GAAOJ,WAAW,QAAQ,CAAA,CAAE,OAAO,KAAK,CAAA,CAAE,OAAO,KAAK,CAAA;AAC5D,EAAA,OAAO,CAAA,IAAA,EAAO,IAAA,CAAK,KAAA,CAAM,CAAA,EAAG,CAAC,CAAC,CAAA,CAAA;AAChC;AA7CA,IAaa,iBAAA,CAAA,CAGA;AAhBb,IAAAK,WAAAA,GAAA,KAAA,CAAA;AAAA,EAAA,uBAAA,GAAA;AAaO,IAAM,iBAAA,GAAoB,oCAAA;AAG1B,IAAM,gBAAA,GAAmBC,EAAE,MAAA,CAAO;AAAA,MACvC,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,MAAM,mBAAmB,CAAA;AAAA,MACxC,IAAA,EAAMA,CAAAA,CAAE,MAAA,EAAO,CAAE,IAAI,CAAC,CAAA;AAAA,MACtB,WAAA,EAAaA,CAAAA,CAAE,MAAA,EAAO,CAAE,IAAI,CAAC,CAAA;AAAA,MAC7B,WAAWA,CAAAA,CAAE,MAAA,EAAO,CAAE,GAAA,GAAM,QAAA,E
AAS;AAAA,MACrC,QAAA,EAAUA,EAAE,OAAA,EAAQ;AAAA,MACpB,YAAA,EAAcA,CAAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA,MAClC,SAAA,EAAWA,EAAE,KAAA,CAAMA,CAAAA,CAAE,QAAQ,CAAA,CAAE,IAAI,CAAC,CAAA;AAAA,MACpC,OAAA,EAASA,EAAE,MAAA;AAAO;AAAA,KACnB,CAAA;AAAA,EAAA;AAAA,CAAA,CAAA;ACPD,eAAsB,gBAAgB,QAAA,EAAyC;AAC7E,EAAA,MAAM,QAAA,GAAWR,IAAAA,CAAK,QAAA,EAAU,iBAAiB,CAAA;AAEjD,EAAA,IAAI,OAAA;AACJ,EAAA,IAAI;AACF,IAAA,OAAA,GAAU,MAAMS,QAAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AAAA,EAC5C,SAAS,GAAA,EAAK;AACZ,IAAA,IAAK,GAAA,CAA8B,SAAS,QAAA,EAAU;AACpD,MAAA,OAAO,EAAC;AAAA,IACV;AACA,IAAA,MAAM,GAAA;AAAA,EACR;AAEA,EAAA,MAAM,WAAyB,EAAC;AAChC,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA;AAChC,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,MAAM,OAAA,GAAU,KAAK,IAAA,EAAK;AAC1B,IAAA,IAAI,CAAC,OAAA,EAAS;AAEd,IAAA,IAAI,MAAA;AACJ,IAAA,IAAI;AACF,MAAA,MAAA,GAAS,IAAA,CAAK,MAAM,OAAO,CAAA;AAAA,IAC7B,CAAA,CAAA,MAAQ;AACN,MAAA;AAAA,IACF;AACA,IAAA,MAAM,MAAA,GAAS,gBAAA,CAAiB,SAAA,CAAU,MAAM,CAAA;AAChD,IAAA,IAAI,OAAO,OAAA,EAAS;AAClB,MAAA,QAAA,CAAS,IAAA,CAAK,OAAO,IAAI,CAAA;AAAA,IAC3B;AAAA,EACF;AAEA,EAAA,OAAO,QAAA;AACT;AAQA,eAAsB,gBAAA,CAAiB,UAAkB,QAAA,EAAuC;AAC9F,EAAA,MAAM,QAAA,GAAWT,IAAAA,CAAK,QAAA,EAAU,iBAAiB,CAAA;AACjD,EAAA,MAAMU,MAAMT,OAAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AAElD,EAAA,MAAM,KAAA,GAAQ,QAAA,CAAS,GAAA,CAAI,CAAC,CAAA,KAAM,IAAA,CAAK,SAAA,CAAU,CAAC,CAAA,GAAI,IAAI,CAAA,CAAE,IAAA,CAAK,EAAE,CAAA;AACnE,EAAA,MAAMU,UAAAA,CAAW,QAAA,EAAU,KAAA,EAAO,OAAO,CAAA;AAC3C;AAhEA,IAAA,OAAA,GAAA,KAAA,CAAA;AAAA,EAAA,oBAAA,GAAA;AAUA,IAAAJ,WAAAA,EAAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACOO,SAAS,iBAAA,CAAkB,SAAuB,SAAA,EAA+B;AACtF,EAAA,MAAM,EAAA,GAAK,cAAc,SAAS,CAAA;AAClC,EAAA,MAAM,YAAY,OAAA,CAAQ,MAAA;AAC1B,EAAA,MAAM,YAAY,OAAA,CAAQ,GAAA,CAAI,CAAC,IAAA,KAAS,KAAK,EAAE,CAAA;AAG/C,EAAA,MAAM,SAAA,uBAAgB,GAAA,EAAoB;AAC1C,EAAA,KAAA,MAAW,QAAQ,OAAA,EAAS;AAC1B,IAAA,KAAA,MAAW,GAAA,IAAO,KAAK,IAAA,EAAM;AAC3B,MAAA,SAAA,CAAU,IAAI,GAAA,EAAA,CAAM,SAAA,CAAU,IAAI,GAAG,CAAA,IAAK,KAAK,CAAC,CAAA;AAAA,IAClD;AAAA,EACF;AAGA,EAAA,MAAM,UAAA,GAAa,CAAC,GAAG,SAAA,CAAU,SAAS,CAAA,CACvC,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM,EAAE,CAAC,CAAA,GAAI,CAAA,CAAE,CAAC,CAAC,CAAA,CAC1B,IAAI,CAAC,CAAC,GAAG,CAAA,KAAM,GAAG,CAAA;AAGrB,EAAA,MAAM,OAAO,UAAA,CAAW,MAAA,GAAS,IAC7B,UAAA,CAAW,KAAA,CAAM,GAAG,CAAC,CAAA,CAAE,IAAA,CAAK,IAAI,IAChC,OAAA,CAAQ,CAAC,EAAG,OAAA,CAAQ,KAAA,CAAM,GAAG,EAAE,CAAA;AAGnC,EAAA,MAAM,WAAA,GAAc,QAAQ,GAAA,CAAI,CAAC,SAAS,IAAA,CAAK,OAAO,CAAA,CAAE,IAAA,CAAK,IAAI,CAAA;AAGjE,EAAA,MAAM,kBAAkB,OAAA,CAAQ,IAAA;AAAA,IAC9B,CAAC,IAAA,KAAS,UAAA,IAAc,IAAA,IAAQ,KAAK,QAAA,KAAa;AAAA,GACpD;AACA,EAAA,MAAM,cAAc,OAAA,CAAQ,IAAA;AAAA,IAC1B,CAAC,IAAA,KAAS,UAAA,IAAc,IAAA,IAAQ,IAAA,CAAK;AAAA,GACvC;AACA,EAAA,MAAM,WAAW,eAAA,IAAmB,WAAA;AAGpC,EAAA,MAAM,eAAe,QAAA,GACjB,CAAA,gBAAA,EAAmB,IAAI,CAAA,QAAA,EAAW,SAAS,CAAA,mBAAA,CAAA,GAC3C,MAAA;AAEJ,EAAA,OAAO;AAAA,IACL,EAAA;AAAA,IACA,IAAA;AAAA,IACA,WAAA;AAAA,IACA,SAAA;AAAA,IACA,QAAA;AAAA,IACA,GAAI,YAAA,KAAiB,MAAA,IAAa,EAAE,YAAA,EAAa;AAAA,IACjD,SAAA;AAAA,IACA,OAAA,EAAA,iBAAS,IAAI,IAAA,EAAK,EAAE,WAAA;AAAY,GAClC;AACF;AAnEA,IAAA,cAAA,GAAA,KAAA,CAAA;AAAA,EAAA,2BAAA,GAAA;AAQA,IAAAA,WAAAA,EAAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACRA,IAAA,aAAA,GAAA,KAAA,CAAA;AAAA,EAAA,uBAAA,GAAA;AAMA,IAAA,eAAA,EAAA;AACA,IAAA,OAAA,EAAA;AACA,IAAA,cAAA,EAAA;AACA,IAAAA,WAAAA,EAAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACqBO,SAAS,gBAAA,CAAiB,GAAsB,CAAA,EAA8B;AACnF,EAAA,IAAI,CAAA,CAAE,MAAA,KAAW,CAAA,CAAE,MAAA,EAAQ;AACzB,IAAA,MAAM,IAAI,MAAM,+BAA+B,CAAA;AAAA,EACjD;AAEA,EAAA,IAAI,UAAA,GAAa,CAAA;AACjB,EAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,EAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,QAAQ,CAAA,EAAA,EAAK;AACjC,IAAA,UAA
A,IAAc,CAAA,CAAE,CAAC,CAAA,GAAK,CAAA,CAAE,CAAC,CAAA;AACzB,IAAA,KAAA,IAAS,CAAA,CAAE,CAAC,CAAA,GAAK,CAAA,CAAE,CAAC,CAAA;AACpB,IAAA,KAAA,IAAS,CAAA,CAAE,CAAC,CAAA,GAAK,CAAA,CAAE,CAAC,CAAA;AAAA,EACtB;AAEA,EAAA,MAAM,YAAY,IAAA,CAAK,IAAA,CAAK,KAAK,CAAA,GAAI,IAAA,CAAK,KAAK,KAAK,CAAA;AACpD,EAAA,IAAI,SAAA,KAAc,GAAG,OAAO,CAAA;AAE5B,EAAA,OAAO,UAAA,GAAa,SAAA;AACtB;AA6BA,SAAS,gBAAgB,OAAA,EAAiC;AACxD,EAAA,OAAO;AAAA,IACL,IAAI,OAAA,CAAQ,EAAA;AAAA,IACZ,IAAA,EAAM,QAAA;AAAA,IACN,SAAS,OAAA,CAAQ,IAAA;AAAA,IACjB,SAAS,OAAA,CAAQ,WAAA;AAAA,IACjB,MAAM,EAAC;AAAA,IACP,MAAA,EAAQ,QAAA;AAAA,IACR,OAAA,EAAS,EAAE,IAAA,EAAM,UAAA,EAAY,QAAQ,WAAA,EAAY;AAAA,IACjD,SAAS,OAAA,CAAQ,OAAA;AAAA,IACjB,SAAA,EAAW,IAAA;AAAA,IACX,YAAY,EAAC;AAAA,IACb,SAAS,OAAA,CAAQ;AAAA,GACnB;AACF;AAEA,eAAsB,YAAA,CACpB,QAAA,EACA,KAAA,EACA,OAAA,EACyB;AACzB,EAAA,MAAM,KAAA,GAAQ,SAAS,KAAA,IAAS,aAAA;AAEhC,EAAA,MAAM,aAAa,QAAQ,CAAA;AAC3B,EAAA,MAAM,KAAA,GAAQ,kBAAkB,QAAQ,CAAA;AAGxC,EAAA,IAAI,cAA4B,EAAC;AACjC,EAAA,IAAI;AACF,IAAA,WAAA,GAAc,MAAM,gBAAgB,QAAQ,CAAA;AAAA,EAC9C,CAAA,CAAA,MAAQ;AAAA,EAER;AAEA,EAAA,IAAI,MAAM,MAAA,KAAW,CAAA,IAAK,YAAY,MAAA,KAAW,CAAA,SAAU,EAAC;AAG5D,EAAA,MAAM,WAAA,GAAc,MAAM,SAAA,CAAU,KAAK,CAAA;AAGzC,EAAA,MAAM,gBAAA,GAAmB,wBAAwB,QAAQ,CAAA;AAGzD,EAAA,MAAM,SAAyB,EAAC;AAChC,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AAExB,IAAA,IAAI,KAAK,aAAA,EAAe;AAExB,IAAA,IAAI;AACF,MAAA,MAAM,WAAW,CAAA,EAAG,IAAA,CAAK,OAAO,CAAA,CAAA,EAAI,KAAK,OAAO,CAAA,CAAA;AAChD,MAAA,MAAM,IAAA,GAAO,WAAA,CAAY,IAAA,CAAK,OAAA,EAAS,KAAK,OAAO,CAAA;AAGnD,MAAA,MAAM,MAAA,GAAS,gBAAA,CAAiB,GAAA,CAAI,IAAA,CAAK,EAAE,CAAA;AAC3C,MAAA,IAAI,UAAA;AAEJ,MAAA,IAAI,MAAA,IAAU,MAAA,CAAO,IAAA,KAAS,IAAA,EAAM;AAClC,QAAA,UAAA,GAAa,MAAA,CAAO,MAAA;AAAA,MACtB,CAAA,MAAO;AAEL,QAAA,UAAA,GAAa,MAAM,UAAU,QAAQ,CAAA;AACrC,QAAA,kBAAA,CAAmB,QAAA,EAAU,IAAA,CAAK,EAAA,EAAI,UAAA,EAAY,IAAI,CAAA;AAAA,MACxD;AAEA,MAAA,MAAM,KAAA,GAAQ,gBAAA,CAAiB,WAAA,EAAa,UAAU,CAAA;AACtD,MAAA,MAAA,CAAO,IAAA,CAAK,EAAE,MAAA,EAAQ,IAAA,EAAM,OAAO,CAAA;AAAA,IACrC,CAAA,CAAA,MAAQ;AAEN,MAAA;AAAA,IACF;AAAA,EACF;AAGA,EAAA,KAAA,MAAW,WAAW,WAAA,EAAa;AACjC,IAAA,IAAI;AACF,MAAA,MAAM,OAAO,CAAA,EAAG,OAAA,CAAQ,IAAI,CAAA,CAAA,EAAI,QAAQ,WAAW,CAAA,CAAA;AACnD,MAAA,MAAM,IAAA,GAAO,WAAA,CAAY,OAAA,CAAQ,IAAA,EAAM,QAAQ,WAAW,CAAA;AAC1D,MAAA,MAAM,QAAA,GAAW,CAAA,EAAG,OAAA,CAAQ,EAAE,IAAI,IAAI,CAAA,CAAA;AAEtC,MAAA,IAAI,GAAA,GAAM,iBAAA,CAAkB,GAAA,CAAI,QAAQ,CAAA;AACxC,MAAA,IAAI,CAAC,GAAA,EAAK;AACR,QAAA,GAAA,GAAM,MAAM,UAAU,IAAI,CAAA;AAC1B,QAAA,iBAAA,CAAkB,GAAA,CAAI,UAAU,GAAG,CAAA;AAAA,MACrC;AAEA,MAAA,MAAM,KAAA,GAAQ,gBAAA,CAAiB,WAAA,EAAa,GAAG,CAAA;AAC/C,MAAA,MAAA,CAAO,KAAK,EAAE,MAAA,EAAQ,gBAAgB,OAAO,CAAA,EAAG,OAAO,CAAA;AAAA,IACzD,CAAA,CAAA,MAAQ;AACN,MAAA;AAAA,IACF;AAAA,EACF;AAGA,EAAA,MAAA,CAAO,KAAK,CAAC,CAAA,EAAG,MAAM,CAAA,CAAE,KAAA,GAAQ,EAAE,KAAK,CAAA;AACvC,EAAA,OAAO,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AAC9B;AAqBA,eAAsB,kBAAA,CACpB,QAAA,EACA,IAAA,EACA,OAAA,EAC0B;AAC1B,EAAA,MAAM,SAAA,GAAY,SAAS,SAAA,IAAaK,kBAAAA;AACxC,EAAA,MAAM,YAAY,OAAA,EAAS,SAAA;AAE3B,EAAA,IAAI,CAAC,gBAAA,EAAiB,EAAG,OAAO,EAAC;AAEjC,EAAA,IAAI,KAAA;AACJ,EAAA,IAAI,SAAS,KAAA,EAAO;AAClB,IAAA,KAAA,GAAQ,OAAA,CAAQ,KAAA;AAAA,EAClB,CAAA,MAAO;AACL,IAAA,MAAM,aAAa,QAAQ,CAAA;AAC3B,IAAA,KAAA,GAAQ,kBAAkB,QAAQ,CAAA;AAAA,EACpC;AACA,EAAA,IAAI,KAAA,CAAM,MAAA,KAAW,CAAA,EAAG,OAAO,EAAC;AAEhC,EAAA,MAAM,WAAA,GAAc,MAAM,SAAA,CAAU,IAAI,CAAA;AAExC,EAAA,MAAM,SAA0B,EAAC;AACjC,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,IAAI,KAAK,aAAA,EAAe;AACxB,IAAA,IAAI,SAAA,IAAa,IAAA,CAAK,EAAA,KAAO,SAAA,EAAW;AAExC,IAAA,IAAI;AAGF,MAAA,MAAM,IAAA,GAAO,WAAA,CAAY,IAAA,CAAK,OAAA,EAAS,EAAE,CAAA;AACzC,MAAA,IAAI,UAAA,GAAa,yBAAA,CAA0B,QAAA,EAAU,IAAA,CAAK,IAAI,IAAI,CAAA;AAElE,MAAA,IAAI,CA
AC,UAAA,EAAY;AACf,QAAA,UAAA,GAAa,MAAM,SAAA,CAAU,IAAA,CAAK,OAAO,CAAA;AACzC,QAAA,yBAAA,CAA0B,QAAA,EAAU,IAAA,CAAK,EAAA,EAAI,UAAA,EAAY,IAAI,CAAA;AAAA,MAC/D;AAEA,MAAA,MAAM,KAAA,GAAQ,gBAAA,CAAiB,WAAA,EAAa,UAAU,CAAA;AACtD,MAAA,IAAI,SAAS,SAAA,EAAW;AACtB,QAAA,MAAA,CAAO,IAAA,CAAK,EAAE,IAAA,EAAM,KAAA,EAAO,CAAA;AAAA,MAC7B;AAAA,IACF,CAAA,CAAA,MAAQ;AACN,MAAA;AAAA,IACF;AAAA,EACF;AAEA,EAAA,MAAA,CAAO,KAAK,CAAC,CAAA,EAAG,MAAM,CAAA,CAAE,KAAA,GAAQ,EAAE,KAAK,CAAA;AACvC,EAAA,OAAO,MAAA;AACT;AAlPA,IAmBM,mBAiDA,aAAA,EAuHAA,kBAAAA;AA3LN,IAAA,WAAA,GAAA,KAAA,CAAA;AAAA,EAAA,6BAAA,GAAA;AAOA,IAAA,aAAA,EAAA;AACA,IAAA,eAAA,EAAA;AACA,IAAA,UAAA,EAAA;AACA,IAAA,YAAA,EAAA;AASA,IAAM,iBAAA,uBAAwB,GAAA,EAA0B;AAiDxD,IAAM,aAAA,GAAgB,EAAA;AAuHtB,IAAMA,kBAAAA,GAAoB,GAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;AC9KnB,SAAS,iBAAiB,MAAA,EAAqC;AACpE,EAAA,MAAM,UAAU,IAAI,IAAA,CAAK,MAAA,CAAO,OAAO,EAAE,OAAA,EAAQ;AACjD,EAAA,MAAM,GAAA,GAAM,KAAK,GAAA,EAAI;AACrB,EAAA,OAAO,IAAA,CAAK,KAAA,CAAA,CAAO,GAAA,GAAM,OAAA,IAAW,UAAU,CAAA;AAChD;AAjBA,IAKa,UAAA;AALb,IAAA,UAAA,GAAA,KAAA,CAAA;AAAA,EAAA,cAAA,GAAA;AAKO,IAAM,UAAA,GAAa,EAAA,GAAK,EAAA,GAAK,EAAA,GAAK,GAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACmClC,SAAS,cAAc,IAAA,EAA0B;AACtD,EAAA,QAAQ,KAAK,QAAA;AAAU,IACrB,KAAK,MAAA;AACH,MAAA,OAAO,mBAAA;AAAA,IACT,KAAK,QAAA;AACH,MAAA,OAAO,qBAAA;AAAA,IACT,KAAK,KAAA;AACH,MAAA,OAAO,kBAAA;AAAA,IACT;AACE,MAAA,OAAO,qBAAA;AAAA;AAEb;AAMO,SAAS,aAAa,IAAA,EAA0B;AACrD,EAAA,MAAM,OAAA,GAAU,iBAAiB,IAAI,CAAA;AACrC,EAAA,OAAO,OAAA,IAAW,yBAAyB,aAAA,GAAgB,CAAA;AAC7D;AAMO,SAAS,kBAAkB,IAAA,EAA0B;AAC1D,EAAA,OAAO,IAAA,CAAK,YAAY,kBAAA,GAAqB,CAAA;AAC/C;AAMO,SAAS,cAAA,CAAe,MAAkB,gBAAA,EAAkC;AACjF,EAAA,MAAM,QAAQ,IAAA,CAAK,GAAA;AAAA,IACjB,cAAc,IAAI,CAAA,GAAI,aAAa,IAAI,CAAA,GAAI,kBAAkB,IAAI,CAAA;AAAA,IACjE;AAAA,GACF;AACA,EAAA,OAAO,gBAAA,GAAmB,KAAA;AAC5B;AAQO,SAAS,YAAY,OAAA,EAAyC;AACnE,EAAA,OAAO,OAAA,CACJ,GAAA,CAAI,CAAC,MAAA,MAAY;AAAA,IAChB,GAAG,MAAA;AAAA,IACH,UAAA,EAAY,cAAA,CAAe,MAAA,CAAO,MAAA,EAAQ,OAAO,KAAK;AAAA,GACxD,CAAE,CAAA,CACD,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAA,CAAO,CAAA,CAAE,UAAA,IAAc,CAAA,KAAM,CAAA,CAAE,UAAA,IAAc,CAAA,CAAE,CAAA;AAC7D;AA/FA,IAmBM,sBAAA,EACA,mBAAA,EACA,qBAAA,EACA,kBAAA,EACA,eACA,kBAAA,EAUA,kBAAA;AAlCN,IAAA,YAAA,GAAA,KAAA,CAAA;AAAA,EAAA,8BAAA,GAAA;AAUA,IAAA,UAAA,EAAA;AASA,IAAM,sBAAA,GAAyB,EAAA;AAC/B,IAAM,mBAAA,GAAsB,GAAA;AAC5B,IAAM,qBAAA,GAAwB,CAAA;AAC9B,IAAM,kBAAA,GAAqB,GAAA;AAC3B,IAAM,aAAA,GAAgB,GAAA;AACtB,IAAM,kBAAA,GAAqB,GAAA;AAU3B,IAAM,kBAAA,GAAqB,GAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;AClC3B,IAAA,YAAA,GAAA,KAAA,CAAA;AAAA,EAAA,8BAAA,GAAA;AAOA,IAAA,UAAA,EAAA;AACA,IAAA,eAAA,EAAA;AACA,IAAA,YAAA,EAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACTA,IAAAN,YAAAA,GAAA,KAAA,CAAA;AAAA,EAAA,4BAAA,GAAA;AAWA,IAAA,WAAA,EAAA;AAIA,IAAA,YAAA,EAAA;AAUA,IAAA,YAAA,EAAA;AAIA,IAAA,WAAA,EAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACkCO,SAAS,sBAAsB,QAAA,EAA8B;AAClE,EAAA,QAAA,CAAS,KAAKO,WAAU,CAAA;AACxB,EAAA,QAAA,CAAS,MAAA,CAAO,CAAA,eAAA,EAAkB,wBAAwB,CAAA,CAAE,CAAA;AAC9D;AAlEA,IAaa,wBAAA,CAAA,CAGPA;AAhBN,IAAAC,YAAAA,GAAA,KAAA,CAAA;AAAA,EAAA,+CAAA,GAAA;AAaO,IAAM,wBAAA,GAA2B,CAAA;AAGxC,IAAMD,WAAAA,GAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA,CAAA;AAAA,EAAA;AAAA,CAAA,CAAA;ACWZ,SAAS,eAAA,CACd,QAAA,EACA,OAAA,GAA8B,EAAC,EACjB;AACd,EAAA,MAAM,EAAE,QAAA,GAAW,KAAA,EAAM,GAAI,OAAA;AAE7B,EAAA,MAAM,MAAM,QAAA,GAAW,CAAA,QAAA,EAAW,QAAQ,CAAA,CAAA,GAAKb,IAAAA,CAAK,UAAU,iBAAiB,CAAA;AAE/E,EAAA,MAAM,MAAA,GAAS,cAAA,CAAe,GAAA,CAAI,GAAG,CAAA;AACrC,EAAA,IAAI,MAAA,EAAQ;AACV,IAAA,OAAO,MAAA;AAAA,EACT;AAEA,EAAA,MAAM,WAAW,sBAAA,EAAuB;AACxC,EAAA,IA
AI,QAAA;AAEJ,EAAA,IAAI,QAAA,EAAU;AACZ,IAAA,QAAA,GAAW,IAAI,SAAS,UAAU,CAAA;AAAA,EACpC,CAAA,MAAO;AACL,IAAA,MAAM,GAAA,GAAMC,QAAQ,GAAG,CAAA;AACvB,IAAAc,SAAAA,CAAU,GAAA,EAAK,EAAE,SAAA,EAAW,MAAM,CAAA;AAClC,IAAA,QAAA,GAAW,IAAI,SAAS,GAAG,CAAA;AAE3B,IAAA,MAAM,UAAU,QAAA,CAAS,MAAA,CAAO,gBAAgB,EAAE,MAAA,EAAQ,MAAM,CAAA;AAChE,IAAA,IAAI,OAAA,KAAY,CAAA,IAAK,OAAA,KAAY,wBAAA,EAA0B;AACzD,MAAA,QAAA,CAAS,KAAA,EAAM;AACf,MAAA,IAAI;AAAE,QAAAC,WAAW,GAAG,CAAA;AAAA,MAAG,CAAA,CAAA,MAAQ;AAAA,MAAuB;AACtD,MAAA,QAAA,GAAW,IAAI,SAAS,GAAG,CAAA;AAAA,IAC7B;AAEA,IAAA,QAAA,CAAS,OAAO,oBAAoB,CAAA;AAAA,EACtC;AAEA,EAAA,qBAAA,CAAsB,QAAQ,CAAA;AAC9B,EAAA,cAAA,CAAe,GAAA,CAAI,KAAK,QAAQ,CAAA;AAChC,EAAA,OAAO,QAAA;AACT;AAKO,SAAS,gBAAA,GAAyB;AACvC,EAAA,KAAA,MAAW,QAAA,IAAY,cAAA,CAAe,MAAA,EAAO,EAAG;AAC9C,IAAA,QAAA,CAAS,KAAA,EAAM;AAAA,EACjB;AACA,EAAA,cAAA,CAAe,KAAA,EAAM;AACvB;AAzEA,IAea,iBAAA,CAAA,CAGP;AAlBN,IAAAC,gBAAAA,GAAA,KAAA,CAAA;AAAA,EAAA,mDAAA,GAAA;AAWA,IAAA,iBAAA,EAAA;AACA,IAAAH,YAAAA,EAAAA;AAGO,IAAM,iBAAA,GAAoB,iCAAA;AAGjC,IAAM,cAAA,uBAAqB,GAAA,EAA0B;AAAA,EAAA;AAAA,CAAA,CAAA;ACiB9C,SAAS,eAAA,CAAgB,QAAA,EAAkB,SAAA,EAAmB,OAAA,EAAyB;AAC5F,EAAA,OAAOZ,WAAW,QAAQ,CAAA,CAAE,MAAA,CAAO,CAAA,EAAG,QAAQ,CAAA,CAAA,EAAI,SAAS,CAAA,CAAA,EAAI,OAAO,EAAE,CAAA,CAAE,MAAA,CAAO,KAAK,CAAA,CAAE,KAAA,CAAM,GAAG,EAAE,CAAA;AACrG;AAGO,SAAS,iBAAiB,IAAA,EAAsB;AACrD,EAAA,OAAOA,WAAW,QAAQ,CAAA,CAAE,OAAO,IAAI,CAAA,CAAE,OAAO,KAAK,CAAA;AACvD;AA1CA,IAyBa,oBAAA,EAKA,eAAA;AA9Bb,IAAAK,WAAAA,GAAA,KAAA,CAAA;AAAA,EAAA,+BAAA,GAAA;AAyBO,IAAM,oBAAA,uBAAgD,GAAA,CAAI;AAAA,MAC/D,KAAA;AAAA,MAAO,MAAA;AAAA,MAAQ,MAAA;AAAA,MAAQ,KAAA;AAAA,MAAO,KAAA;AAAA,MAAO,KAAA;AAAA,MAAO,MAAA;AAAA,MAAQ;AAAA,KACrD,CAAA;AAGM,IAAM,eAAA,uBAA2C,GAAA,CAAI;AAAA,MAC1D,KAAA;AAAA,MAAO,MAAA;AAAA,MAAQ,KAAA;AAAA,MAAO,MAAA;AAAA,MAAQ;AAAA,KAC/B,CAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACbM,SAAS,uBAAA,CACd,QAAA,EACA,OAAA,EACA,YAAA,EACqB;AACrB,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AAEzC,EAAA,MAAM,MAAM,QAAA,CACT,OAAA,CAAQ,yDAAyD,CAAA,CACjE,IAAI,OAAO,CAAA;AAEd,EAAA,IAAI,CAAC,GAAA,IAAO,CAAC,IAAI,SAAA,IAAa,CAAC,IAAI,YAAA,EAAc;AAC/C,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,IAAI,YAAA,IAAgB,GAAA,CAAI,YAAA,KAAiB,YAAA,EAAc;AACrD,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,IAAI,YAAA;AAAA,IACT,IAAI,SAAA,CAAU,MAAA;AAAA,IACd,IAAI,SAAA,CAAU,UAAA;AAAA,IACd,GAAA,CAAI,UAAU,UAAA,GAAa;AAAA,GAC7B;AACF;AAUO,SAAS,uBAAA,CACd,QAAA,EACA,OAAA,EACA,SAAA,EACA,IAAA,EACM;AACN,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AAEzC,EAAA,MAAM,UAAU,SAAA,YAAqB,YAAA,GAAe,SAAA,GAAY,IAAI,aAAa,SAAS,CAAA;AAC1F,EAAA,MAAM,MAAA,GAAS,OAAO,IAAA,CAAK,OAAA,CAAQ,QAAQ,OAAA,CAAQ,UAAA,EAAY,QAAQ,UAAU,CAAA;AAEjF,EAAA,QAAA,CACG,QAAQ,gEAAgE,CAAA,CACxE,GAAA,CAAI,MAAA,EAAQ,MAAM,OAAO,CAAA;AAC9B;AAOO,SAAS,6BACd,QAAA,EACkC;AAClC,EAAA,MAAM,KAAA,uBAAY,GAAA,EAAiC;AACnD,EAAA,MAAM,IAAA,GAAO,QAAA,CACV,OAAA,CAAQ,4EAA4E,EACpF,GAAA,EAAI;AAEP,EAAA,KAAA,MAAW,OAAO,IAAA,EAAM;AACtB,IAAA,IAAI,GAAA,CAAI,SAAA,IAAa,GAAA,CAAI,YAAA,EAAc;AACrC,MAAA,KAAA,CAAM,GAAA,CAAI,GAAA,CAAI,EAAA,EAAI,EAAE,SAAA,EAAW,IAAI,SAAA,EAAW,WAAA,EAAa,GAAA,CAAI,YAAA,EAAc,CAAA;AAAA,IAC/E;AAAA,EACF;AACA,EAAA,OAAO,KAAA;AACT;AAxFA,IAAAW,WAAAA,GAAA,KAAA,CAAA;AAAA,EAAA,8CAAA,GAAA;AAOA,IAAAD,gBAAAA,EAAAA;AACA,IAAAV,WAAAA,EAAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACkBA,SAAS,WAAW,GAAA,EAA+B;AACjD,EAAA,MAAM,KAAA,GAAwB;AAAA,IAC5B,IAAI,GAAA,CAAI,EAAA;AAAA,IACR,UAAU,GAAA,CAAI,SAAA;AAAA,IACd,WAAW,GAAA,CAAI,UAAA;AAAA,IACf,SAAS,GAAA,CAAI,QAAA;AAAA,IACb,aAAa,GAAA,CAAI,YAAA;AAAA,IACjB,MAAM,GAAA,CAAI,IAAA;AAAA,IACV,WAAW,GAAA,CAAI;AAAA,GACjB;AACA,EAAA,IAAI,GAAA,CAAI,UAAU,IAAA,EAAM;AACtB,IAAA,KAAA,CAAM,QAAQ,GAAA,CAAI,KAAA;AAAA,EACpB;AACA,EAAA,OAAO,KAAA;AACT;AASO,SAAS,yBAAA,CACd,QAAA,EACA,
KAAA,EACA,KAAA,EACe;AACf,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AAEzC,EAAA,MAAM,SAAA,GAAY,iBAAiB,KAAK,CAAA;AACxC,EAAA,IAAI,SAAA,KAAc,EAAA,EAAI,OAAO,EAAC;AAE9B,EAAA,IAAI;AAEF,IAAA,MAAM,OAAO,QAAA,CACV,OAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA;AAAA,KAMF,CACC,GAAA,CAAI,SAAA,EAAW,KAAK,CAAA;AAEvB,IAAA,OAAO,IAAA,CAAK,GAAA,CAAI,CAAC,GAAA,MAAS;AAAA,MACxB,KAAA,EAAO,WAAW,GAAG,CAAA;AAAA,MACrB,KAAA,EAAO,iBAAA,CAAkB,GAAA,CAAI,IAAI;AAAA,KACnC,CAAE,CAAA;AAAA,EACJ,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,OAAA,GAAU,GAAA,YAAe,KAAA,GAAQ,GAAA,CAAI,OAAA,GAAU,oBAAA;AACrD,IAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,gDAAA,EAAmD,OAAO,CAAA,CAAE,CAAA;AAC1E,IAAA,OAAO,EAAC;AAAA,EACV;AACF;AAjFA,IAAAD,YAAAA,GAAA,KAAA,CAAA;AAAA,EAAA,+CAAA,GAAA;AAKA,IAAAW,gBAAAA,EAAAA;AACA,IAAA,WAAA,EAAA;AACA,IAAA,WAAA,EAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACOO,SAAS,YAAA,CACd,QAAA,EACA,MAAA,EACA,UAAA,EACM;AACN,EAAA,IAAI,MAAA,CAAO,WAAW,CAAA,EAAG;AAEzB,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AAEzC,EAAA,MAAM,MAAA,GAAS,SAAS,OAAA,CAAQ;AAAA;AAAA;AAAA,EAAA,CAG/B,CAAA;AAED,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,WAAA,CAAY,CAAC,KAAA,KAA4B;AACnE,IAAA,KAAA,MAAW,SAAS,KAAA,EAAO;AACzB,MAAA,MAAM,GAAA,GAAM,UAAA,EAAY,GAAA,CAAI,KAAA,CAAM,EAAE,CAAA;AACpC,MAAA,MAAM,SAAA,GAAY,GAAA,GACd,MAAA,CAAO,IAAA,CAAK,GAAA,CAAI,QAAQ,GAAA,CAAI,UAAA,EAAY,GAAA,CAAI,UAAU,CAAA,GACtD,IAAA;AAEJ,MAAA,MAAA,CAAO,GAAA;AAAA,QACL,KAAA,CAAM,EAAA;AAAA,QACN,KAAA,CAAM,QAAA;AAAA,QACN,KAAA,CAAM,SAAA;AAAA,QACN,KAAA,CAAM,OAAA;AAAA,QACN,KAAA,CAAM,WAAA;AAAA,QACN,KAAA,CAAM,IAAA;AAAA,QACN,SAAA;AAAA,QACA,MAAM,KAAA,IAAS,IAAA;AAAA,QACf,KAAA,CAAM;AAAA,OACR;AAAA,IACF;AAAA,EACF,CAAC,CAAA;AAED,EAAA,UAAA,CAAW,MAAM,CAAA;AACnB;AAOO,SAAS,sBAAA,CAAuB,UAAkB,SAAA,EAA2B;AAClF,EAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAE5B,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AAEzC,EAAA,MAAM,GAAA,GAAM,QAAA,CAAS,OAAA,CAAQ,wCAAwC,CAAA;AAErE,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,WAAA,CAAY,CAAC,KAAA,KAAoB;AAC3D,IAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,MAAA,GAAA,CAAI,IAAI,IAAI,CAAA;AAAA,IACd;AAAA,EACF,CAAC,CAAA;AAED,EAAA,UAAA,CAAW,SAAS,CAAA;AACtB;AAOO,SAAS,oBAAoB,QAAA,EAA4B;AAC9D,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AAEzC,EAAA,MAAM,IAAA,GAAO,QAAA,CACV,OAAA,CAAQ,uCAAuC,EAC/C,GAAA,EAAI;AAEP,EAAA,OAAO,IAAA,CAAK,GAAA,CAAI,CAAC,CAAA,KAAM,EAAE,SAAS,CAAA;AACpC;AAOO,SAAS,iBAAiB,QAAA,EAAiC;AAChE,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AAEzC,EAAA,MAAM,GAAA,GAAM,QAAA,CACT,OAAA,CAAQ,0DAA0D,EAClE,GAAA,EAAI;AAEP,EAAA,OAAO,KAAK,KAAA,IAAS,IAAA;AACvB;AAMO,SAAS,cAAc,QAAA,EAA0B;AACtD,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AACzC,EAAA,MAAM,GAAA,GAAM,QAAA,CAAS,OAAA,CAAQ,oCAAoC,EAAE,GAAA,EAAI;AACvE,EAAA,OAAO,GAAA,CAAI,GAAA;AACb;AAOO,SAAS,uBAAA,CAAwB,UAAkB,QAAA,EAA0B;AAClF,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AACzC,EAAA,MAAM,MAAM,QAAA,CACT,OAAA,CAAQ,wDAAwD,CAAA,CAChE,IAAI,QAAQ,CAAA;AACf,EAAA,OAAO,GAAA,CAAI,GAAA;AACb;AAOO,SAAS,gBAAA,CAAiB,UAAkB,IAAA,EAAoB;AACrE,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AAEzC,EAAA,QAAA,CACG,OAAA,CAAQ,4EAA4E,CAAA,CACpF,GAAA,CAAI,IAAI,CAAA;AACb;AAzIA,IAAAE,UAAAA,GAAA,KAAA,CAAA;AAAA,EAAA,6CAAA,GAAA;AAKA,IAAAF,gBAAAA,EAAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACLA,IAAA,wBAAA,GAAA,EAAA;AAAA,QAAA,CAAA,wBAAA,EAAA;AAAA,EAAA,iBAAA,EAAA,MAAA,iBAAA;AAAA,EAAA,wBAAA,EAAA,MAAA,wBAAA;AAAA,EAAA,gBAAA,EAAA,MAAA,gBAAA;AAAA,EAAA,gBAAA,EAAA,MAAA,gBAAA;AAAA,EAAA,4BAAA,EAAA,MAAA,4BAAA;AAAA,EAAA,sBAAA,EAAA,MAAA,sBAAA;AAAA,EAAA,uBAAA,EAAA,MAAA,uBAAA;AAAA,EAAA,aAAA,EAAA,MAAA,aAAA;AAAA,EAAA,uBAAA,EAAA,MAAA,uBAAA;AAAA,EAAA,mBAAA,EAAA,MAAA,mBAAA;AAAA,EAAA,gBAAA,EAAA,MAAA,gBAAA;AAAA,EAAA,eAAA,EAAA,MAAA,eAAA;AAAA,EAAA,yBAAA,EAAA,MAAA,yBAAA;AAAA,EAAA,uBAAA,EAAA,MAAA,uBAAA;AAAA,EAAA,gBAAA,EAAA,MAAA,gBAAA;AAAA,EAAA,YAAA,EAAA,MAAA;AAAA
,CAAA,CAAA;AAAA,IAAA,qBAAA,GAAA,KAAA,CAAA;AAAA,EAAA,8CAAA,GAAA;AAQA,IAAAA,gBAAAA,EAAAA;AAGA,IAAAH,YAAAA,EAAAA;AAGA,IAAAI,WAAAA,EAAAA;AAQA,IAAAZ,YAAAA,EAAAA;AAGA,IAAAa,UAAAA,EAAAA;AAAA,EAAA;AAAA,CAAA,CAAA;ACXA,SAAS,SAAS,OAAA,EAA0B;AAC1C,EAAA,OAAO,OAAA,CAAQ,SAAS,IAAI,CAAA;AAC9B;AAOA,SAAS,iBAAA,CACP,WACA,GAAA,EAC0C;AAC1C,EAAA,IAAI,QAAQ,KAAA,EAAO;AACjB,IAAA,OAAO,cAAc,SAAS,CAAA;AAAA,EAChC;AACA,EAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,IAAA,OAAO,gBAAgB,SAAS,CAAA;AAAA,EAClC;AACA,EAAA,IAAI,eAAA,CAAgB,GAAA,CAAI,GAAG,CAAA,EAAG;AAC5B,IAAA,OAAO,UAAU,SAAS,CAAA;AAAA,EAC5B;AAEA,EAAA,OAAO,gBAAgB,SAAS,CAAA;AAClC;AAMA,SAAS,cACP,SAAA,EAC0C;AAC1C,EAAA,MAAM,WAAqD,EAAC;AAC5D,EAAA,IAAI,UAAkD,EAAC;AACvD,EAAA,IAAI,WAAA,GAAc,KAAA;AAElB,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,CAAU,QAAQ,CAAA,EAAA,EAAK;AACzC,IAAA,MAAM,IAAA,GAAO,UAAU,CAAC,CAAA;AACxB,IAAA,MAAM,UAAU,EAAE,UAAA,EAAY,CAAA,GAAI,CAAA,EAAG,MAAM,IAAA,EAAK;AAGhD,IAAA,IAAI,IAAA,CAAK,SAAA,EAAU,CAAE,UAAA,CAAW,KAAK,CAAA,EAAG;AACtC,MAAA,WAAA,GAAc,CAAC,WAAA;AACf,MAAA,OAAA,CAAQ,KAAK,OAAO,CAAA;AACpB,MAAA;AAAA,IACF;AAGA,IAAA,IAAI,CAAC,eAAe,UAAA,CAAW,IAAA,CAAK,IAAI,CAAA,IAAK,OAAA,CAAQ,SAAS,CAAA,EAAG;AAC/D,MAAA,QAAA,CAAS,KAAK,OAAO,CAAA;AACrB,MAAA,OAAA,GAAU,CAAC,OAAO,CAAA;AAClB,MAAA;AAAA,IACF;AAIA,IAAA,IACE,CAAC,WAAA,IACD,IAAA,CAAK,MAAK,KAAM,EAAA,IAChB,QAAQ,MAAA,GAAS,CAAA,IACjB,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,KAAK,IAAA,EAAK,KAAM,EAAE,CAAA,EACxC;AAEA,MAAA,OAAA,CAAQ,KAAK,OAAO,CAAA;AACpB,MAAA,QAAA,CAAS,KAAK,OAAO,CAAA;AACrB,MAAA,OAAA,GAAU,EAAC;AACX,MAAA;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,KAAK,OAAO,CAAA;AAAA,EACtB;AAEA,EAAA,IAAI,OAAA,CAAQ,SAAS,CAAA,EAAG;AACtB,IAAA,QAAA,CAAS,KAAK,OAAO,CAAA;AAAA,EACvB;AAEA,EAAA,OAAO,QAAA;AACT;AAGA,SAAS,UACP,SAAA,EAC0C;AAC1C,EAAA,MAAM,WAAqD,EAAC;AAC5D,EAAA,IAAI,UAAkD,EAAC;AAEvD,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,CAAU,QAAQ,CAAA,EAAA,EAAK;AACzC,IAAA,MAAM,IAAA,GAAO,UAAU,CAAC,CAAA;AACxB,IAAA,MAAM,UAAU,EAAE,UAAA,EAAY,CAAA,GAAI,CAAA,EAAG,MAAM,IAAA,EAAK;AAEhD,IAAA,IAAI,KAAK,IAAA,EAAK,KAAM,EAAA,IAAM,OAAA,CAAQ,SAAS,CAAA,EAAG;AAE5C,MAAA,IAAI,eAAA,GAAkB,KAAA;AACtB,MAAA,KAAA,IAAS,IAAI,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,CAAU,QAAQ,CAAA,EAAA,EAAK;AAC7C,QAAA,IAAI,SAAA,CAAU,CAAC,CAAA,CAAG,IAAA,OAAW,EAAA,EAAI;AAC/B,UAAA,eAAA,GAAkB,IAAA;AAClB,UAAA;AAAA,QACF;AAAA,MACF;AACA,MAAA,IAAI,eAAA,EAAiB;AACnB,QAAA,QAAA,CAAS,KAAK,OAAO,CAAA;AACrB,QAAA,OAAA,GAAU,CAAC,OAAO,CAAA;AAClB,QAAA;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,KAAK,OAAO,CAAA;AAAA,EACtB;AAEA,EAAA,IAAI,OAAA,CAAQ,SAAS,CAAA,EAAG;AACtB,IAAA,QAAA,CAAS,KAAK,OAAO,CAAA;AAAA,EACvB;AAEA,EAAA,OAAO,QAAA;AACT;AAGA,SAAS,gBACP,SAAA,EAC0C;AAC1C,EAAA,MAAM,WAAqD,EAAC;AAC5D,EAAA,IAAI,UAAkD,EAAC;AAEvD,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,CAAU,QAAQ,CAAA,EAAA,EAAK;AACzC,IAAA,MAAM,IAAA,GAAO,UAAU,CAAC,CAAA;AACxB,IAAA,MAAM,UAAU,EAAE,UAAA,EAAY,CAAA,GAAI,CAAA,EAAG,MAAM,IAAA,EAAK;AAEhD,IAAA,IAAI,KAAK,IAAA,EAAK,KAAM,EAAA,IAAM,OAAA,CAAQ,SAAS,CAAA,EAAG;AAC5C,MAAA,QAAA,CAAS,KAAK,OAAO,CAAA;AACrB,MAAA,OAAA,GAAU,CAAC,OAAO,CAAA;AAClB,MAAA;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,KAAK,OAAO,CAAA;AAAA,EACtB;AAEA,EAAA,IAAI,OAAA,CAAQ,SAAS,CAAA,EAAG;AACtB,IAAA,QAAA,CAAS,KAAK,OAAO,CAAA;AAAA,EACvB;AAEA,EAAA,OAAO,QAAA;AACT;AAGA,SAAS,YAAY,OAAA,EAAqC;AACxD,EAAA,OAAO,OAAA,CAAQ,IAAI,CAAC,CAAA,KAAM,EAAE,IAAI,CAAA,CAAE,KAAK,IAAI,CAAA;AAC7C;AAUO,SAAS,SAAA,CACd,QAAA,EACA,OAAA,EACA,OAAA,EACS;AAET,EAAA,IAAI,OAAA,CAAQ,IAAA,EAAK,KAAM,EAAA,SAAW,EAAC;AAGnC,EAAA,IAAI,QAAA,CAAS,OAAO,CAAA,EAAG,OAAO,EAAC;AAE/B,EAAA,MAAM,UAAA,GAAa,SAAS,UAAA,IAAc,mBAAA;AAC1C,EAAA,MAAM,WAAA,GAAc,SAAS,WAAA,IAAe,oBAAA;AAE5C,EAAA,MAAM,SAAA,GAAY,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA;AA
CpC,EAAA,MAAM,GAAA,GAAM,OAAA,CAAQ,QAAQ,CAAA,CAAE,WAAA,EAAY;AAE1C,EAAA,MAAM,QAAA,GAAW,iBAAA,CAAkB,SAAA,EAAW,GAAG,CAAA;AAGjD,EAAA,MAAM,SAAkB,EAAC;AACzB,EAAA,IAAI,cAAsD,EAAC;AAC3D,EAAA,IAAI,iBAAA,GAAoB,CAAA;AAExB,EAAA,SAAS,SAAA,CACP,OACAC,aAAAA,EACwC;AACxC,IAAA,IAAI,KAAA,CAAM,MAAA,KAAW,CAAA,EAAG,OAAO,EAAC;AAEhC,IAAA,MAAM,QAAA,GAAW,CAAC,GAAGA,aAAAA,EAAc,GAAG,KAAK,CAAA;AAC3C,IAAA,MAAM,IAAA,GAAO,SAAS,GAAA,CAAI,CAAC,MAAM,CAAA,CAAE,IAAI,CAAA,CAAE,IAAA,CAAK,IAAI,CAAA;AAClD,IAAA,MAAM,SAAA,GAAY,QAAA,CAAS,CAAC,CAAA,CAAG,UAAA;AAC/B,IAAA,MAAM,OAAA,GAAU,QAAA,CAAS,QAAA,CAAS,MAAA,GAAS,CAAC,CAAA,CAAG,UAAA;AAE/C,IAAA,MAAA,CAAO,IAAA,CAAK;AAAA,MACV,EAAA,EAAI,eAAA,CAAgB,QAAA,EAAU,SAAA,EAAW,OAAO,CAAA;AAAA,MAChD,QAAA;AAAA,MACA,SAAA;AAAA,MACA,OAAA;AAAA,MACA,IAAA;AAAA,MACA,WAAA,EAAa,iBAAiB,IAAI;AAAA,KACnC,CAAA;AAGD,IAAA,IAAI,WAAA,IAAe,CAAA,EAAG,OAAO,EAAC;AAC9B,IAAA,MAAM,gBAAwD,EAAC;AAC/D,IAAA,IAAI,UAAA,GAAa,CAAA;AACjB,IAAA,KAAA,IAAS,IAAI,KAAA,CAAM,MAAA,GAAS,CAAA,EAAG,CAAA,IAAK,GAAG,CAAA,EAAA,EAAK;AAC1C,MAAA,MAAM,OAAA,GAAU,KAAA,CAAM,CAAC,CAAA,CAAG,KAAK,MAAA,GAAS,CAAA;AACxC,MAAA,IAAI,UAAA,GAAa,OAAA,GAAU,WAAA,IAAe,aAAA,CAAc,SAAS,CAAA,EAAG;AACpE,MAAA,aAAA,CAAc,OAAA,CAAQ,KAAA,CAAM,CAAC,CAAE,CAAA;AAC/B,MAAA,UAAA,IAAc,OAAA;AAAA,IAChB;AACA,IAAA,OAAO,aAAA;AAAA,EACT;AAEA,EAAA,IAAI,eAAuD,EAAC;AAE5D,EAAA,KAAA,MAAW,WAAW,QAAA,EAAU;AAC9B,IAAA,MAAM,UAAA,GAAa,WAAA,CAAY,OAAO,CAAA,CAAE,MAAA;AAGxC,IAAA,IAAI,iBAAA,GAAoB,CAAA,IAAK,iBAAA,GAAoB,UAAA,GAAa,UAAA,EAAY;AACxE,MAAA,YAAA,GAAe,SAAA,CAAU,aAAa,YAAY,CAAA;AAClD,MAAA,WAAA,GAAc,EAAC;AACf,MAAA,iBAAA,GAAoB,CAAA;AAAA,IACtB;AAEA,IAAA,WAAA,CAAY,IAAA,CAAK,GAAG,OAAO,CAAA;AAC3B,IAAA,iBAAA,IAAqB,UAAA;AAGrB,IAAA,IAAI,oBAAoB,UAAA,EAAY;AAClC,MAAA,YAAA,GAAe,SAAA,CAAU,aAAa,YAAY,CAAA;AAClD,MAAA,WAAA,GAAc,EAAC;AACf,MAAA,iBAAA,GAAoB,CAAA;AAAA,IACtB;AAAA,EACF;AAGA,EAAA,IAAI,WAAA,CAAY,SAAS,CAAA,EAAG;AAC1B,IAAA,SAAA,CAAU,aAAa,YAAY,CAAA;AAAA,EACrC;AAEA,EAAA,OAAO,MAAA;AACT;AAjQA,IAUM,mBAAA,EACA,oBAAA;AAXN,IAAA,aAAA,GAAA,KAAA,CAAA;AAAA,EAAA,kCAAA,GAAA;AAEA,IAAAb,WAAAA,EAAAA;AAQA,IAAM,mBAAA,GAAsB,IAAA;AAC5B,IAAM,oBAAA,GAAuB,GAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACX7B,IAAA,oBAAA,GAAA,EAAA;AAAA,QAAA,CAAA,oBAAA,EAAA;AAAA,EAAA,WAAA,EAAA,MAAA,WAAA;AAAA,EAAA,uBAAA,EAAA,MAAA;AAAA,CAAA,CAAA;AA2BO,SAAS,wBAAwB,QAAA,EAA0B;AAChE,EAAA,MAAM,EAAA,GAAK,gBAAgB,QAAQ,CAAA;AACnC,EAAA,MAAM,GAAA,GAAM,EAAA,CACT,OAAA,CAAQ,8DAA8D,EACtE,GAAA,EAAI;AACP,EAAA,OAAO,GAAA,CAAI,KAAA;AACb;AAYA,eAAsB,WAAA,CACpB,UACA,OAAA,EAC4B;AAC5B,EAAA,MAAM,KAAA,GAAQ,KAAK,GAAA,EAAI;AACvB,EAAA,MAAM,WAAA,GAAc,SAAS,WAAA,IAAe,IAAA;AAC5C,EAAA,MAAM,EAAA,GAAK,gBAAgB,QAAQ,CAAA;AAEnC,EAAA,MAAM,KAAA,GAAQ,cACV,mEAAA,GACA,2CAAA;AACJ,EAAA,MAAM,IAAA,GAAO,EAAA,CACV,OAAA,CAAQ,KAAK,EACb,GAAA,EAAI;AAGP,EAAA,MAAM,QAAA,GAAW,EAAA,CAAG,OAAA,CAAQ,sCAAsC,EAAE,GAAA,EAAI;AACxE,EAAA,MAAM,aAAA,GAAgB,QAAA,CAAS,KAAA,GAAQ,IAAA,CAAK,MAAA;AAE5C,EAAA,IAAI,cAAA,GAAiB,CAAA;AAErB,EAAA,MAAM,aAAa,EAAA,CAAG,OAAA;AAAA,IACpB;AAAA,GACF;AACA,EAAA,MAAM,UAAA,GAAa,EAAA,CAAG,WAAA,CAAY,CAAC,KAAA,KAA6E;AAC9G,IAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,MAAA,MAAM,MAAA,GAAS,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,MAAA,CAAO,MAAA,EAAQ,IAAA,CAAK,MAAA,CAAO,UAAA,EAAY,IAAA,CAAK,MAAA,CAAO,UAAU,CAAA;AAC7F,MAAA,UAAA,CAAW,GAAA,CAAI,MAAA,EAAQ,IAAA,CAAK,YAAA,EAAc,KAAK,EAAE,CAAA;AAAA,IACnD;AAAA,EACF,CAAC,CAAA;AAED,EAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,MAAA,EAAQ,KAAK,UAAA,EAAY;AAChD,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,CAAA,EAAG,IAAI,UAAU,CAAA;AAC1C,IAAA,MAAM,KAAA,GAAQ,KAAA,CAAM,GAAA,CAAI,CAAA,CAAA,KAAK,EAAE,IAAI,CAAA;AACnC,IAAA,MAAM,OAAA,GAAU,MAAM,UAAA,CAAW,KAAK,CAAA;AACtC,IAAA,IAAI,OAAA,CAAQ,MAAA,KAAW,KAAA,CAAM,MAAA,EAAQ;AACnC,MAAA,
MAAM,IAAI,MAAM,CAAA,oBAAA,EAAuB,OAAA,CAAQ,MAAM,CAAA,aAAA,EAAgB,KAAA,CAAM,MAAM,CAAA,OAAA,CAAS,CAAA;AAAA,IAC5F;AACA,IAAA,MAAM,QAAA,GAAW,KAAA,CAAM,GAAA,CAAI,CAAC,CAAA,EAAG,CAAA,MAAO,EAAE,GAAG,CAAA,EAAG,MAAA,EAAQ,OAAA,CAAQ,CAAC,GAAG,CAAE,CAAA;AACpE,IAAA,UAAA,CAAW,QAAQ,CAAA;AACnB,IAAA,cAAA,IAAkB,KAAA,CAAM,MAAA;AAAA,EAC1B;AAEA,EAAA,OAAO;AAAA,IACL,cAAA;AAAA,IACA,aAAA;AAAA,IACA,UAAA,EAAY,IAAA,CAAK,GAAA,EAAI,GAAI;AAAA,GAC3B;AACF;AA7FA,IAUM,UAAA;AAVN,IAAA,iBAAA,GAAA,KAAA,CAAA;AAAA,EAAA,sCAAA,GAAA;AAOA,IAAA,UAAA,EAAA;AACA,IAAAU,gBAAAA,EAAAA;AAEA,IAAM,UAAA,GAAa,EAAA;AAAA,EAAA;AAAA,CAAA,CAAA;;;ACVnB,IAAA,gBAAA,GAAA,EAAA;AAAA,QAAA,CAAA,gBAAA,EAAA;AAAA,EAAA,SAAA,EAAA,MAAA;AAAA,CAAA,CAAA;AA8CA,SAAS,SAAS,OAAA,EAAyB;AACzC,EAAA,OAAOf,WAAW,QAAQ,CAAA,CAAE,OAAO,OAAO,CAAA,CAAE,OAAO,KAAK,CAAA;AAC1D;AAGA,SAAS,YAAY,YAAA,EAA8B;AACjD,EAAA,OAAO,YAAA,GAAe,YAAA;AACxB;AAGA,SAAS,iBAAA,CAAkB,UAAkB,YAAA,EAAqC;AAChF,EAAA,MAAM,EAAA,GAAK,gBAAgB,QAAQ,CAAA;AACnC,EAAA,MAAM,GAAA,GAAM,GACT,OAAA,CAAQ,0CAA0C,EAClD,GAAA,CAAI,WAAA,CAAY,YAAY,CAAC,CAAA;AAChC,EAAA,OAAO,KAAK,KAAA,IAAS,IAAA;AACvB;AAGA,SAAS,WAAA,CAAY,QAAA,EAAkB,YAAA,EAAsB,IAAA,EAAoB;AAC/E,EAAA,MAAM,EAAA,GAAK,gBAAgB,QAAQ,CAAA;AACnC,EAAA,EAAA,CAAG,QAAQ,4DAA4D,CAAA,CACpE,IAAI,WAAA,CAAY,YAAY,GAAG,IAAI,CAAA;AACxC;AAGA,SAAS,cAAA,CAAe,UAAkB,YAAA,EAA4B;AACpE,EAAA,MAAM,EAAA,GAAK,gBAAgB,QAAQ,CAAA;AACnC,EAAA,EAAA,CAAG,QAAQ,oCAAoC,CAAA,CAAE,GAAA,CAAI,WAAA,CAAY,YAAY,CAAC,CAAA;AAChF;AAGA,eAAe,kBAAA,CAAmB,SAAiB,QAAA,EAAqC;AACtF,EAAA,MAAM,UAAoB,EAAC;AAE3B,EAAA,IAAI,OAAA;AACJ,EAAA,IAAI;AACF,IAAA,OAAA,GAAU,MAAM,QAAQ,OAAA,EAAS,EAAE,WAAW,IAAA,EAAM,aAAA,EAAe,MAAM,CAAA;AAAA,EAC3E,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,OAAA;AAAA,EACT;AAEA,EAAA,KAAA,MAAW,SAAS,OAAA,EAAS;AAC3B,IAAA,IAAI,CAAC,KAAA,CAAM,MAAA,EAAO,EAAG;AACrB,IAAA,MAAM,GAAA,GAAMmB,OAAAA,CAAQ,KAAA,CAAM,IAAI,EAAE,WAAA,EAAY;AAC5C,IAAA,IAAI,CAAC,oBAAA,CAAqB,GAAA,CAAI,GAAG,CAAA,EAAG;AAIpC,IAAA,MAAM,WAAWrB,IAAAA,CAAK,KAAA,CAAM,cAAc,KAAA,CAAM,IAAA,EAAM,MAAM,IAAI,CAAA;AAChE,IAAA,MAAM,OAAA,GAAU,QAAA,CAAS,QAAA,EAAU,QAAQ,CAAA;AAC3C,IAAA,OAAA,CAAQ,KAAK,OAAO,CAAA;AAAA,EACtB;AAEA,EAAA,OAAO,OAAA;AACT;AASA,eAAe,eAAe,QAAA,EAAmC;AAC/D,EAAA,MAAM,EAAE,aAAA,EAAAsB,cAAAA,EAAc,GAAI,MAAM,OAAA,CAAA,OAAA,EAAA,CAAA,IAAA,CAAA,OAAA,UAAA,EAAA,EAAA,aAAA,CAAA,CAAA;AAChC,EAAA,MAAM,SAAA,GAAY,MAAMA,cAAAA,EAAc;AACtC,EAAA,IAAI,CAAC,UAAU,MAAA,EAAQ;AACrB,IAAA,MAAM,IAAI,MAAM,CAAA,kBAAA,EAAqB,SAAA,CAAU,MAAM,CAAA,EAAA,EAAK,SAAA,CAAU,MAAM,CAAA,CAAE,CAAA;AAAA,EAC9E;AACA,EAAA,MAAM,EAAE,WAAA,EAAAC,YAAAA,EAAY,GAAI,MAAM,OAAA,CAAA,OAAA,EAAA,CAAA,IAAA,CAAA,OAAA,iBAAA,EAAA,EAAA,oBAAA,CAAA,CAAA;AAC9B,EAAA,MAAM,WAAA,GAAc,MAAMA,YAAAA,CAAY,QAAQ,CAAA;AAC9C,EAAA,OAAO,WAAA,CAAY,cAAA;AACrB;AASA,eAAsB,SAAA,CACpB,QAAA,EACA,OAAA,GAAwB,EAAC,EACH;AACtB,EAAA,MAAM,KAAA,GAAQ,KAAK,GAAA,EAAI;AACvB,EAAA,MAAM,OAAA,GAAU,QAAQ,OAAA,IAAW,MAAA;AACnC,EAAA,MAAM,KAAA,GAAQ,QAAQ,KAAA,IAAS,KAAA;AAE/B,EAAA,MAAM,KAAA,GAAqB;AAAA,IACzB,YAAA,EAAc,CAAA;AAAA,IACd,YAAA,EAAc,CAAA;AAAA,IACd,YAAA,EAAc,CAAA;AAAA,IACd,aAAA,EAAe,CAAA;AAAA,IACf,aAAA,EAAe,CAAA;AAAA,IACf,cAAA,EAAgB,CAAA;AAAA,IAChB,UAAA,EAAY;AAAA,GACd;AAEA,EAAA,MAAM,QAAA,GAAWvB,IAAAA,CAAK,QAAA,EAAU,OAAO,CAAA;AACvC,EAAA,MAAM,SAAA,GAAY,MAAM,kBAAA,CAAmB,QAAA,EAAU,QAAQ,CAAA;AAG7D,EAAA,KAAA,MAAW,WAAW,SAAA,EAAW;AAC/B,IAAA,MAAM,QAAA,GAAWA,IAAAA,CAAK,QAAA,EAAU,OAAO,CAAA;AACvC,IAAA,IAAI,OAAA;AACJ,IAAA,IAAI;AACF,MAAA,OAAA,GAAU,MAAMS,QAAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AAAA,IAC5C,CAAA,CAAA,MAAQ;AACN,MAAA,KAAA,CAAM,YAAA,EAAA;AACN,MAAA;AAAA,IACF;AAEA,IAAA,MAAM,IAAA,GAAO,SAAS,OAAO,CAAA;AAC7B,IAAA,MAAM,UAAA,GAAa,iBAAA,CAAkB,QAAA,EAAU,OAAO,CAAA;AAGtD,IAAA,IAAI,CAAC,KAAA,IAAS,UAAA,KAAe,IAAA,EAAM;AACj
C,MAAA,KAAA,CAAM,YAAA,EAAA;AACN,MAAA;AAAA,IACF;AAGA,IAAA,MAAM,MAAA,GAAS,SAAA,CAAU,OAAA,EAAS,OAAO,CAAA;AAGzC,IAAA,MAAM,GAAA,GAAA,iBAAM,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AACnC,IAAA,MAAM,eAAA,GAAoC,MAAA,CAAO,GAAA,CAAI,CAAC,KAAA,MAAW;AAAA,MAC/D,IAAI,KAAA,CAAM,EAAA;AAAA,MACV,UAAU,KAAA,CAAM,QAAA;AAAA,MAChB,WAAW,KAAA,CAAM,SAAA;AAAA,MACjB,SAAS,KAAA,CAAM,OAAA;AAAA,MACf,aAAa,KAAA,CAAM,WAAA;AAAA,MACnB,MAAM,KAAA,CAAM,IAAA;AAAA,MACZ,SAAA,EAAW;AAAA,KACb,CAAE,CAAA;AAGF,IAAA,MAAM,EAAA,GAAK,gBAAgB,QAAQ,CAAA;AACnC,IAAA,EAAA,CAAG,YAAY,MAAM;AACnB,MAAA,sBAAA,CAAuB,QAAA,EAAU,CAAC,OAAO,CAAC,CAAA;AAC1C,MAAA,IAAI,eAAA,CAAgB,SAAS,CAAA,EAAG;AAC9B,QAAA,YAAA,CAAa,UAAU,eAAe,CAAA;AAAA,MACxC;AACA,MAAA,WAAA,CAAY,QAAA,EAAU,SAAS,IAAI,CAAA;AAAA,IACrC,CAAC,CAAA,EAAE;AAEH,IAAA,KAAA,CAAM,YAAA,EAAA;AACN,IAAA,KAAA,CAAM,iBAAiB,eAAA,CAAgB,MAAA;AAAA,EACzC;AAGA,EAAA,MAAM,YAAA,GAAe,oBAAoB,QAAQ,CAAA;AACjD,EAAA,MAAM,cAAA,GAAiB,IAAI,GAAA,CAAI,SAAS,CAAA;AACxC,EAAA,MAAM,UAAA,GAAa,aAAa,MAAA,CAAO,CAAC,MAAM,CAAC,cAAA,CAAe,GAAA,CAAI,CAAC,CAAC,CAAA;AAEpE,EAAA,IAAI,UAAA,CAAW,SAAS,CAAA,EAAG;AAEzB,IAAA,KAAA,MAAW,QAAQ,UAAA,EAAY;AAC7B,MAAA,KAAA,CAAM,aAAA,IAAiB,uBAAA,CAAwB,QAAA,EAAU,IAAI,CAAA;AAAA,IAC/D;AAEA,IAAA,sBAAA,CAAuB,UAAU,UAAU,CAAA;AAG3C,IAAA,KAAA,MAAW,QAAQ,UAAA,EAAY;AAC7B,MAAA,cAAA,CAAe,UAAU,IAAI,CAAA;AAAA,IAC/B;AAAA,EACF;AAGA,EAAA,gBAAA,CAAiB,QAAA,EAAA,iBAAU,IAAI,IAAA,EAAK,EAAE,aAAa,CAAA;AAGnD,EAAA,IAAI,QAAQ,KAAA,EAAO;AACjB,IAAA,KAAA,CAAM,cAAA,GAAiB,MAAM,cAAA,CAAe,QAAQ,CAAA;AAAA,EACtD;AAEA,EAAA,KAAA,CAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,KAAA;AAChC,EAAA,OAAO,KAAA;AACT;AApOA,IAAA,aAAA,GAAA,KAAA,CAAA;AAAA,EAAA,kCAAA,GAAA;AAWA,IAAAQ,gBAAAA,EAAAA;AAGA,IAAAE,UAAAA,EAAAA;AASA,IAAA,aAAA,EAAA;AACA,IAAAZ,WAAAA,EAAAA;AAAA,EAAA;AAAA,CAAA,CAAA;ACeA,SAAS,SAAS,QAAA,EAA0B;AAC1C,EAAA,OAAOP,IAAAA,CAAK,QAAA,EAAU,SAAA,EAAW,QAAA,EAAU,YAAY,CAAA;AACzD;AAEA,SAAS,QAAQ,QAAA,EAA0B;AACzC,EAAA,OAAOA,IAAAA,CAAK,QAAA,EAAU,SAAA,EAAW,QAAQ,CAAA;AAC3C;AAGA,SAAS,eAAe,GAAA,EAAsB;AAC5C,EAAA,IAAI;AACF,IAAA,OAAA,CAAQ,IAAA,CAAK,KAAK,CAAC,CAAA;AACnB,IAAA,OAAO,IAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,KAAA;AAAA,EACT;AACF;AAGA,SAAS,SAAS,QAAA,EAAsC;AACtD,EAAA,IAAI;AACF,IAAA,MAAM,GAAA,GAAM,YAAA,CAAa,QAAA,EAAU,OAAO,CAAA;AAC1C,IAAA,MAAM,MAAA,GAAkB,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA;AACtC,IAAA,IACE,OAAO,MAAA,KAAW,QAAA,IAAY,MAAA,KAAW,IAAA,IACzC,OAAQ,MAAA,CAAmC,GAAA,KAAQ,QAAA,IACnD,OAAQ,MAAA,CAAmC,SAAA,KAAc,QAAA,EACzD;AACA,MAAA,OAAO,MAAA;AAAA,IACT;AACA,IAAA,OAAO,IAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF;AASO,SAAS,iBAAiB,QAAA,EAA8B;AAC7D,EAAA,MAAM,GAAA,GAAM,QAAQ,QAAQ,CAAA;AAC5B,EAAA,MAAM,IAAA,GAAO,SAAS,QAAQ,CAAA;AAC9B,EAAA,MAAM,OAAA,GAAuB,EAAE,GAAA,EAAK,OAAA,CAAQ,GAAA,EAAK,4BAAW,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY,EAAE;AAErF,EAAAe,SAAAA,CAAU,GAAA,EAAK,EAAE,SAAA,EAAW,MAAM,CAAA;AAElC,EAAA,IAAI;AACF,IAAA,aAAA,CAAc,IAAA,EAAM,KAAK,SAAA,CAAU,OAAO,GAAG,EAAE,IAAA,EAAM,MAAM,CAAA;AAC3D,IAAA,OAAO,EAAE,QAAA,EAAU,IAAA,EAAM,SAAS,MAAM,WAAA,CAAY,IAAI,CAAA,EAAE;AAAA,EAC5D,SAAS,GAAA,EAAc;AACrB,IAAA,IAAK,GAAA,CAA8B,IAAA,KAAS,QAAA,EAAU,MAAM,GAAA;AAG5D,IAAA,MAAM,QAAA,GAAW,SAAS,IAAI,CAAA;AAC9B,IAAA,IAAI,QAAA,IAAY,cAAA,CAAe,QAAA,CAAS,GAAG,CAAA,EAAG;AAC5C,MAAA,MAAM,OAAA,GAAU,KAAK,GAAA,EAAI,GAAI,IAAI,IAAA,CAAK,QAAA,CAAS,SAAS,CAAA,CAAE,OAAA,EAAQ;AAClE,MAAA,IAAI,UAAU,eAAA,EAAiB;AAC7B,QAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,SAAS,GAAA,EAAI;AAAA,MACjD;AAAA,IAEF;AAGA,IAAA,IAAI;AAAE,MAAAC,WAAW,IAAI,CAAA;AAAA,IAAG,CAAA,CAAA,MAAQ;AAAA,IAAqB;AACrD,IAAA,IAAI;AACF,MAAA,aAAA,CAAc,IAAA,EAAM,KAAK,SAAA,CAAU,OAAO,GAAG,EAAE,IAAA,EAAM,MAAM,CAAA;AAC3D,MAAA,OAAO,EAAE,QAAA,EAAU,IAAA,EAAM,SAAS,MAAM,WAAA,CAAY,IAAI,CAAA,EAA
E;AAAA,IAC5D,CAAA,CAAA,MAAQ;AAEN,MAAA,MAAM,MAAA,GAAS,SAAS,IAAI,CAAA;AAC5B,MAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,MAAA,EAAQ,OAAO,EAAA,EAAG;AAAA,IACtD;AAAA,EACF;AACF;AAGO,SAAS,cAAc,QAAA,EAA2B;AACvD,EAAA,MAAM,IAAA,GAAO,SAAS,QAAQ,CAAA;AAC9B,EAAA,IAAI,CAACQ,UAAAA,CAAW,IAAI,CAAA,EAAG,OAAO,KAAA;AAE9B,EAAA,MAAM,OAAA,GAAU,SAAS,IAAI,CAAA;AAC7B,EAAA,IAAI,CAAC,SAAS,OAAO,KAAA;AAErB,EAAA,OAAO,cAAA,CAAe,QAAQ,GAAG,CAAA;AACnC;AAEA,SAAS,YAAY,IAAA,EAAoB;AACvC,EAAA,IAAI;AACF,IAAAR,WAAW,IAAI,CAAA;AAAA,EACjB,CAAA,CAAA,MAAQ;AAAA,EAER;AACF;AAvIA,IAgCM,eAAA;AAhCN,IAAA,eAAA,GAAA,KAAA,CAAA;AAAA,EAAA,oCAAA,GAAA;AAgCA,IAAM,eAAA,GAAkB,KAAK,EAAA,GAAK,GAAA;AAAA,EAAA;AAAA,CAAA,CAAA;ACflC,SAAS,WAAW,QAAA,EAA0B;AAC5C,EAAA,OAAOhB,IAAAA,CAAK,UAAU,WAAW,CAAA;AACnC;AAGO,SAAS,gBAAA,CAAiB,UAAkB,MAAA,EAA2B;AAC5E,EAAA,MAAM,QAAA,GAAW,WAAW,QAAQ,CAAA;AACpC,EAAAe,UAAUd,OAAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AAChD,EAAAwB,aAAAA,CAAc,UAAU,IAAA,CAAK,SAAA,CAAU,QAAQ,IAAA,EAAM,CAAC,GAAG,OAAO,CAAA;AAClE;AAKO,SAAS,gBAAgB,QAAA,EAAsC;AACpE,EAAA,IAAI;AACF,IAAA,MAAM,GAAA,GAAMC,YAAAA,CAAa,UAAA,CAAW,QAAQ,GAAG,OAAO,CAAA;AACtD,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA;AAC7B,IAAA,IAAI,CAAC,MAAA,IAAU,OAAO,MAAA,KAAW,QAAA,IAAY,CAAC,YAAA,CAAa,GAAA,CAAI,MAAA,CAAO,KAAe,CAAA,EAAG;AACtF,MAAA,OAAO,IAAA;AAAA,IACT;AACA,IAAA,OAAO,MAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF;AA1CA,IAeM,WAAA,EAaA,YAAA;AA5BN,IAAA,iBAAA,GAAA,KAAA,CAAA;AAAA,EAAA,sCAAA,GAAA;AAeA,IAAM,WAAA,GAAc,kCAAA;AAapB,IAAM,YAAA,uBAAmB,GAAA,CAAI,CAAC,QAAQ,SAAA,EAAW,WAAA,EAAa,QAAQ,CAAC,CAAA;AAAA,EAAA;AAAA,CAAA,CAAA;ACEvE,SAAS,oBAAA,GAA4D;AACnE,EAAA,IAAI,GAAA,GAAMzB,OAAAA,CAAQ,aAAA,CAAc,MAAA,CAAA,IAAA,CAAY,GAAG,CAAC,CAAA;AAChD,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,EAAA,EAAI,CAAA,EAAA,EAAK;AAC3B,IAAA,MAAM,SAAA,GAAYD,IAAAA,CAAK,GAAA,EAAK,MAAA,EAAQ,QAAQ,CAAA;AAC5C,IAAA,IAAIwB,UAAAA,CAAW,SAAS,CAAA,EAAG;AACzB,MAAA,OAAO,EAAE,OAAA,EAAS,OAAA,CAAQ,UAAU,IAAA,EAAM,CAAC,SAAS,CAAA,EAAE;AAAA,IACxD;AACA,IAAA,MAAM,MAAA,GAASvB,QAAQ,GAAG,CAAA;AAC1B,IAAA,IAAI,WAAW,GAAA,EAAK;AACpB,IAAA,GAAA,GAAM,MAAA;AAAA,EACR;AACA,EAAA,OAAO,EAAE,OAAA,EAAS,KAAA,EAAO,IAAA,EAAM,CAAC,IAAI,CAAA,EAAE;AACxC;AAUO,SAAS,qBAAqB,QAAA,EAAoC;AACvE,EAAA,IAAI,aAAA,CAAc,QAAQ,CAAA,EAAG;AAC3B,IAAA,OAAO,EAAE,OAAA,EAAS,KAAA,EAAO,MAAA,EAAQ,+BAAA,EAAgC;AAAA,EACnE;AACA,EAAA,IAAI,CAAC,kBAAiB,EAAG;AACvB,IAAA,OAAO,EAAE,OAAA,EAAS,KAAA,EAAO,MAAA,EAAQ,qBAAA,EAAsB;AAAA,EACzD;AACA,EAAA,IAAI,uBAAA,CAAwB,QAAQ,CAAA,KAAM,CAAA,EAAG;AAC3C,IAAA,OAAO,EAAE,OAAA,EAAS,KAAA,EAAO,MAAA,EAAQ,6BAAA,EAA8B;AAAA,EACjE;AAEA,EAAA,MAAM,MAAM,oBAAA,EAAqB;AACjC,EAAA,MAAM,KAAA,GAAQ,KAAA,CAAM,GAAA,CAAI,OAAA,EAAS,CAAC,GAAG,GAAA,CAAI,IAAA,EAAM,cAAA,EAAgB,QAAQ,CAAA,EAAG;AAAA,IACxE,QAAA,EAAU,IAAA;AAAA,IACV,KAAA,EAAO;AAAA,GACR,CAAA;AACD,EAAA,KAAA,CAAM,KAAA,EAAM;AAEZ,EAAA,OAAO,EAAE,OAAA,EAAS,IAAA,EAAM,GAAA,EAAK,MAAM,GAAA,EAAI;AACzC;AAKA,eAAsB,mBAAmB,QAAA,EAAiC;AACxE,EAAA,MAAM,IAAA,GAAO,iBAAiB,QAAQ,CAAA;AACtC,EAAA,IAAI,CAAC,KAAK,QAAA,EAAU;AAGpB,EAAA,MAAM,EAAE,eAAA,EAAA0B,gBAAAA,EAAgB,GAAI,MAAM,OAAA,CAAA,OAAA,EAAA,CAAA,IAAA,CAAA,OAAA,qBAAA,EAAA,EAAA,wBAAA,CAAA,CAAA;AAClC,EAAAA,iBAAgB,QAAQ,CAAA;AAExB,EAAA,MAAM,KAAA,GAAQ,KAAK,GAAA,EAAI;AACvB,EAAA,gBAAA,CAAiB,QAAA,EAAU,EAAE,KAAA,EAAO,SAAA,EAAW,SAAA,EAAA,qBAAe,IAAA,EAAK,EAAE,WAAA,EAAY,EAAG,CAAA;AAEpF,EAAA,IAAI;AACF,IAAA,MAAM,MAAA,GAAS,MAAM,aAAA,CAAc,YAAY,WAAA,CAAY,UAAU,EAAE,WAAA,EAAa,IAAA,EAAM,CAAC,CAAA;AAC3F,IAAA,gBAAA,CAAiB,QAAA,EAAU;AAAA,MACzB,KAAA,EAAO,WAAA;AAAA,MACP,gBAAgB,MAAA,CAAO,cAAA;AAAA,MACvB,WAAA,EAAA,iBAAa,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAAA,MACpC,YAAY,MAAA,CAAO;AAAA,KACpB,CAAA;AAAA,EACH,SAAS,GAAA,EAAK
;AACZ,IAAA,MAAM,GAAA,GAAM,GAAA,YAAe,KAAA,GAAQ,GAAA,CAAI,OAAA,GAAU,eAAA;AACjD,IAAA,gBAAA,CAAiB,QAAA,EAAU;AAAA,MACzB,KAAA,EAAO,QAAA;AAAA,MACP,KAAA,EAAO,GAAA;AAAA,MACP,UAAA,EAAY,IAAA,CAAK,GAAA,EAAI,GAAI;AAAA,KAC1B,CAAA;AAAA,EACH,CAAA,SAAE;AACA,IAAA,gBAAA,EAAiB;AACjB,IAAA,IAAA,CAAK,OAAA,EAAQ;AAAA,EACf;AACF;AAQA,eAAsB,mBAAmB,QAAA,EAAoD;AAC3F,EAAA,MAAM,QAAA,GAAW3B,IAAAA,CAAK,QAAA,EAAU,MAAM,CAAA;AACtC,EAAA,IAAI,CAACwB,UAAAA,CAAW,QAAQ,CAAA,EAAG,OAAO,IAAA;AAClC,EAAA,MAAM,EAAE,SAAA,EAAAI,UAAAA,EAAU,GAAI,MAAM,OAAA,CAAA,OAAA,EAAA,CAAA,IAAA,CAAA,OAAA,aAAA,EAAA,EAAA,gBAAA,CAAA,CAAA;AAC5B,EAAA,MAAMA,WAAU,QAAQ,CAAA;AACxB,EAAA,OAAO,qBAAqB,QAAQ,CAAA;AACtC;AAxHA,IAAA,qBAAA,GAAA,KAAA,CAAA;AAAA,EAAA,0CAAA,GAAA;AAYA,IAAA,eAAA,EAAA;AACA,IAAA,qBAAA,EAAA;AACA,IAAA,eAAA,EAAA;AACA,IAAA,iBAAA,EAAA;AACA,IAAA,iBAAA,EAAA;AAAA,EAAA;AAAA,CAAA,CAAA;ACVA,IAAM,QAAA,GAAW,aAAA,CAAc,MAAA,CAAA,IAAA,CAAY,GAAG,CAAA;AAC9C,IAAM,IAAA,GAAO,SAAS,iBAAiB,CAAA;AAEhC,IAAM,UAAkB,IAAA,CAAK;;;ACwEpC,YAAA,EAAA;AAcA,eAAA,EAAA;AAcAtB,YAAAA,EAAAA;;;AClGA,UAAA,EAAA;AACAA,YAAAA,EAAAA;AACA,YAAA,EAAA;AAGA,IAAM,mBAAA,GAAsB,IAAA;AAmB5B,eAAsB,OAAA,CACpB,QAAA,EACA,OAAA,EACA,OAAA,GAA0B,EAAC,EACH;AACxB,EAAA,MAAM,SAAA,GAAY,QAAQ,SAAA,IAAa,mBAAA;AAEvC,EAAA,IAAI,CAAC,kBAAiB,EAAG;AACvB,IAAA,OAAO,EAAE,OAAO,IAAA,EAAK;AAAA,EACvB;AAEA,EAAA,IAAI;AACF,IAAA,MAAM,aAAa,QAAQ,CAAA;AAC3B,IAAA,MAAM,UAAU,MAAM,kBAAA,CAAmB,UAAU,OAAA,EAAS,EAAE,WAAW,CAAA;AACzE,IAAA,MAAM,GAAA,GAAM,QAAQ,CAAC,CAAA;AACrB,IAAA,IAAI,GAAA,EAAK;AACP,MAAA,OAAO;AAAA,QACL,KAAA,EAAO,KAAA;AAAA,QACP,MAAA,EAAQ,uCAAuC,GAAA,CAAI,IAAA,CAAK,QAAQ,KAAA,CAAM,CAAA,EAAG,EAAE,CAAC,CAAA,IAAA,CAAA;AAAA,QAC5E,UAAA,EAAY,IAAI,IAAA,CAAK;AAAA,OACvB;AAAA,IACF;AACA,IAAA,OAAO,EAAE,OAAO,IAAA,EAAK;AAAA,EACvB,SAAS,GAAA,EAAK;AACZ,IAAA,IAAI,OAAA,CAAQ,GAAA,CAAI,UAAU,CAAA,EAAG;AAC3B,MAAA,OAAA,CAAQ,MAAA,CAAO,MAAM,CAAA,0BAAA,EAA6B,GAAA,YAAe,QAAQ,GAAA,CAAI,OAAA,GAAU,MAAA,CAAO,GAAG,CAAC;AAAA,CAAI,CAAA;AAAA,IACxG;AACA,IAAA,OAAO,EAAE,OAAO,IAAA,EAAK;AAAA,EACvB;AACF;AAGA,IAAM,cAAA,GAAiB,CAAA;AAGvB,IAAM,cAAA,GAAiB;AAAA,EACrB,mBAAA;AAAA,EACA,iBAAA;AAAA,EACA,kBAAA;AAAA,EACA,gBAAA;AAAA,EACA,aAAA;AAAA,EACA;AACF,CAAA;AAGA,IAAM,0BAAA,GAA6B,sCAAA;AAY5B,SAAS,WAAW,OAAA,EAAoC;AAE7D,EAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,IAAA,EAAK,CAAE,KAAA,CAAM,KAAK,CAAA,CAAE,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA;AACpE,EAAA,IAAI,KAAA,CAAM,SAAS,cAAA,EAAgB;AACjC,IAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,uCAAA,EAAwC;AAAA,EAC5E;AAGA,EAAA,KAAA,MAAW,WAAW,cAAA,EAAgB;AACpC,IAAA,IAAI,OAAA,CAAQ,IAAA,CAAK,OAAO,CAAA,EAAG;AACzB,MAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,iCAAA,EAAkC;AAAA,IACtE;AAAA,EACF;AAGA,EAAA,IAAI,0BAAA,CAA2B,IAAA,CAAK,OAAO,CAAA,EAAG;AAC5C,IAAA,OAAO,EAAE,QAAA,EAAU,KAAA,EAAO,MAAA,EAAQ,iCAAA,EAAkC;AAAA,EACtE;AAEA,EAAA,OAAO,EAAE,UAAU,IAAA,EAAK;AAC1B;AAGA,IAAM,eAAA,GAAkB;AAAA,EACtB,8BAAA;AAAA;AAAA,EACA,8BAAA;AAAA;AAAA,EACA,yBAAA;AAAA;AAAA,EACA,2BAAA;AAAA;AAAA,EACA,2BAAA;AAAA;AAAA,EACA,0BAAA;AAAA;AAAA,EACA;AAAA;AACF,CAAA;AAYO,SAAS,aAAa,OAAA,EAAsC;AAEjE,EAAA,KAAA,MAAW,WAAW,eAAA,EAAiB;AACrC,IAAA,IAAI,OAAA,CAAQ,IAAA,CAAK,OAAO,CAAA,EAAG;AACzB,MAAA,OAAO,EAAE,YAAY,IAAA,EAAK;AAAA,IAC5B;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,UAAA,EAAY,KAAA,EAAO,MAAA,EAAQ,qCAAA,EAAsC;AAC5E;AAaA,eAAsB,aAAA,CACpB,UACA,OAAA,EACwB;AAExB,EAAA,MAAM,cAAA,GAAiB,WAAW,OAAO,CAAA;AACzC,EAAA,IAAI,CAAC,eAAe,QAAA,EAAU;AAC5B,IAAA,OAAO,EAAE,aAAA,EAAe,KAAA,EAAO,MAAA,EAAQ,eAAe,MAAA,EAAO;AAAA,EAC/D;AAGA,EAAA,MAAM,aAAA,GAAgB,MAAM,OAAA,CAAQ,QAAA,EAAU,OAAO,CAAA;AACrD,EAAA,IAAI,CAAC,cAAc,KAAA,EAAO;AACxB,IAAA,OAAO,EAAE,aAAA,EAAe,KAAA,EAAO,MAAA,EAAQ,cAAc,MAAA,EAAO;AAAA,EAC9D;AAEA,EAAA,OAAO,EAAE,eAAe,IAAA,EAAK;AAC/B;;;
AC/IA,IAAM,wBAAA,GAA2B;AAAA,EAC/B,gBAAA;AAAA;AAAA,EACA,YAAA;AAAA;AAAA,EACA,eAAA;AAAA;AAAA,EACA,eAAA;AAAA;AAAA,EACA;AAAA;AACF,CAAA;AAWO,SAAS,qBAAqB,OAAA,EAAsD;AACzF,EAAA,MAAM,EAAE,QAAA,EAAU,OAAA,EAAQ,GAAI,OAAA;AAE9B,EAAA,IAAI,QAAA,CAAS,SAAS,CAAA,EAAG;AACvB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,CAAS,QAAQ,CAAA,EAAA,EAAK;AACxC,IAAA,MAAM,OAAA,GAAU,SAAS,CAAC,CAAA;AAC1B,IAAA,IAAI,CAAC,OAAA,EAAS;AAEd,IAAA,KAAA,MAAW,WAAW,wBAAA,EAA0B;AAC9C,MAAA,IAAI,OAAA,CAAQ,IAAA,CAAK,OAAO,CAAA,EAAG;AACzB,QAAA,OAAO;AAAA,UACL,OAAA,EAAS,CAAA,uBAAA,EAA0B,OAAA,CAAQ,MAAM,CAAA,CAAA;AAAA,UACjD,iBAAA,EAAmB,OAAA;AAAA,UACnB;AAAA,SACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,IAAA;AACT;AA6BO,SAAS,qBAAqB,OAAA,EAAqD;AACxF,EAAA,MAAM,EAAE,OAAM,GAAI,OAAA;AAElB,EAAA,IAAI,KAAA,CAAM,SAAS,CAAA,EAAG;AACpB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,IAAK,KAAA,CAAM,MAAA,GAAS,GAAG,CAAA,EAAA,EAAK;AAC1C,IAAA,MAAM,KAAA,GAAQ,MAAM,CAAC,CAAA;AACrB,IAAA,MAAM,MAAA,GAAS,KAAA,CAAM,CAAA,GAAI,CAAC,CAAA;AAC1B,IAAA,MAAM,KAAA,GAAQ,KAAA,CAAM,CAAA,GAAI,CAAC,CAAA;AAEzB,IAAA,IAAI,CAAC,KAAA,IAAS,CAAC,MAAA,IAAU,CAAC,KAAA,EAAO;AAGjC,IAAA,IACE,KAAA,CAAM,IAAA,KAAS,MAAA,CAAO,IAAA,IACtB,OAAO,IAAA,KAAS,KAAA,CAAM,IAAA,IACtB,KAAA,CAAM,OAAA,IACN,CAAC,MAAA,CAAO,OAAA,IACR,MAAM,OAAA,EACN;AACA,MAAA,OAAO;AAAA,QACL,MAAM,KAAA,CAAM,IAAA;AAAA,QACZ,OAAA,EAAS,CAAA,mBAAA,EAAsB,KAAA,CAAM,IAAI,CAAA;AAAA,OAC3C;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,IAAA;AACT;AAyBO,SAAS,kBAAkB,UAAA,EAAoD;AACpF,EAAA,IAAI,WAAW,MAAA,EAAQ;AACrB,IAAA,OAAO,IAAA;AAAA,EACT;AAGA,EAAA,MAAM,KAAA,GAAQ,UAAA,CAAW,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA,CAAE,MAAA,CAAO,CAAC,IAAA,KAAS,IAAA,CAAK,IAAA,EAAK,CAAE,SAAS,CAAC,CAAA;AACnF,EAAA,MAAM,SAAA,GAAY,KAAA,CAAM,IAAA,CAAK,CAAC,IAAA,KAAS,oBAAA,CAAqB,IAAA,CAAK,IAAI,CAAC,CAAA,IAAK,KAAA,CAAM,CAAC,CAAA,IAAK,EAAA;AAEvF,EAAA,OAAO;AAAA,IACL,UAAU,UAAA,CAAW,QAAA;AAAA,IACrB,aAAa,UAAA,CAAW,MAAA;AAAA,IACxB,OAAA,EAAS,mBAAmB,UAAA,CAAW,QAAQ,KAAK,SAAA,CAAU,KAAA,CAAM,CAAA,EAAG,GAAG,CAAC,CAAA;AAAA,GAC7E;AACF;;;AC/JA,UAAA,EAAA;AAwJA,IAAM,sBAAA,GAAyBE,EAAE,MAAA,CAAO;AAAA,EACtC,QAAA,EAAUA,CAAAA,CAAE,KAAA,CAAMA,CAAAA,CAAE,QAAQ,CAAA;AAAA,EAC5B,OAAA,EAAS;AACX,CAAC,CAAA;AAGD,IAAM,eAAA,GAAkBA,EAAE,MAAA,CAAO;AAAA,EAC/B,IAAA,EAAMA,EAAE,MAAA,EAAO;AAAA,EACf,OAAA,EAASA,EAAE,OAAA,EAAQ;AAAA,EACnB,SAAA,EAAWA,EAAE,MAAA;AACf,CAAC,CAAA;AAGD,IAAM,iBAAA,GAAoBA,EAAE,MAAA,CAAO;AAAA,EACjC,KAAA,EAAOA,CAAAA,CAAE,KAAA,CAAM,eAAe;AAChC,CAAC,CAAA;AAGD,IAAM,gBAAA,GAAmBA,EAAE,MAAA,CAAO;AAAA,EAChC,MAAA,EAAQA,EAAE,OAAA,EAAQ;AAAA,EAClB,MAAA,EAAQA,EAAE,MAAA,EAAO;AAAA,EACjB,QAAA,EAAUA,EAAE,MAAA;AACd,CAAC,CAAA;AAG4BA,CAAAA,CAAE,kBAAA,CAAmB,MAAA,EAAQ;AAAA,EACxDA,CAAAA,CAAE,MAAA,CAAO,EAAE,IAAA,EAAMA,CAAAA,CAAE,QAAQ,MAAM,CAAA,EAAG,IAAA,EAAM,sBAAA,EAAwB,CAAA;AAAA,EAClEA,CAAAA,CAAE,MAAA,CAAO,EAAE,IAAA,EAAMA,CAAAA,CAAE,QAAQ,MAAM,CAAA,EAAG,IAAA,EAAM,iBAAA,EAAmB,CAAA;AAAA,EAC7DA,CAAAA,CAAE,MAAA,CAAO,EAAE,IAAA,EAAMA,CAAAA,CAAE,QAAQ,MAAM,CAAA,EAAG,IAAA,EAAM,gBAAA,EAAkB;AAC9D,CAAC;;;AC1LD,YAAA,EAAA;AAIA,IAAMqB,cAAAA,GAAgB,CAAA;AAQtB,SAAS,YAAY,IAAA,EAA+D;AAClF,EAAA,OAAO,KAAK,QAAA,KAAa,MAAA;AAC3B;AAaA,eAAsB,kBAAA,CACpB,QAAA,EACA,KAAA,GAAgBA,cAAAA,EACe;AAC/B,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,MAAM,gBAAgB,QAAQ,CAAA;AAGhD,EAAA,MAAM,sBAAsB,KAAA,CAAM,MAAA;AAAA,IAChC,CAAC,IAAA,KACC,WAAA,CAAY,IAAI,CAAA,IAChB,IAAA,CAAK,QAAA,KAAa,MAAA,IAClB,IAAA,CAAK,SAAA,IACL,CAAC,IAAA,CAAK;AAAA,GACV;AAGA,EAAA,mBAAA,CAAoB,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM;AACjC,IAAA,MAAM,QAAQ,IAAI,IAAA,CAAK,CAAA,CAAE,OAAO,EAAE,OAAA,EAAQ;AAC1C,IAAA,MAAM,QAAQ,IAAI,IAAA,CAAK,CAAA,CAAE,OAAO,EAAE,OAAA,EAAQ;AAC1C,IAAA,OAAO,KAAA,GAAQ,KAAA;A
AAA,EACjB,CAAC,CAAA;AAGD,EAAA,MAAM,UAAA,GAAa,mBAAA,CAAoB,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AACrD,EAAA,IAAI,UAAA,CAAW,SAAS,CAAA,EAAG;AACzB,IAAA,uBAAA,CAAwB,UAAU,UAAA,CAAW,GAAA,CAAI,CAAC,MAAA,KAAW,MAAA,CAAO,EAAE,CAAC,CAAA;AAAA,EACzE;AAEA,EAAA,OAAO,UAAA;AACT;;;ACxDAvB,YAAAA,EAAAA;AACA,YAAA,EAAA;AAGA,IAAMuB,cAAAA,GAAgB,CAAA;AAqBtB,eAAsB,eAAA,CACpB,QAAA,EACA,QAAA,EACA,KAAA,GAAgBA,cAAAA,EACc;AAC9B,EAAA,MAAM,iBAAiB,KAAA,GAAQ,oBAAA;AAI/B,EAAA,IAAI,gBAAgC,EAAC;AACrC,EAAA,IAAI,YAAA,GAAe,KAAA;AACnB,EAAA,MAAM,qBAAA,GAAwB,mBAAA,CAAoB,QAAA,EAAU,QAAA,EAAU,cAAc,CAAA;AAEpF,EAAA,IAAI;AACF,IAAA,aAAA,GAAgB,MAAM,YAAA,CAAa,QAAA,EAAU,UAAU,EAAE,KAAA,EAAO,gBAAgB,CAAA;AAAA,EAClF,CAAA,CAAA,MAAQ;AACN,IAAA,YAAA,GAAe,IAAA;AACf,IAAA,OAAA,CAAQ,MAAM,iFAAiF,CAAA;AAAA,EACjG;AAEA,EAAA,MAAM,iBAAiB,MAAM,qBAAA;AAE7B,EAAA,IAAI,MAAA;AACJ,EAAA,IAAI,YAAA,EAAc;AAGhB,IAAA,MAAA,GAAS,kBAAA,CAAmB,EAAC,EAAG,cAAA,EAAgB;AAAA,MAC9C,YAAA,EAAc,CAAA;AAAA,MACd,UAAA,EAAY;AAAA,KACb,CAAA;AAAA,EACH,CAAA,MAAO;AACL,IAAA,MAAA,GAAS,mBAAmB,aAAA,EAAe,cAAA,EAAgB,EAAE,QAAA,EAAU,kBAAkB,CAAA;AAAA,EAC3F;AAGA,EAAA,MAAM,MAAA,GAAS,YAAY,MAAM,CAAA;AAGjC,EAAA,MAAM,UAAA,GAAa,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AAGxC,EAAA,IAAI,UAAA,CAAW,SAAS,CAAA,EAAG;AACzB,IAAA,uBAAA,CAAwB,QAAA,EAAU,WAAW,GAAA,CAAI,CAAC,SAAS,IAAA,CAAK,MAAA,CAAO,EAAE,CAAC,CAAA;AAAA,EAC5E;AAGA,EAAA,MAAM,OAAA,GAAU,mBAAmB,UAAU,CAAA;AAE7C,EAAA,OAAO,EAAE,OAAA,EAAS,UAAA,EAAY,OAAA,EAAQ;AACxC;AAWO,SAAS,mBAAmB,OAAA,EAAiC;AAClE,EAAA,MAAM,MAAA,GAAS,iBAAA,GAAoB,QAAA,CAAI,MAAA,CAAO,EAAE,CAAA;AAEhD,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxB,IAAA,OAAO,GAAG,MAAM;AAAA,wCAAA,CAAA;AAAA,EAClB;AAEA,EAAA,MAAM,WAAA,GAAc,OAAA,CAAQ,GAAA,CAAI,CAAC,GAAG,CAAA,KAAM;AACxC,IAAA,MAAM,MAAA,GAAS,CAAA,EAAG,CAAA,GAAI,CAAC,CAAA,CAAA,CAAA;AACvB,IAAA,MAAM,OAAA,GAAU,EAAE,MAAA,CAAO,OAAA;AACzB,IAAA,OAAO,CAAA,EAAG,MAAM,CAAA,CAAA,EAAI,OAAO,CAAA,CAAA;AAAA,EAC7B,CAAC,CAAA;AAED,EAAA,OAAO,GAAG,MAAM;AAAA,EAAK,WAAA,CAAY,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA;AAC7C;;;ALiDA,qBAAA,EAAA;;;AM3JA,aAAA,EAAA;AACAtB,WAAAA,EAAAA;AAOA,aAAA,EAAA;;;ACFAU,gBAAAA,EAAAA;AACAX,YAAAA,EAAAA;AACA,UAAA,EAAA;AACA,WAAA,EAAA;AACA,WAAA,EAAA;AACA,UAAA,EAAA;AAMA,IAAM,uBAAA,GAA0B,CAAA;AA0BhC,eAAsB,qBAAA,CACpB,QAAA,EACA,KAAA,EACA,OAAA,EAC8C;AAC9C,EAAA,MAAM,KAAA,GAAQ,SAAS,KAAA,IAAS,uBAAA;AAChC,EAAA,MAAM,QAAA,GAAW,gBAAgB,QAAQ,CAAA;AAGzC,EAAA,MAAM,OAAA,GAAU,QAAA,CACb,OAAA,CAAQ,8DAA8D,EACtE,GAAA,EAAI;AAEP,EAAA,IAAI,OAAA,CAAQ,MAAA,KAAW,CAAA,EAAG,OAAO,EAAC;AAElC,EAAA,MAAM,WAAA,GAAc,MAAM,SAAA,CAAU,KAAK,CAAA;AAEzC,EAAA,MAAM,SAA0C,EAAC;AACjD,EAAA,KAAA,MAAW,OAAO,OAAA,EAAS;AACzB,IAAA,MAAM,WAAW,IAAI,YAAA;AAAA,MACnB,IAAI,SAAA,CAAU,MAAA;AAAA,MACd,IAAI,SAAA,CAAU,UAAA;AAAA,MACd,GAAA,CAAI,UAAU,UAAA,GAAa;AAAA,KAC7B;AACA,IAAA,MAAA,CAAO,IAAA,CAAK,EAAE,EAAA,EAAI,GAAA,CAAI,EAAA,EAAI,OAAO,gBAAA,CAAiB,WAAA,EAAa,QAAQ,CAAA,EAAG,CAAA;AAAA,EAC5E;AAEA,EAAA,MAAA,CAAO,KAAK,CAAC,CAAA,EAAG,MAAM,CAAA,CAAE,KAAA,GAAQ,EAAE,KAAK,CAAA;AACvC,EAAA,MAAM,IAAA,GAAO,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AAClC,EAAA,IAAI,IAAA,CAAK,MAAA,KAAW,CAAA,EAAG,OAAO,EAAC;AAI/B,EAAA,MAAM,eAAe,IAAA,CAAK,GAAA,CAAI,MAAM,GAAG,CAAA,CAAE,KAAK,GAAG,CAAA;AACjD,EAAA,MAAM,GAAA,GAAM,8GAA8G,YAAY,CAAA,CAAA,CAAA;AACtI,EAAA,MAAM,QAAA,GAAW,QAAA,CACd,OAAA,CAAQ,GAAG,CAAA,CACX,GAAA,CAAI,GAAG,IAAA,CAAK,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,CAAE,EAAE,CAAC,CAAA;AAE/B,EAAA,MAAM,OAAA,GAAU,IAAI,GAAA,CAAI,QAAA,CAAS,GAAA,CAAI,CAAC,CAAA,KAAM,CAAC,CAAA,CAAE,EAAA,EAAI,CAAC,CAAC,CAAC,CAAA;AACtD,EAAA,MAAM,UAA+C,EAAC;AAEtD,EAAA,KAAA,MAAW,EAAE,EAAA,EAAI,KAAA,EAAM,IAAK,IAAA,EAAM;AAChC,IAAA,MAAM,GAAA,GAAM,OAAA,CAAQ,GAAA,CAAI,EAAE,CAAA;AAC1B,IAAA,IAAI,CAAC,GAAA,EAAK;AACV,IAAA,MAAM,KAAA,GAAw
B;AAAA,MAC5B,IAAI,GAAA,CAAI,EAAA;AAAA,MACR,UAAU,GAAA,CAAI,SAAA;AAAA,MACd,WAAW,GAAA,CAAI,UAAA;AAAA,MACf,SAAS,GAAA,CAAI,QAAA;AAAA,MACb,aAAa,GAAA,CAAI,YAAA;AAAA,MACjB,MAAM,GAAA,CAAI,IAAA;AAAA,MACV,WAAW,GAAA,CAAI;AAAA,KACjB;AACA,IAAA,IAAI,GAAA,CAAI,UAAU,IAAA,EAAM;AACtB,MAAA,KAAA,CAAM,QAAQ,GAAA,CAAI,KAAA;AAAA,IACpB;AACA,IAAA,OAAA,CAAQ,IAAA,CAAK,EAAE,IAAA,EAAM,KAAA,EAAO,OAAO,CAAA;AAAA,EACrC;AAEA,EAAA,OAAO,OAAA;AACT;AAQA,eAAsB,eAAA,CACpB,QAAA,EACA,KAAA,EACA,OAAA,EAC8C;AAC9C,EAAA,MAAM,KAAA,GAAQ,SAAS,KAAA,IAAS,uBAAA;AAChC,EAAA,MAAM,iBAAiB,KAAA,GAAQ,oBAAA;AAE/B,EAAA,MAAM,SAAA,GAAY,MAAM,aAAA,EAAc;AAEtC,EAAA,IAAI,UAAU,MAAA,EAAQ;AAEpB,IAAA,MAAM,CAAC,aAAA,EAAewB,eAAc,CAAA,GAAI,MAAM,QAAQ,GAAA,CAAI;AAAA,MACxD,sBAAsB,QAAA,EAAU,KAAA,EAAO,EAAE,KAAA,EAAO,gBAAgB,CAAA;AAAA,MAChE,QAAQ,OAAA,CAAQ,yBAAA,CAA0B,QAAA,EAAU,KAAA,EAAO,cAAc,CAAC;AAAA,KAC3E,CAAA;AAKD,IAAA,IAAI,aAAA,CAAc,WAAW,CAAA,EAAG;AAC9B,MAAA,OAAOA,eAAAA,CACJ,GAAA,CAAI,CAAC,CAAA,MAAO,EAAE,IAAA,EAAM,CAAA,CAAE,KAAA,EAAO,KAAA,EAAO,EAAE,KAAA,EAAM,CAAE,CAAA,CAC9C,KAAA,CAAM,GAAG,KAAK,CAAA;AAAA,IACnB;AAEA,IAAA,MAAM,SAAA,GAAiDA,eAAAA,CAAe,GAAA,CAAI,CAAC,CAAA,MAAO;AAAA,MAChF,MAAM,CAAA,CAAE,KAAA;AAAA,MACR,OAAO,CAAA,CAAE;AAAA,KACX,CAAE,CAAA;AAEF,IAAA,MAAM,MAAA,GAAS,iBAAA;AAAA,MACb,aAAA;AAAA,MACA,SAAA;AAAA,MACA,CAAC,SAAS,IAAA,CAAK,EAAA;AAAA,MACf,EAAE,KAAA,EAAO,QAAA,EAAU,gBAAA;AAAiB,KACtC;AAEA,IAAA,OAAO,MAAA;AAAA,EACT;AAGA,EAAA,MAAM,cAAA,GAAiB,yBAAA,CAA0B,QAAA,EAAU,KAAA,EAAO,KAAK,CAAA;AACvE,EAAA,OAAO,cAAA,CAAe,GAAA,CAAI,CAAC,CAAA,MAAO,EAAE,IAAA,EAAM,CAAA,CAAE,KAAA,EAAO,KAAA,EAAO,CAAA,CAAE,KAAA,EAAM,CAAE,CAAA;AACtE;;;AD9IA,iBAAA,EAAA;AAGA,eAAA,EAAA;AAGA,iBAAA,EAAA;AAGA,qBAAA,EAAA;;;AEmBO,SAAS,WAAA,GAAsB;AACpC,EAAA,OAAO,OAAA,CAAQ,GAAA,CAAI,qBAAqB,CAAA,IAAK,QAAQ,GAAA,EAAI;AAC3D;;;AChCA,eAAA,EAAA;AACA,YAAA,EAAA;;;ACAA,YAAA,EAAA;ACDA,IAAM,SAAA,GAAY,SAAA;AAClB,IAAM,UAAA,GAAa,sBAAA;AAGZ,IAAM,sBAAA,GAAyB,EAAA,GAAK,EAAA,GAAK,EAAA,GAAK,GAAA;AAE9C,IAAM,SAAS,CAAC,UAAA,EAAY,MAAA,EAAQ,MAAA,EAAQ,UAAU,UAAU,CAAA;AAGhE,IAAM,KAAA,GAAQ,CAAC,WAAA,EAAa,QAAA,EAAU,UAAU,OAAO,CAAA;AAqB9D,SAAS,aAAa,QAAA,EAA0B;AAC9C,EAAA,OAAO9B,IAAAA,CAAK,QAAA,EAAU,SAAA,EAAW,UAAU,CAAA;AAC7C;AAEA,SAAS,YAAY,KAAA,EAAoC;AACvD,EAAA,OAAO,OAAO,KAAA,KAAU,QAAA,IAAa,MAAA,CAA6B,SAAS,KAAK,CAAA;AAClF;AAEA,SAAS,WAAW,KAAA,EAAmC;AACrD,EAAA,OAAO,OAAO,KAAA,KAAU,QAAA,IAAa,KAAA,CAA4B,SAAS,KAAK,CAAA;AACjF;AAEA,SAAS,UAAU,KAAA,EAAiC;AAClD,EAAA,IAAI,OAAO,KAAA,KAAU,QAAA,EAAU,OAAO,KAAA;AACtC,EAAA,OAAO,CAAC,MAAA,CAAO,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,KAAK,CAAC,CAAA;AACxC;AAEA,SAAS,cAAc,KAAA,EAAmC;AACxD,EAAA,OAAO,KAAA,CAAM,OAAA,CAAQ,KAAK,CAAA,IAAK,KAAA,CAAM,MAAM,CAAC,IAAA,KAAS,OAAO,IAAA,KAAS,QAAQ,CAAA;AAC/E;AAGA,SAAS,oBAAoB,GAAA,EAAoC;AAC/D,EAAA,IAAI,IAAI,aAAA,KAAkB,MAAA,IAAa,OAAO,GAAA,CAAI,eAAe,SAAA,EAAW;AAC1E,IAAA,GAAA,CAAI,gBAAgB,GAAA,CAAI,UAAA;AACxB,IAAA,OAAO,GAAA,CAAI,UAAA;AAAA,EACb;AACF;AAEA,SAAS,mBAAmB,GAAA,EAAiC;AAC3D,EAAA,IAAI,OAAO,GAAA,KAAQ,QAAA,IAAY,GAAA,KAAQ,MAAM,OAAO,KAAA;AACpD,EAAA,MAAM,KAAA,GAAQ,GAAA;AACd,EAAA,mBAAA,CAAoB,KAAK,CAAA;AAEzB,EAAA,OACE,OAAO,KAAA,CAAM,aAAA,KAAkB,aAC/B,OAAO,KAAA,CAAM,YAAY,QAAA,IACzB,WAAA,CAAY,MAAM,aAAa,CAAA,IAC/B,OAAO,KAAA,CAAM,WAAA,KAAgB,YAC7B,KAAA,CAAM,WAAA,IAAe,KACrB,KAAA,CAAM,WAAA,IAAe,CAAA,IACrB,aAAA,CAAc,MAAM,WAAW,CAAA,IAC/B,MAAM,OAAA,CAAQ,KAAA,CAAM,YAAY,CAAA,IAChC,KAAA,CAAM,aAAa,KAAA,CAAM,CAAC,SAAS,UAAA,CAAW,IAAI,CAAC,CAAA,IACnD,SAAA,CAAU,MAAM,UAAU,CAAA;AAE9B;AA2BO,SAAS,cAAc,QAAA,EAAqC;AACjE,EAAA,IAAI;AACF,IAAA,MAAM,IAAA,GAAO,aAAa,QAAQ,CAAA;AAClC,IAAA,IAAI,CAACwB,UAAAA,CAAW,IAAI,CAAA,EAAG,OAAO,IAAA;AAC9B,IAAA,MAAM,GAAA,GAAME,YAAAA,CAAa,IAAA,EAAM,OAAO,CAAA;AACtC,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAA
A,CAAM,GAAG,CAAA;AAC7B,IAAA,IAAI,CAAC,kBAAA,CAAmB,MAAM,CAAA,EAAG,OAAO,IAAA;AAExC,IAAA,MAAM,GAAA,GAAM,KAAK,GAAA,EAAI,GAAI,IAAI,IAAA,CAAK,MAAA,CAAO,UAAU,CAAA,CAAE,OAAA,EAAQ;AAC7D,IAAA,IAAI,MAAM,sBAAA,EAAwB;AAChC,MAAA,eAAA,CAAgB,QAAQ,CAAA;AACxB,MAAA,OAAO,IAAA;AAAA,IACT;AACA,IAAA,OAAO,MAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF;AAwBO,SAAS,gBAAgB,QAAA,EAAwB;AACtD,EAAA,IAAI;AACF,IAAA,MAAM,IAAA,GAAO,aAAa,QAAQ,CAAA;AAClC,IAAA,IAAIF,UAAAA,CAAW,IAAI,CAAA,EAAGR,WAAW,IAAI,CAAA;AAAA,EACvC,CAAA,CAAA,MAAQ;AAAA,EAER;AACF;ACoOA,YAAA,EAAA;AC7XA,eAAA,EAAA;ACAA,YAAA,EAAA;;;ACDA,YAAA,EAAA;;;ACFA,UAAA,EAAA;AA6BA,YAAA,EAAA;AAaA,eAAA,EAAA;AAaAV,YAAAA,EAAAA;AA0BA,YAAA,EAAA;;;AChFA,UAAA,EAAA;ACIA,eAAA,EAAA;AACA,YAAA,EAAA;AACA,YAAA,EAAA;;;ACLA,YAAA,EAAA;ACCA,YAAA,EAAA;ACCA,YAAA,EAAA;ACCA,YAAA,EAAA;ACOA,IAAM,YAAA,GAAe,EAAA,GAAK,EAAA,GAAK,EAAA,GAAK,GAAA;AACpC,IAAM,gBAAA,GAAmB,GAAA;AACzB,IAAM,cAAA,GAAiB,mBAAA;AAOvB,eAAsB,kBAAA,CACpB,cAAsB,gBAAA,EACE;AACxB,EAAA,IAAI;AACF,IAAA,MAAM,MAAM,MAAM,KAAA;AAAA,MAChB,wCAAwC,WAAW,CAAA,UAAA,CAAA;AAAA,MACnD,EAAE,MAAA,EAAQ,WAAA,CAAY,OAAA,CAAQ,gBAAgB,CAAA;AAAE,KAClD;AACA,IAAA,IAAI,CAAC,GAAA,CAAI,EAAA,EAAI,OAAO,IAAA;AACpB,IAAA,MAAM,IAAA,GAAQ,MAAM,GAAA,CAAI,IAAA,EAAK;AAC7B,IAAA,MAAM,MAAA,GAAS,KAAK,QAAQ,CAAA;AAC5B,IAAA,OAAO,OAAO,MAAA,KAAW,QAAA,GAAW,MAAA,GAAS,IAAA;AAAA,EAC/C,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF;AAOA,eAAsB,eACpB,QAAA,EACmC;AACnC,EAAA,IAAI;AACF,IAAA,MAAM,SAAA,GAAYN,IAAAA,CAAK,QAAA,EAAU,cAAc,CAAA;AAG/C,IAAA,MAAM,MAAA,GAAS,UAAU,SAAS,CAAA;AAClC,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,OAAO;AAAA,QACL,OAAA,EAAS,OAAA;AAAA,QACT,QAAQ,MAAA,CAAO,MAAA;AAAA,QACf,eAAA,EAAiB,QAAA,CAAS,MAAA,CAAO,MAAA,EAAQ,OAAO;AAAA,OAClD;AAAA,IACF;AAGA,IAAA,MAAM,MAAA,GAAS,MAAM,kBAAA,EAAmB;AACxC,IAAA,IAAI,MAAA,KAAW,MAAM,OAAO,IAAA;AAG5B,IAAA,IAAI;AACF,MAAAe,SAAAA,CAAU,QAAA,EAAU,EAAE,SAAA,EAAW,MAAM,CAAA;AACvC,MAAA,MAAM,SAAA,GAAuB,EAAE,MAAA,EAAO;AACtC,MAAAU,aAAAA,CAAc,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,SAAS,CAAC,CAAA;AAAA,IACpD,CAAA,CAAA,MAAQ;AAAA,IAER;AAEA,IAAA,OAAO;AAAA,MACL,OAAA,EAAS,OAAA;AAAA,MACT,MAAA;AAAA,MACA,eAAA,EAAiB,QAAA,CAAS,MAAA,EAAQ,OAAO;AAAA,KAC3C;AAAA,EACF,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF;AAKO,SAAS,aAAA,CAAc,SAAiB,MAAA,EAAyB;AACtE,EAAA,OAAO,SAAS,MAAA,CAAO,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,CAAA,EAAI,EAAE,CAAA,GAAI,QAAA,CAAS,QAAQ,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,GAAI,EAAE,CAAA;AAClF;AAwBO,SAAS,gCAAA,CACd,SACA,MAAA,EACQ;AACR,EAAA,MAAM,OAAA,GAAU,aAAA,CAAc,OAAA,EAAS,MAAM,IACzC,yCAAA,GACA,EAAA;AACJ,EAAA,OAAO;AAAA;AAAA;AAAA,gBAAA,EAA8C,MAAM,CAAA,yBAAA,EAA4B,OAAO,CAAA,CAAA,EAAI,OAAO,CAAA;AAAA;AAAA,CAAA;AAC3G;AAwBA,SAAS,QAAA,CAAS,GAAW,CAAA,EAAoB;AAC/C,EAAA,MAAM,KAAA,GAAQ,CAAC,CAAA,KAAwC;AAErD,IAAA,MAAM,KAAA,GAAQ,CAAA,CAAE,KAAA,CAAM,GAAG,EAAE,CAAC,CAAA;AAC5B,IAAA,MAAM,QAAQ,KAAA,CAAM,KAAA,CAAM,GAAG,CAAA,CAAE,IAAI,CAAA,CAAA,KAAK;AACtC,MAAA,MAAM,GAAA,GAAM,QAAA,CAAS,CAAA,EAAG,EAAE,CAAA;AAC1B,MAAA,OAAO,KAAA,CAAM,GAAG,CAAA,GAAI,CAAA,GAAI,GAAA;AAAA,IAC1B,CAAC,CAAA;AACD,IAAA,OAAO,CAAC,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA,EAAG,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA,EAAG,KAAA,CAAM,CAAC,CAAA,IAAK,CAAC,CAAA;AAAA,EACrD,CAAA;AACA,EAAA,MAAM,CAAC,IAAA,EAAM,IAAA,EAAM,IAAI,CAAA,GAAI,MAAM,CAAC,CAAA;AAClC,EAAA,MAAM,CAAC,IAAA,EAAM,IAAA,EAAM,IAAI,CAAA,GAAI,MAAM,CAAC,CAAA;AAClC,EAAA,IAAI,IAAA,KAAS,IAAA,EAAM,OAAO,IAAA,GAAO,IAAA;AACjC,EAAA,IAAI,IAAA,KAAS,IAAA,EAAM,OAAO,IAAA,GAAO,IAAA;AACjC,EAAA,OAAO,IAAA,GAAO,IAAA;AAChB;AAEA,SAAS,UAAU,SAAA,EAAqC;AACtD,EAAA,IAAI;AACF,IAAA,MAAM,IAAA,GAAOM,SAAS,SAAS,CAAA;AAC/B,IAAA,IAAI,KAAK,GAAA,EAAI,GAAI,IAAA,CAAK,OAAA,GAAU,cAAc,OAAO,IAAA;AAErD,IAAA,MAAM,GAAA,GAAML,YAAAA,CAAa,SAAA,EAAW
,OAAO,CAAA;AAC3C,IAAA,MAAM,IAAA,GAAO,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA;AAC3B,IAAA,IAAI,OAAO,IAAA,CAAK,MAAA,KAAW,YAAY,CAAC,IAAA,CAAK,QAAQ,OAAO,IAAA;AAC5D,IAAA,OAAO,IAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;AD3JA,IAAM,uBAAA,GAA0B,CAAA;;AAAA;;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA,CAAA;AAkDhC,SAAS,aAAa,MAAA,EAAwB;AAC5C,EAAA,QAAQ,MAAA;AAAQ,IACd,KAAK,iBAAA;AACH,MAAA,OAAO,iBAAA;AAAA,IACT,KAAK,iBAAA;AACH,MAAA,OAAO,iBAAA;AAAA,IACT,KAAK,cAAA;AACH,MAAA,OAAO,cAAA;AAAA,IACT,KAAK,QAAA;AACH,MAAA,OAAO,QAAA;AAAA,IACT;AACE,MAAA,OAAO,MAAA;AAAA;AAEb;AAQA,SAAS,qBAAqB,MAAA,EAA4B;AACxD,EAAA,MAAM,IAAA,GAAO,MAAA,CAAO,OAAA,CAAQ,KAAA,CAAM,GAAG,EAAE,CAAA;AACvC,EAAA,MAAM,IAAA,GAAO,MAAA,CAAO,IAAA,CAAK,MAAA,GAAS,CAAA,GAAI,CAAA,EAAA,EAAK,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA,CAAA,GAAM,EAAA;AACvE,EAAA,MAAM,MAAA,GAAS,YAAA,CAAa,MAAA,CAAO,MAAM,CAAA;AACzC,EAAA,OAAO,CAAA,IAAA,EAAO,MAAA,CAAO,OAAO,CAAA,EAAA,EAAK,IAAI;AAAA,WAAA,EAAgB,IAAI,QAAQ,MAAM,CAAA,CAAA;AACzE;AAEA,SAAS,0BAA0B,QAAA,EAAiC;AAClE,EAAA,MAAM,KAAA,GAAQ,cAAc,QAAQ,CAAA;AACpC,EAAA,IAAI,KAAA,KAAU,IAAA,IAAQ,CAAC,KAAA,CAAM,eAAe,OAAO,IAAA;AAEnD,EAAA,MAAM,UAAA,GAAa,MAAM,WAAA,CAAY,MAAA,KAAW,IAAI,QAAA,GAAW,KAAA,CAAM,WAAA,CAAY,IAAA,CAAK,IAAI,CAAA;AAC1F,EAAA,MAAM,WAAA,GAAc,MAAM,YAAA,CAAa,MAAA,KAAW,IAAI,QAAA,GAAW,KAAA,CAAM,YAAA,CAAa,IAAA,CAAK,IAAI,CAAA;AAE7F,EAAA,OAAO;AAAA;;AAAA;;AAAA,MAAA,EAKD,MAAM,OAAO;AAAA,OAAA,EACZ,KAAA,CAAM,aAAa,CAAA,EAAA,EAAK,KAAA,CAAM,WAAW,CAAA;AAAA,aAAA,EACnC,UAAU;AAAA,cAAA,EACT,WAAW;AAAA,SAAA,EAChB,MAAM,UAAU;;AAAA,kBAAA,EAEP,KAAA,CAAM,aAAa,CAAA,kCAAA,EAAqC,KAAA,CAAM,aAAa,CAAA;AAAA,qDAAA,EACxC,MAAM,aAAa,CAAA;AAAA,CAAA;AAE1E;AAWA,eAAsB,gBAAgB,QAAA,EAAoC;AACxE,EAAA,MAAM,IAAA,GAAO,YAAY,WAAA,EAAY;AAIrC,EAAA,IAAI;AACF,IAAA,MAAM,aAAa,IAAI,CAAA;AAAA,EACzB,CAAA,CAAA,MAAQ;AAAA,EAER;AAGA,EAAA,MAAM,OAAA,GAAU,MAAM,kBAAA,CAAmB,IAAA,EAAM,CAAC,CAAA;AAGhD,EAAA,IAAI,MAAA,GAAS,uBAAA;AAGb,EAAA,IAAI,OAAA,CAAQ,SAAS,CAAA,EAAG;AACtB,IAAA,MAAM,mBAAmB,OAAA,CAAQ,GAAA,CAAI,oBAAoB,CAAA,CAAE,KAAK,MAAM,CAAA;AACtE,IAAA,MAAA,IAAU;AAAA;;AAAA;;AAAA;;AAAA,EAOZ,gBAAgB;AAAA,CAAA;AAAA,EAEhB;AAEA,EAAA,MAAM,aAAA,GAAgB,0BAA0B,IAAI,CAAA;AACpD,EAAA,IAAI,kBAAkB,IAAA,EAAM;AAC1B,IAAA,MAAA,IAAU,aAAA;AAAA,EACZ;AAMA,EAAA,IAAI,CAAC,OAAA,CAAQ,MAAA,CAAO,KAAA,IAAS,CAAC,OAAA,CAAQ,GAAA,CAAI,IAAI,CAAA,IAAK,CAAC,OAAA,CAAQ,GAAA,CAAI,oBAAoB,CAAA,EAAG;AACrF,IAAA,IAAI;AACF,MAAA,MAAM,eAAe,MAAM,cAAA,CAAe1B,KAAK,IAAA,EAAM,SAAA,EAAW,QAAQ,CAAC,CAAA;AACzE,MAAA,IAAI,cAAc,eAAA,EAAiB;AACjC,QAAA,MAAA,IAAU,gCAAA,CAAiC,YAAA,CAAa,OAAA,EAAS,YAAA,CAAa,MAAM,CAAA;AAAA,MACtF;AAAA,IACF,CAAA,CAAA,MAAQ;AAAA,IAER;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;;;AEpLA,YAAA,EAAA;AASA,eAAsB,aAAa,QAAA,EAA6C;AAC9E,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,MAAM,gBAAgB,QAAQ,CAAA;AAChD,EAAA,MAAM,WAAyC,EAAC;AAEhD,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,IAAI,IAAA,CAAK,aAAa,MAAA,EAAQ;AAC5B,MAAA,QAAA,CAAS,IAAA,CAAK;AAAA,QACZ,IAAA,EAAM,EAAA;AAAA,QACN,KAAA,EAAO,CAAA,sBAAA,EAAyB,IAAA,CAAK,OAAO,CAAA,CAAA;AAAA,QAC5C,QAAA,EAAU,MAAA;AAAA,QACV,iBAAiB,IAAA,CAAK,EAAA;AAAA,QACtB,MAAA,EAAQ;AAAA,OACT,CAAA;AAAA,IACH;AAAA,EACF;AAEA,EAAA,MAAM,eAAe,KAAA,CAAM,MAAA,GAAS,IAAI,CAAC,YAAY,IAAI,EAAC;AAC1D,EAAA,OAAO,EAAE,UAAU,YAAA,EAAa;AAClC;;;ACxBA,YAAA,EAAA;ACCO,IAAMgC,kBAAiBxB,CAAAA,CAAE,IAAA,CAAK,CAAC,OAAA,EAAS,SAAA,EAAW,MAAM,CAAC,CAAA;AAG1D,IAAM,sBAAA,GAAyBA,EAAE,MAAA,CAAO;AAAA,EAC7C,IAAA,EAAMA,CAAAA,CAAE,OAAA,CAAQ,cAAc,CAAA;AAAA,EAC9B,IAAA,EAAMA,EAAE,MAAA,EAAO;AAAA,EACf,OAAA,EAASA,EAAE,MAAA,EAAO;AAAA,EAClB,SAAA,EAAWA,CAAAA,CAAE,OAAA,EAAQ,
CAAE,QAAA;AACzB,CAAC,CAAA;AAGM,IAAM,mBAAA,GAAsBA,EAAE,MAAA,CAAO;AAAA,EAC1C,IAAA,EAAMA,CAAAA,CAAE,OAAA,CAAQ,WAAW,CAAA;AAAA,EAC3B,IAAA,EAAMA,EAAE,MAAA,EAAO;AAAA,EACf,UAAUA,CAAAA,CAAE,MAAA,EAAO,CAAE,GAAA,GAAM,QAAA;AAC7B,CAAC,CAAA;AAGM,IAAM,iBAAA,GAAoBA,EAAE,MAAA,CAAO;AAAA,EACxC,IAAA,EAAMA,CAAAA,CAAE,OAAA,CAAQ,QAAQ,CAAA;AAAA,EACxB,OAAA,EAASA,EAAE,MAAA,EAAO;AAAA,EAClB,gBAAgBA,CAAAA,CAAE,MAAA,EAAO,CAAE,GAAA,GAAM,QAAA,EAAS;AAAA,EAC1C,OAAA,EAASA,EAAE,MAAA,EAAO,CAAE,KAAI,CAAE,QAAA,GAAW,QAAA;AACvC,CAAC,CAAA;AAGM,IAAM,eAAA,GAAkBA,CAAAA,CAAE,kBAAA,CAAmB,MAAA,EAAQ;AAAA,EAC1D,sBAAA;AAAA,EACA,mBAAA;AAAA,EACA;AACF,CAAC,CAAA;AAGM,IAAM,UAAA,GAAaA,EAAE,MAAA,CAAO;AAAA,EACjC,EAAA,EAAIA,CAAAA,CAAE,MAAA,EAAO,CAAE,IAAI,CAAC,CAAA;AAAA,EACpB,WAAA,EAAaA,EAAE,MAAA,EAAO;AAAA,EACtB,QAAA,EAAUwB,eAAAA;AAAA,EACV,KAAA,EAAO,eAAA;AAAA,EACP,WAAA,EAAaxB,EAAE,MAAA;AACjB,CAAC,CAAA;AAGM,IAAM,gBAAA,GAAmBA,EAAE,MAAA,CAAO;AAAA,EACvC,KAAA,EAAOA,CAAAA,CAAE,KAAA,CAAM,UAAU;AAC3B,CAAC,CAAA;ACrCM,SAAS,YAAY,IAAA,EAAsB;AAChD,EAAA,MAAM,OAAA,GAAU,IAAA,CACb,OAAA,CAAQ,KAAA,EAAO,KAAK,CAAA,CACpB,OAAA,CAAQ,SAAA,EAAW,QAAQ,CAAA,CAC3B,OAAA,CAAQ,KAAA,EAAO,OAAO,CAAA;AACzB,EAAA,OAAO,IAAI,MAAA,CAAO,CAAA,CAAA,EAAI,OAAO,CAAA,CAAA,CAAG,CAAA;AAClC;AASO,SAAS,SAAA,CAAU,SAAiB,IAAA,EAAwB;AACjE,EAAA,MAAM,KAAA,GAAQ,YAAY,IAAI,CAAA;AAC9B,EAAA,MAAM,UAAoB,EAAC;AAE3B,EAAA,SAAS,KAAK,GAAA,EAAmB;AAC/B,IAAA,MAAM,OAAA,GAAU,YAAY,GAAG,CAAA;AAC/B,IAAA,KAAA,MAAW,SAAS,OAAA,EAAS;AAE3B,MAAA,IAAI,KAAA,CAAM,UAAA,CAAW,GAAG,CAAA,IAAK,UAAU,cAAA,EAAgB;AAEvD,MAAA,MAAM,QAAA,GAAWR,IAAAA,CAAK,GAAA,EAAK,KAAK,CAAA;AAChC,MAAA,MAAM,IAAA,GAAO+B,SAAS,QAAQ,CAAA;AAC9B,MAAA,IAAI,IAAA,CAAK,aAAY,EAAG;AACtB,QAAA,IAAA,CAAK,QAAQ,CAAA;AAAA,MACf,CAAA,MAAO;AACL,QAAA,MAAM,OAAA,GAAUE,QAAAA,CAAS,OAAA,EAAS,QAAQ,CAAA;AAC1C,QAAA,IAAI,KAAA,CAAM,IAAA,CAAK,OAAO,CAAA,EAAG;AACvB,UAAA,OAAA,CAAQ,KAAK,OAAO,CAAA;AAAA,QACtB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,EAAA,IAAA,CAAK,OAAO,CAAA;AACZ,EAAA,OAAO,QAAQ,IAAA,EAAK;AACtB;;;AClCO,SAAS,mBAAA,CACd,SACA,KAAA,EACa;AACb,EAAA,MAAM,KAAA,GAAQ,SAAA,CAAU,OAAA,EAAS,KAAA,CAAM,IAAI,CAAA;AAC3C,EAAA,MAAM,KAAA,GAAQ,IAAI,MAAA,CAAO,KAAA,CAAM,OAAO,CAAA;AACtC,EAAA,MAAM,aAA0B,EAAC;AAEjC,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,MAAM,QAAA,GAAWjC,IAAAA,CAAK,OAAA,EAAS,IAAI,CAAA;AACnC,IAAA,MAAM,OAAA,GAAU0B,YAAAA,CAAa,QAAA,EAAU,OAAO,CAAA;AAC9C,IAAA,MAAM,KAAA,GAAQ,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAA;AAEhC,IAAA,IAAI,MAAM,SAAA,EAAW;AACnB,MAAA,MAAM,KAAA,GAAQ,MAAM,IAAA,CAAK,CAAC,SAAS,KAAA,CAAM,IAAA,CAAK,IAAI,CAAC,CAAA;AACnD,MAAA,IAAI,CAAC,KAAA,EAAO;AACV,QAAA,UAAA,CAAW,IAAA,CAAK;AAAA,UACd,IAAA;AAAA,UACA,OAAA,EAAS,CAAA,QAAA,EAAW,KAAA,CAAM,OAAO,CAAA,kBAAA;AAAA,SAClC,CAAA;AAAA,MACH;AAAA,IACF,CAAA,MAAO;AACL,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,KAAA,CAAM,QAAQ,CAAA,EAAA,EAAK;AACrC,QAAA,IAAI,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,CAAC,CAAE,CAAA,EAAG;AACzB,UAAA,UAAA,CAAW,IAAA,CAAK;AAAA,YACd,IAAA;AAAA,YACA,MAAM,CAAA,GAAI,CAAA;AAAA,YACV,OAAA,EAAS,CAAA,QAAA,EAAW,KAAA,CAAM,OAAO,CAAA,QAAA;AAAA,WAClC,CAAA;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,UAAA;AACT;ACrCO,SAAS,gBAAA,CACd,SACA,KAAA,EACa;AACb,EAAA,MAAM,KAAA,GAAQ,SAAA,CAAU,OAAA,EAAS,KAAA,CAAM,IAAI,CAAA;AAC3C,EAAA,MAAM,aAA0B,EAAC;AAEjC,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,MAAM,UAAUA,YAAAA,CAAa1B,IAAAA,CAAK,OAAA,EAAS,IAAI,GAAG,OAAO,CAAA;AAEzD,IAAA,MAAM,SAAA,GAAY,YAAY,EAAA,GAAK,CAAA,GAAI,QAAQ,KAAA,CAAM,IAAI,EAAE,MAAA,CAAO,CAAC,GAAG,CAAA,EAAG,GAAA,KAAQ,IAAI,GAAA,CAAI,MAAA,GAAS,KAAK,GAAA,CAAI,CAAC,CAAA,KAAM,EAAE,CAAA,CAAE,MAAA;AAEtH,IAAA,IAAI,SAAA,GAAY,MAAM,QAAA,EAAU;AAC9B,MAAA,UAAA,CAAW,IAAA,CAAK;AAAA,QACd,IAAA;AAAA,QACA,OAAA,EAAS,CAAA,SAAA,EAAY,SAAS,CAAA,yBAAA,EA
A4B,MAAM,QAAQ,CAAA;AAAA,OACzE,CAAA;AAAA,IACH;AAAA,EACF;AAEA,EAAA,OAAO,UAAA;AACT;ACxBA,IAAM,sBAAA,GAAyB,GAAA;AAExB,SAAS,cAAA,CAAe,OAAoB,OAAA,EAA+B;AAChF,EAAA,MAAM,YAAA,GAAe,MAAM,cAAA,IAAkB,CAAA;AAC7C,EAAA,MAAM,OAAA,GAAU,MAAM,OAAA,IAAW,sBAAA;AAEjC,EAAA,IAAI;AACF,IAAAkC,QAAAA,CAAS,KAAA,CAAM,OAAA,EAAS,EAAE,KAAA,EAAO,CAAC,MAAA,EAAQ,MAAA,EAAQ,MAAM,CAAA,EAAG,GAAA,EAAK,OAAA,EAAS,SAAS,CAAA;AAElF,IAAA,IAAI,iBAAiB,CAAA,EAAG;AACtB,MAAA,OAAO,CAAC,EAAE,OAAA,EAAS,CAAA,yCAAA,EAA4C,YAAY,IAAI,CAAA;AAAA,IACjF;AACA,IAAA,OAAO,EAAC;AAAA,EACV,SAAS,GAAA,EAAc;AACrB,IAAA,MAAM,QAAA,GAAY,IAA4B,MAAA,IAAU,CAAA;AACxD,IAAA,IAAI,aAAa,YAAA,EAAc;AAC7B,MAAA,OAAO,EAAC;AAAA,IACV;AACA,IAAA,MAAM,MAAA,GAAA,CAAW,GAAA,CAA4B,MAAA,IAAU,MAAA,CAAO,KAAA,CAAM,CAAC,CAAA,EAClE,QAAA,CAAS,OAAO,CAAA,CAChB,IAAA,EAAK;AACR,IAAA,MAAM,GAAA,GAAM,MAAA,GACR,CAAA,6BAAA,EAAgC,QAAQ,CAAA,WAAA,EAAc,YAAY,CAAA,GAAA,EAAM,MAAM,CAAA,CAAA,GAC9E,CAAA,6BAAA,EAAgC,QAAQ,CAAA,WAAA,EAAc,YAAY,CAAA,CAAA,CAAA;AACtE,IAAA,OAAO,CAAC,EAAE,OAAA,EAAS,GAAA,EAAK,CAAA;AAAA,EAC1B;AACF;;;ACFO,SAAS,eAAe,OAAA,EAA6B;AAC1D,EAAA,MAAM,UAAA,GAAalC,IAAAA,CAAK,OAAA,EAAS,SAAA,EAAW,YAAY,CAAA;AACxD,EAAA,IAAI,CAACwB,UAAAA,CAAW,UAAU,CAAA,EAAG;AAC3B,IAAA,OAAO,EAAE,KAAA,EAAO,EAAC,EAAE;AAAA,EACrB;AAEA,EAAA,MAAM,GAAA,GAAME,YAAAA,CAAa,UAAA,EAAY,OAAO,CAAA;AAC5C,EAAA,MAAM,IAAA,GAAgB,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA;AACpC,EAAA,OAAO,gBAAA,CAAiB,MAAM,IAAI,CAAA;AACpC;AASO,SAAS,QAAA,CAAS,SAAiB,KAAA,EAA6B;AACrE,EAAA,OAAO,KAAA,CAAM,GAAA,CAAI,CAAC,IAAA,KAAS;AACzB,IAAA,IAAI;AACF,MAAA,MAAM,UAAA,GAAa,QAAA,CAAS,OAAA,EAAS,IAAI,CAAA;AACzC,MAAA,OAAO,EAAE,IAAA,EAAM,UAAA,EAAY,MAAA,EAAQ,UAAA,CAAW,WAAW,CAAA,EAAE;AAAA,IAC7D,SAAS,GAAA,EAAK;AACZ,MAAA,MAAM,OAAA,GAAU,GAAA,YAAe,KAAA,GAAQ,GAAA,CAAI,OAAA,GAAU,mBAAA;AACrD,MAAA,OAAO,EAAE,IAAA,EAAM,UAAA,EAAY,CAAC,EAAE,OAAA,EAAS,CAAA,kBAAA,EAAqB,OAAO,CAAA,CAAA,EAAI,CAAA,EAAG,MAAA,EAAQ,KAAA,EAAM;AAAA,IAC1F;AAAA,EACF,CAAC,CAAA;AACH;AAwBA,SAAS,QAAA,CAAS,SAAiB,IAAA,EAAyB;AAC1D,EAAA,QAAQ,IAAA,CAAK,MAAM,IAAA;AAAM,IACvB,KAAK,cAAA;AACH,MAAA,OAAO,mBAAA,CAAoB,OAAA,EAAS,IAAA,CAAK,KAAK,CAAA;AAAA,IAChD,KAAK,WAAA;AACH,MAAA,OAAO,gBAAA,CAAiB,OAAA,EAAS,IAAA,CAAK,KAAK,CAAA;AAAA,IAC7C,KAAK,QAAA;AACH,MAAA,OAAO,cAAA,CAAe,IAAA,CAAK,KAAA,EAAO,OAAO,CAAA;AAAA;AAE/C;;;ANpFA,eAAsB,cAAc,QAAA,EAA6C;AAC/E,EAAA,MAAM,EAAE,KAAA,EAAM,GAAI,MAAM,gBAAgB,QAAQ,CAAA;AAGhD,EAAA,MAAM,YAAY,KAAA,CAAM,MAAA,CAAO,CAAC,IAAA,KAAS,IAAA,CAAK,SAAS,GAAG,CAAA;AAC1D,EAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAC1B,IAAA,OAAO,EAAE,QAAA,EAAU,EAAC,EAAG,YAAA,EAAc,EAAC,EAAE;AAAA,EAC1C;AAGA,EAAA,MAAM,WAAA,GAAc,SAAA,CAAU,QAAA,EAAU,SAAS,CAAA;AACjD,EAAA,MAAM,WAAyC,EAAC;AAEhD,EAAA,KAAA,MAAW,QAAQ,SAAA,EAAW;AAC5B,IAAA,MAAM,GAAA,GAAM,KAAK,OAAA,CAAS,GAAA;AAC1B,IAAA,KAAA,MAAW,WAAW,WAAA,EAAa;AACjC,MAAA,MAAM,UAAUA,YAAAA,CAAa1B,IAAAA,CAAK,QAAA,EAAU,OAAO,GAAG,OAAO,CAAA;AAC7D,MAAA,IAAI,OAAA,CAAQ,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,QAAA,CAAS,IAAA,CAAK;AAAA,UACZ,IAAA,EAAM,OAAA;AAAA,UACN,KAAA,EAAO,CAAA,oBAAA,EAAuB,GAAG,CAAA,GAAA,EAAM,KAAK,OAAO,CAAA,CAAA,CAAA;AAAA,UACnD,QAAA,EAAU,SAAA;AAAA,UACV,iBAAiB,IAAA,CAAK,EAAA;AAAA,UACtB,YAAA,EAAc,KAAK,OAAA,CAAS,IAAA,GAAO,QAAQ,IAAA,CAAK,OAAA,CAAS,IAAI,CAAA,CAAA,GAAK,MAAA;AAAA,UAClE,MAAA,EAAQ;AAAA,SACT,CAAA;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,QAAA,EAAU,YAAA,EAAc,WAAA,EAAY;AAC/C;;;AOnCO,SAAS,WAAW,QAAA,EAAoC;AAC7D,EAAA,IAAI,MAAA;AACJ,EAAA,IAAI;AACF,IAAA,MAAA,GAAS,eAAe,QAAQ,CAAA;AAAA,EAClC,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,OAAA,GAAU,GAAA,YAAe,KAAA,GAAQ,GAAA,CAAI,OAAA,GAAU,6BAAA;AACrD,IAAA,OAAO;AAAA,MACL,UAAU,CAAC;AAAA,QACT,IAAA,EAAM,oBAAA;AAAA,QACN,KAAA,EAAO,gCAAgC,OAAO,CAAA,CAAA;AAAA,QAC9C,QAAA,EAAU,OAAA;AAAA,QACV,MAAA,EAAQ;A
AAA,OACT,CAAA;AAAA,MACD,cAAc;AAAC,KACjB;AAAA,EACF;AAEA,EAAA,IAAI,MAAA,CAAO,KAAA,CAAM,MAAA,KAAW,CAAA,EAAG;AAC7B,IAAA,OAAO,EAAE,QAAA,EAAU,EAAC,EAAG,YAAA,EAAc,EAAC,EAAE;AAAA,EAC1C;AAEA,EAAA,MAAM,OAAA,GAAU,QAAA,CAAS,QAAA,EAAU,MAAA,CAAO,KAAK,CAAA;AAC/C,EAAA,MAAM,WAAyC,EAAC;AAChD,EAAA,MAAM,eAAA,uBAAsB,GAAA,EAAY;AAExC,EAAA,KAAA,MAAW,UAAU,OAAA,EAAS;AAC5B,IAAA,KAAA,MAAW,SAAA,IAAa,OAAO,UAAA,EAAY;AACzC,MAAA,IAAI,UAAU,IAAA,EAAM;AAClB,QAAA,eAAA,CAAgB,GAAA,CAAI,UAAU,IAAI,CAAA;AAAA,MACpC;AACA,MAAA,QAAA,CAAS,IAAA,CAAK;AAAA,QACZ,IAAA,EAAM,UAAU,IAAA,IAAQ,EAAA;AAAA,QACxB,OAAO,SAAA,CAAU,OAAA;AAAA,QACjB,QAAA,EAAU,OAAO,IAAA,CAAK,QAAA;AAAA,QACtB,YAAA,EAAc,OAAO,IAAA,CAAK,WAAA;AAAA,QAC1B,MAAA,EAAQ;AAAA,OACT,CAAA;AAAA,IACH;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,QAAA,EAAU,YAAA,EAAc,CAAC,GAAG,eAAe,CAAA,EAAE;AACxD;;;ACxCA,eAAsB,QAAA,CACpB,QAAA,EACA,OAAA,GAAwB,EAAC,EACH;AACtB,EAAA,MAAM,EAAE,YAAA,GAAe,IAAA,EAAM,kBAAkB,IAAA,EAAM,cAAA,GAAiB,MAAK,GAAI,OAAA;AAE/E,EAAA,MAAM,WAA2B,EAAC;AAClC,EAAA,MAAM,eAAA,uBAAsB,GAAA,EAAY;AAExC,EAAA,SAAS,QAAQ,MAAA,EAAgC;AAC/C,IAAA,QAAA,CAAS,IAAA,CAAK,GAAG,MAAA,CAAO,QAAQ,CAAA;AAChC,IAAA,KAAA,MAAW,CAAA,IAAK,OAAO,YAAA,EAAc;AACnC,MAAA,eAAA,CAAgB,IAAI,CAAC,CAAA;AAAA,IACvB;AAAA,EACF;AAEA,EAAA,IAAI,YAAA,EAAc;AAChB,IAAA,OAAA,CAAQ,UAAA,CAAW,QAAQ,CAAC,CAAA;AAAA,EAC9B;AAEA,EAAA,IAAI,eAAA,EAAiB;AACnB,IAAA,OAAA,CAAQ,MAAM,aAAA,CAAc,QAAQ,CAAC,CAAA;AAAA,EACvC;AAEA,EAAA,IAAI,cAAA,EAAgB;AAClB,IAAA,OAAA,CAAQ,MAAM,YAAA,CAAa,QAAQ,CAAC,CAAA;AAAA,EACtC;AAEA,EAAA,MAAM,MAAA,GAAS,SAAS,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,QAAA,KAAa,OAAO,CAAA,CAAE,MAAA;AAC9D,EAAA,MAAM,QAAA,GAAW,SAAS,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,QAAA,KAAa,SAAS,CAAA,CAAE,MAAA;AAClE,EAAA,MAAM,KAAA,GAAQ,SAAS,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,QAAA,KAAa,MAAM,CAAA,CAAE,MAAA;AAE5D,EAAA,OAAO;AAAA,IACL,QAAA;AAAA,IACA,SAAS,EAAE,MAAA,EAAQ,UAAU,KAAA,EAAO,YAAA,EAAc,gBAAgB,IAAA,EAAK;AAAA,IACvE,SAAA,EAAA,iBAAW,IAAI,IAAA,EAAK,EAAE,WAAA;AAAY,GACpC;AACF;AC9CO,IAAM,kBAAA,GAAqBQ,EAAE,MAAA,CAAO;AAAA,EACzC,IAAA,EAAMA,EAAE,MAAA,EAAO;AAAA,EACf,KAAA,EAAOA,EAAE,MAAA,EAAO;AAAA,EAChB,UAAUA,CAAAA,CAAE,IAAA,CAAK,CAAC,OAAA,EAAS,SAAA,EAAW,MAAM,CAAC,CAAA;AAAA,EAC7C,eAAA,EAAiBA,CAAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA,EACrC,YAAA,EAAcA,CAAAA,CAAE,MAAA,EAAO,CAAE,QAAA,EAAS;AAAA,EAClC,QAAQA,CAAAA,CAAE,IAAA,CAAK,CAAC,MAAA,EAAQ,SAAA,EAAW,QAAQ,CAAC;AAC9C,CAAC;AAGM,IAAM,kBAAA,GAAqBA,EAAE,MAAA,CAAO;AAAA,EACzC,MAAA,EAAQA,EAAE,MAAA,EAAO;AAAA,EACjB,QAAA,EAAUA,EAAE,MAAA,EAAO;AAAA,EACnB,KAAA,EAAOA,EAAE,MAAA,EAAO;AAAA,EAChB,YAAA,EAAcA,EAAE,MAAA;AAClB,CAAC,CAAA;AAGM,IAAM,iBAAA,GAAoBA,EAAE,MAAA,CAAO;AAAA,EACxC,QAAA,EAAUA,CAAAA,CAAE,KAAA,CAAM,kBAAkB,CAAA;AAAA,EACpC,OAAA,EAAS,kBAAA;AAAA,EACT,SAAA,EAAWA,EAAE,MAAA;AACf,CAAC;;;ACnBD,eAAA,EAAA;AAEA,qBAAA,EAAA;;;ACAA,eAAA,EAAA;AACA,qBAAA,EAAA;;;ACFA,eAAA,EAAA;AACAF,YAAAA,EAAAA;AACA,YAAA,EAAA;ACFA,eAAA,EAAA;AACA,YAAA,EAAA;AAEAA,YAAAA,EAAAA;;;AtC2LA,aAAA,EAAA;AuC5LO,IAAM,gBAAA,GAAmBE,EAAE,IAAA,CAAK;AAAA,EACrC,QAAA;AAAA,EACA,MAAA;AAAA,EACA,QAAA;AAAA,EACA,eAAA;AAAA,EACA,UAAA;AAAA,EACA,SAAA;AAAA,EACA;AACF,CAAC;AAGM,IAAM,gBAAA,GAAmBA,EAAE,MAAA,CAAO;AAAA,EACvC,MAAA,EAAQ,gBAAA;AAAA,EACR,UAAA,EAAYA,CAAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AACzB,CAAC;AAMD,IAAM,eAAA,GAGD;AAAA,EACH;AAAA,IACE,MAAA,EAAQ,QAAA;AAAA,IACR,OAAA,EAAS;AAAA;AAAA,MAEP,kBAAA;AAAA,MACA,mBAAA;AAAA,MACA,mBAAA;AAAA,MACA,kBAAA;AAAA,MACA,mBAAA;AAAA,MACA,mBAAA;AAAA;AAAA,MAEA,cAAA;AAAA,MACA,eAAA;AAAA,MACA,gBAAA;AAAA,MACA,eAAA;AAAA,MACA;AAAA;AACF,GACF;AAAA,EACA;AAAA,IACE,MAAA,EAAQ,MAAA;AAAA,IACR,OAAA,EAAS,CAAC,WAAA,EAAa,YAAY;AAAA,GACrC;AAAA,EACA;AAAA,IACE,MAAA,EAAQ,QAAA;AAAA,IACR,OAAA,EAAS,CAAC,aAAA,EAAe,cAAc;AAAA,GACzC;AAAA,EACA;AAAA,IAC
E,MAAA,EAAQ,eAAA;AAAA,IACR,OAAA,EAAS,CAAC,eAAA,EAAiB,gBAAA,EAAkB,kBAAkB,gBAAgB;AAAA,GACjF;AAAA,EACA;AAAA,IACE,MAAA,EAAQ,UAAA;AAAA,IACR,OAAA,EAAS,CAAC,cAAc;AAAA,GAC1B;AAAA,EACA;AAAA,IACE,MAAA,EAAQ,SAAA;AAAA,IACR,OAAA,EAAS,CAAC,cAAA,EAAgB,eAAe;AAAA;AAE7C,CAAA;AAGA,SAAS,OAAA,GAAsB;AAC7B,EAAA,OAAO,EAAE,MAAA,EAAQ,SAAA,EAAW,UAAA,EAAY,IAAA,EAAK;AAC/C;AAGA,SAAS,OAAO,QAAA,EAA2B;AACzC,EAAA,IAAI;AACF,IAAA,OAAOuB,QAAAA,CAAS,QAAQ,CAAA,CAAE,MAAA,EAAO;AAAA,EACnC,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,KAAA;AAAA,EACT;AACF;AAMA,SAAS,iBAAiB,QAAA,EAA2B;AACnD,EAAA,MAAM,QAAA,GAAW/B,IAAAA,CAAK,QAAA,EAAU,gBAAgB,CAAA;AAChD,EAAA,IAAI;AACF,IAAA,MAAM,OAAA,GAAU0B,YAAAA,CAAa,QAAA,EAAU,OAAO,CAAA;AAE9C,IAAA,OAAO,qBAAA,CAAsB,KAAK,OAAO,CAAA;AAAA,EAC3C,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,KAAA;AAAA,EACT;AACF;AAQO,SAAS,aAAa,QAAA,EAA8B;AACzD,EAAA,IAAI;AACF,IAAA,KAAA,MAAW,QAAQ,eAAA,EAAiB;AAClC,MAAA,KAAA,MAAW,MAAA,IAAU,KAAK,OAAA,EAAS;AACjC,QAAA,IAAI,MAAA,CAAO1B,IAAAA,CAAK,QAAA,EAAU,MAAM,CAAC,CAAA,EAAG;AAClC,UAAA,OAAO,EAAE,MAAA,EAAQ,IAAA,CAAK,MAAA,EAAQ,YAAY,MAAA,EAAO;AAAA,QACnD;AAAA,MACF;AAGA,MAAA,IAAI,IAAA,CAAK,MAAA,KAAW,MAAA,IAAU,gBAAA,CAAiB,QAAQ,CAAA,EAAG;AACxD,QAAA,OAAO,EAAE,MAAA,EAAQ,MAAA,EAAQ,UAAA,EAAY,gBAAA,EAAiB;AAAA,MACxD;AAAA,IACF;AAAA,EACF,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,OAAA,EAAQ;AAAA,EACjB;AAEA,EAAA,OAAO,OAAA,EAAQ;AACjB;;;AvCqFA,UAAA,EAAA","file":"index.js","sourcesContent":["/**\n * Memory item type definitions using Zod schemas.\n *\n * Supports 4 memory item types via discriminated union:\n * - lesson: Knowledge learned from mistakes\n * - solution: Problem-resolution pairs\n * - pattern: Code pattern transformations (bad -> good)\n * - preference: User workflow preferences\n *\n * Deletion model:\n * - Set `deleted: true` and `deletedAt` on an item to mark it deleted\n * - LegacyTombstoneSchema handles backward-compat reads of old\n * minimal tombstone records { id, deleted: true, deletedAt }\n * - LegacyLessonSchema handles old quick/full type records\n */\n\nimport { createHash } from 'node:crypto';\nimport { z } from 'zod';\n\n// Source of lesson capture\nexport const SourceSchema = z.enum([\n 'user_correction',\n 'self_correction',\n 'test_failure',\n 'manual',\n]);\n\n// Context about when lesson was learned\nexport const ContextSchema = z.object({\n tool: z.string(),\n intent: z.string(),\n});\n\n// Code pattern (bad -> good)\nexport const PatternSchema = z.object({\n bad: z.string(),\n good: z.string(),\n});\n\n// Citation for lesson provenance tracking\nexport const CitationSchema = z.object({\n file: z.string().min(1), // Source file path (required, non-empty)\n line: z.number().int().positive().optional(), // Line number (optional, must be positive)\n commit: z.string().optional(), // Git commit hash (optional)\n});\n\n// Severity levels for lessons\nexport const SeveritySchema = z.enum(['high', 'medium', 'low']);\n\n// Compaction levels for age-based validity\nexport const CompactionLevelSchema = z.union([\n z.literal(0), // Active\n z.literal(1), // Flagged (>90 days)\n z.literal(2), // Archived\n]);\n\n/** @deprecated Use MemoryItemTypeSchema instead. Kept for parsing old JSONL records. */\nexport const LessonTypeSchema = z.enum(['quick', 'full']);\n\n/** Memory item type enum: lesson, solution, pattern, preference. 
*/\nexport const MemoryItemTypeSchema = z.enum(['lesson', 'solution', 'pattern', 'preference']);\n\n// ---------------------------------------------------------------------------\n// Base fields shared by all memory item types\n// ---------------------------------------------------------------------------\n\nconst baseFields = {\n // Core identity (required)\n id: z.string(),\n trigger: z.string(),\n insight: z.string(),\n\n // Metadata (required)\n tags: z.array(z.string()),\n source: SourceSchema,\n context: ContextSchema,\n created: z.string(), // ISO8601\n confirmed: z.boolean(),\n\n // Relationships (required, can be empty arrays)\n supersedes: z.array(z.string()),\n related: z.array(z.string()),\n\n // Extended fields (optional)\n evidence: z.string().optional(),\n severity: SeveritySchema.optional(),\n\n // Lifecycle fields (optional)\n deleted: z.boolean().optional(),\n deletedAt: z.string().optional(),\n retrievalCount: z.number().optional(),\n\n // Provenance tracking (optional)\n citation: CitationSchema.optional(),\n\n // Age-based validity fields (optional)\n compactionLevel: CompactionLevelSchema.optional(),\n compactedAt: z.string().optional(),\n lastRetrieved: z.string().optional(),\n\n // Invalidation fields (optional)\n invalidatedAt: z.string().optional(),\n invalidationReason: z.string().optional(),\n} as const;\n\n// ---------------------------------------------------------------------------\n// Type-specific schemas\n// ---------------------------------------------------------------------------\n\n/**\n * Lesson memory item schema.\n * Replaces the old quick/full distinction with a single 'lesson' type.\n * Pattern field is optional for lessons.\n */\nexport const LessonItemSchema = z.object({\n ...baseFields,\n type: z.literal('lesson'),\n pattern: PatternSchema.optional(),\n});\n\n/**\n * Solution memory item schema.\n * Uses trigger as \"problem\" and insight as \"resolution\".\n * Pattern field is optional.\n */\nexport const SolutionItemSchema = z.object({\n ...baseFields,\n type: z.literal('solution'),\n pattern: PatternSchema.optional(),\n});\n\n/**\n * Pattern memory item schema.\n * Pattern field is REQUIRED (bad -> good code transformation).\n */\nexport const PatternItemSchema = z.object({\n ...baseFields,\n type: z.literal('pattern'),\n pattern: PatternSchema,\n});\n\n/**\n * Preference memory item schema.\n * Captures user workflow preferences.\n * Pattern field is optional.\n */\nexport const PreferenceItemSchema = z.object({\n ...baseFields,\n type: z.literal('preference'),\n pattern: PatternSchema.optional(),\n});\n\n// ---------------------------------------------------------------------------\n// Discriminated union of all memory item types\n// ---------------------------------------------------------------------------\n\n/**\n * Unified memory item schema (discriminated union on 'type' field).\n * Accepts: lesson, solution, pattern, preference.\n */\nexport const MemoryItemSchema = z.discriminatedUnion('type', [\n LessonItemSchema,\n SolutionItemSchema,\n PatternItemSchema,\n PreferenceItemSchema,\n]);\n\n// ---------------------------------------------------------------------------\n// Backward compatibility\n// ---------------------------------------------------------------------------\n\n/**\n * Legacy lesson schema for reading old JSONL records with type: 'quick' | 'full'.\n * Use this only for parsing existing data files; new records use MemoryItemSchema.\n */\nexport const LegacyLessonSchema = z.object({\n ...baseFields,\n type: 
LessonTypeSchema,\n pattern: PatternSchema.optional(),\n});\n\n/**\n * LessonSchema - now equivalent to LessonItemSchema.\n *\n * For backward compatibility, existing code that imports LessonSchema\n * continues to work. The type field is now z.literal('lesson').\n *\n * To parse old quick/full records, use LegacyLessonSchema.\n */\nexport const LessonSchema = LessonItemSchema;\n\n/**\n * Legacy tombstone format for backward-compatible reads.\n * Old JSONL files may contain minimal { id, deleted, deletedAt } records.\n */\nexport const LegacyTombstoneSchema = z.object({\n id: z.string(),\n deleted: z.literal(true),\n deletedAt: z.string(), // ISO8601\n});\n\n/**\n * LessonRecord schema - union for reading JSONL files.\n *\n * Accepts:\n * 1. Any new memory item type (lesson, solution, pattern, preference)\n * 2. A legacy lesson (type: 'quick' | 'full')\n * 3. A legacy tombstone (minimal: { id, deleted: true, deletedAt })\n */\nexport const LessonRecordSchema = z.union([\n MemoryItemSchema,\n LegacyLessonSchema,\n LegacyTombstoneSchema,\n]);\n\n/**\n * MemoryItemRecord schema - alias for LessonRecordSchema.\n * Parses all memory item types plus legacy formats.\n */\nexport const MemoryItemRecordSchema = LessonRecordSchema;\n\n// ---------------------------------------------------------------------------\n// Type exports\n// ---------------------------------------------------------------------------\n\nexport type Lesson = z.infer<typeof LessonSchema>;\n/** @deprecated Use MemoryItemType instead. */\nexport type LessonType = z.infer<typeof LessonTypeSchema>;\nexport type LessonRecord = z.infer<typeof LessonRecordSchema>;\nexport type Source = z.infer<typeof SourceSchema>;\nexport type Severity = z.infer<typeof SeveritySchema>;\nexport type Context = z.infer<typeof ContextSchema>;\nexport type Pattern = z.infer<typeof PatternSchema>;\nexport type Citation = z.infer<typeof CitationSchema>;\nexport type CompactionLevel = z.infer<typeof CompactionLevelSchema>;\n\n/** Unified memory item type (discriminated union). */\nexport type MemoryItem = z.infer<typeof MemoryItemSchema>;\n/** Memory item type enum: 'lesson' | 'solution' | 'pattern' | 'preference'. */\nexport type MemoryItemType = z.infer<typeof MemoryItemTypeSchema>;\n/** Solution memory item. */\nexport type Solution = z.infer<typeof SolutionItemSchema>;\n/** Pattern memory item (not to be confused with Pattern = {bad, good}). */\nexport type PatternItem = z.infer<typeof PatternItemSchema>;\n/** Preference memory item. */\nexport type Preference = z.infer<typeof PreferenceItemSchema>;\n/** Record type for reading JSONL files (all types + legacy). */\nexport type MemoryItemRecord = z.infer<typeof MemoryItemRecordSchema>;\n\n// ---------------------------------------------------------------------------\n// ID generation\n// ---------------------------------------------------------------------------\n\n/** Prefix mapping for memory item types. */\nconst TYPE_PREFIXES: Record<MemoryItemType, string> = {\n lesson: 'L',\n solution: 'S',\n pattern: 'P',\n preference: 'R',\n};\n\n/**\n * Generate deterministic memory item ID from insight text.\n * Format: {prefix} + 16 hex characters from SHA-256 hash (64 bits of entropy).\n *\n * @param insight - The insight text to hash\n * @param type - Memory item type (default: 'lesson' for backward compat)\n * @returns ID string like L1a2b3c4d5e6f7g8h\n */\nexport function generateId(insight: string, type?: MemoryItemType): string {\n const prefix = TYPE_PREFIXES[type ?? 
'lesson'];\n const hash = createHash('sha256').update(insight).digest('hex');\n return `${prefix}${hash.slice(0, 16)}`;\n}\n","/**\n * JSONL storage layer for memory items\n *\n * Append-only storage with last-write-wins deduplication.\n * Source of truth - git trackable.\n *\n * Primary API:\n * appendMemoryItem() - Append any memory item type\n * readMemoryItems() - Read all non-deleted memory items\n *\n * Backward-compatible API:\n * appendLesson() - Append a lesson (delegates to appendMemoryItem)\n * readLessons() - Read lesson-type items only\n *\n * Deletion: append the item with `deleted: true` and `deletedAt`.\n * Read path also accepts old minimal tombstone records for backward compat.\n * Legacy type:'quick'/'full' records are converted to type:'lesson' on read.\n */\n\nimport { appendFile, mkdir, readFile } from 'node:fs/promises';\nimport { dirname, join } from 'node:path';\nimport {\n MemoryItemRecordSchema,\n type Lesson,\n type LessonRecord,\n type MemoryItem,\n} from '../types.js';\n\n/** Relative path to lessons file from repo root */\nexport const LESSONS_PATH = '.claude/lessons/index.jsonl';\n\n/** Options for reading memory items */\nexport interface ReadLessonsOptions {\n /** If true, throw on first parse error. Default: false (skip errors) */\n strict?: boolean;\n /** Callback for each parse error in non-strict mode */\n onParseError?: (error: ParseError) => void;\n}\n\n/** Parse error details */\nexport interface ParseError {\n /** 1-based line number */\n line: number;\n /** Error message */\n message: string;\n /** Original error */\n cause: unknown;\n}\n\n/** Result of reading lessons (backward-compat) */\nexport interface ReadLessonsResult {\n /** Successfully parsed lessons */\n lessons: Lesson[];\n /** Number of lines skipped due to errors */\n skippedCount: number;\n}\n\n/** Result of reading memory items */\nexport interface ReadMemoryItemsResult {\n /** Successfully parsed memory items */\n items: MemoryItem[];\n /** IDs that were tombstoned (deleted) */\n deletedIds: Set<string>;\n /** Number of lines skipped due to errors */\n skippedCount: number;\n}\n\n\n/**\n * Append a memory item to the JSONL file.\n * Creates directory structure if missing.\n * Primary write function for all memory item types.\n *\n * @param repoRoot - Repository root directory\n * @param item - Memory item to append (any type: lesson, solution, pattern, preference)\n */\nexport async function appendMemoryItem(repoRoot: string, item: MemoryItem): Promise<void> {\n const filePath = join(repoRoot, LESSONS_PATH);\n await mkdir(dirname(filePath), { recursive: true });\n\n const line = JSON.stringify(item) + '\\n';\n await appendFile(filePath, line, 'utf-8');\n}\n\n/**\n * Append a lesson to the JSONL file.\n * Backward-compatible wrapper around appendMemoryItem.\n *\n * @param repoRoot - Repository root directory\n * @param lesson - Lesson to append\n */\nexport async function appendLesson(repoRoot: string, lesson: Lesson): Promise<void> {\n return appendMemoryItem(repoRoot, lesson);\n}\n\n/**\n * Parse and validate a single JSON line.\n *\n * Accepts:\n * - New memory item types (lesson, solution, pattern, preference)\n * - Legacy lessons (type: 'quick' | 'full')\n * - Canonical tombstones ({ id, deleted: true, deletedAt })\n * - Legacy tombstones (full record with deleted:true)\n *\n * @returns Parsed record or null if invalid\n */\nfunction parseJsonLine(\n line: string,\n lineNumber: number,\n strict: boolean,\n onParseError?: (error: ParseError) => void\n): LessonRecord | null {\n 
// Try to parse JSON\n let parsed: unknown;\n try {\n parsed = JSON.parse(line);\n } catch (err) {\n const parseError: ParseError = {\n line: lineNumber,\n message: `Invalid JSON: ${(err as Error).message}`,\n cause: err,\n };\n if (strict) {\n throw new Error(`Parse error on line ${lineNumber}: ${parseError.message}`);\n }\n onParseError?.(parseError);\n return null;\n }\n\n // Validate against MemoryItemRecordSchema (accepts all types + legacy)\n const result = MemoryItemRecordSchema.safeParse(parsed);\n if (!result.success) {\n const parseError: ParseError = {\n line: lineNumber,\n message: `Schema validation failed: ${result.error.message}`,\n cause: result.error,\n };\n if (strict) {\n throw new Error(`Parse error on line ${lineNumber}: ${parseError.message}`);\n }\n onParseError?.(parseError);\n return null;\n }\n\n return result.data;\n}\n\n/**\n * Convert a parsed record to a MemoryItem.\n * Legacy type:'quick'/'full' records are converted to type:'lesson'.\n * Returns null for tombstone-only records (no MemoryItem data).\n */\nfunction toMemoryItem(record: LessonRecord): MemoryItem | null {\n // Type guard: exclude tombstone variant (no `type` field)\n if (record.deleted === true) return null;\n\n // Legacy type conversion: quick/full -> lesson\n if (record.type === 'quick' || record.type === 'full') {\n return { ...record, type: 'lesson' } as MemoryItem;\n }\n\n // Already a valid MemoryItem type\n return record as MemoryItem;\n}\n\n/**\n * Read all non-deleted memory items from the JSONL file.\n * Primary read function for the unified memory API.\n *\n * Applies last-write-wins deduplication by ID.\n * Converts legacy type:'quick'/'full' to type:'lesson'.\n *\n * Handles tombstone formats:\n * - Canonical: { id, deleted: true, deletedAt }\n * - Legacy: Full record with deleted:true field\n *\n * @param repoRoot - Repository root directory\n * @param options - Optional settings for error handling\n * @returns Result with items array and count of skipped lines\n */\nexport async function readMemoryItems(\n repoRoot: string,\n options: ReadLessonsOptions = {}\n): Promise<ReadMemoryItemsResult> {\n const { strict = false, onParseError } = options;\n const filePath = join(repoRoot, LESSONS_PATH);\n\n let content: string;\n try {\n content = await readFile(filePath, 'utf-8');\n } catch (err) {\n if ((err as NodeJS.ErrnoException).code === 'ENOENT') {\n return { items: [], deletedIds: new Set<string>(), skippedCount: 0 };\n }\n throw err;\n }\n\n const items = new Map<string, MemoryItem>();\n const deletedIds = new Set<string>();\n let skippedCount = 0;\n\n const lines = content.split('\\n');\n for (let i = 0; i < lines.length; i++) {\n const trimmed = lines[i]!.trim();\n if (!trimmed) continue;\n\n const record = parseJsonLine(trimmed, i + 1, strict, onParseError);\n if (!record) {\n skippedCount++;\n continue;\n }\n\n // Check if record is a tombstone (canonical or legacy)\n if (record.deleted === true) {\n items.delete(record.id);\n deletedIds.add(record.id);\n } else {\n const item = toMemoryItem(record);\n if (item) {\n items.set(record.id, item);\n }\n }\n }\n\n return { items: Array.from(items.values()), deletedIds, skippedCount };\n}\n\n/**\n * Read all non-deleted lessons from the JSONL file.\n * Backward-compatible wrapper that filters to lesson-type items only.\n *\n * @param repoRoot - Repository root directory\n * @param options - Optional settings for error handling\n * @returns Result with lessons array and count of skipped lines\n */\nexport async function 
readLessons(\n repoRoot: string,\n options: ReadLessonsOptions = {}\n): Promise<ReadLessonsResult> {\n const result = await readMemoryItems(repoRoot, options);\n\n // Filter to lesson-type items only\n const lessons = result.items.filter((item): item is Lesson => item.type === 'lesson');\n\n return { lessons, skippedCount: result.skippedCount };\n}\n","/**\n * SQLite availability check.\n *\n * Verifies that better-sqlite3 can be loaded. If it cannot, an error\n * is thrown -- there is no silent fallback to JSONL-only mode.\n */\n\nimport { createRequire } from 'node:module';\nimport type { Database as DatabaseType } from 'better-sqlite3';\n\n// Create require function for ESM compatibility\nconst require = createRequire(import.meta.url);\n\n/** Cached availability state */\nlet checked = false;\nlet DatabaseConstructor: (new (path: string) => DatabaseType) | null = null;\n\n/**\n * Ensure SQLite (better-sqlite3) is loadable.\n * Throws a clear error if the native module cannot be loaded.\n */\nexport function ensureSqliteAvailable(): void {\n if (checked) return;\n\n try {\n const module = require('better-sqlite3');\n const Constructor = module.default || module;\n const testDb = new Constructor(':memory:');\n testDb.close();\n DatabaseConstructor = Constructor;\n checked = true;\n } catch (cause) {\n throw new Error(\n 'better-sqlite3 failed to load.\\n' +\n 'For pnpm projects:\\n' +\n ' 1. Ensure package.json has: \"pnpm\": { \"onlyBuiltDependencies\": [\"better-sqlite3\"] }\\n' +\n ' 2. Run: pnpm install && pnpm rebuild better-sqlite3\\n' +\n 'For npm/yarn projects:\\n' +\n ' Run: npm rebuild better-sqlite3\\n' +\n 'If the error persists, check that build tools (python3, make, g++) are installed.',\n { cause }\n );\n }\n}\n\n/**\n * Get the SQLite Database constructor.\n * @returns Database constructor (never null -- throws if unavailable)\n */\nexport function getDatabaseConstructor(): new (path: string) => DatabaseType {\n ensureSqliteAvailable();\n return DatabaseConstructor!;\n}\n\n/**\n * Reset the cached SQLite availability state.\n * Used after rebuilding native modules to force a fresh check.\n */\nexport function resetSqliteAvailability(): void {\n checked = false;\n DatabaseConstructor = null;\n}\n","/**\n * SQLite schema definition for lessons database.\n *\n * The SQLite database is a rebuildable cache (JSONL is source of truth).\n * When SCHEMA_VERSION changes, the DB file is deleted and recreated.\n */\n\nimport type { Database as DatabaseType } from 'better-sqlite3';\n\n/**\n * Schema version for the SQLite cache.\n * Bump this when making incompatible schema changes.\n * The connection module auto-rebuilds when the DB version is older.\n */\nexport const SCHEMA_VERSION = 5;\n\n/** SQL schema for lessons database with FTS5 full-text search */\nconst SCHEMA_SQL = `\n CREATE TABLE IF NOT EXISTS lessons (\n id TEXT PRIMARY KEY,\n type TEXT NOT NULL,\n trigger TEXT NOT NULL,\n insight TEXT NOT NULL,\n evidence TEXT,\n severity TEXT,\n tags TEXT NOT NULL DEFAULT '',\n source TEXT NOT NULL,\n context TEXT NOT NULL DEFAULT '{}',\n supersedes TEXT NOT NULL DEFAULT '[]',\n related TEXT NOT NULL DEFAULT '[]',\n created TEXT NOT NULL,\n confirmed INTEGER NOT NULL DEFAULT 0,\n deleted INTEGER NOT NULL DEFAULT 0,\n retrieval_count INTEGER NOT NULL DEFAULT 0,\n last_retrieved TEXT,\n embedding BLOB,\n content_hash TEXT,\n embedding_insight BLOB,\n content_hash_insight TEXT,\n invalidated_at TEXT,\n invalidation_reason TEXT,\n citation_file TEXT,\n citation_line INTEGER,\n 
citation_commit TEXT,\n compaction_level INTEGER DEFAULT 0,\n compacted_at TEXT,\n pattern_bad TEXT,\n pattern_good TEXT\n );\n\n CREATE VIRTUAL TABLE IF NOT EXISTS lessons_fts USING fts5(\n id, trigger, insight, tags, pattern_bad, pattern_good,\n content='lessons', content_rowid='rowid'\n );\n\n CREATE TRIGGER IF NOT EXISTS lessons_ai AFTER INSERT ON lessons BEGIN\n INSERT INTO lessons_fts(rowid, id, trigger, insight, tags, pattern_bad, pattern_good)\n VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags, new.pattern_bad, new.pattern_good);\n END;\n\n CREATE TRIGGER IF NOT EXISTS lessons_ad AFTER DELETE ON lessons BEGIN\n INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags, pattern_bad, pattern_good)\n VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags, old.pattern_bad, old.pattern_good);\n END;\n\n CREATE TRIGGER IF NOT EXISTS lessons_au AFTER UPDATE OF id, trigger, insight, tags, pattern_bad, pattern_good ON lessons BEGIN\n INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags, pattern_bad, pattern_good)\n VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags, old.pattern_bad, old.pattern_good);\n INSERT INTO lessons_fts(rowid, id, trigger, insight, tags, pattern_bad, pattern_good)\n VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags, new.pattern_bad, new.pattern_good);\n END;\n\n CREATE INDEX IF NOT EXISTS idx_lessons_created ON lessons(created);\n CREATE INDEX IF NOT EXISTS idx_lessons_confirmed ON lessons(confirmed);\n CREATE INDEX IF NOT EXISTS idx_lessons_severity ON lessons(severity);\n CREATE INDEX IF NOT EXISTS idx_lessons_type ON lessons(type);\n\n CREATE TABLE IF NOT EXISTS metadata (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL\n );\n`;\n\n/**\n * Create the database schema and set the version pragma.\n * @param database - SQLite database instance\n */\nexport function createSchema(database: DatabaseType): void {\n database.exec(SCHEMA_SQL);\n const current = database.pragma('user_version', { simple: true }) as number;\n if (current !== SCHEMA_VERSION) {\n database.pragma(`user_version = ${SCHEMA_VERSION}`);\n }\n}\n","/**\n * SQLite database connection management.\n */\n\nimport { mkdirSync, unlinkSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport type { Database as DatabaseType } from 'better-sqlite3';\n\nimport type { DbOptions } from './types.js';\nimport { getDatabaseConstructor } from './availability.js';\nimport { createSchema, SCHEMA_VERSION } from './schema.js';\n\n/** Relative path to database file from repo root */\nexport const DB_PATH = '.claude/.cache/lessons.sqlite';\n\n/** Database connections keyed by resolved DB path */\nconst dbMap = new Map<string, DatabaseType>();\n\n/**\n * Check if the database has the expected schema version.\n * @param database - SQLite database instance\n * @returns true if the version matches SCHEMA_VERSION\n */\nfunction hasExpectedVersion(database: DatabaseType): boolean {\n const row = database.pragma('user_version', { simple: true }) as number;\n return row === SCHEMA_VERSION;\n}\n\n/**\n * Open the SQLite database connection.\n * If the database has an older schema version, it is deleted and recreated.\n * Throws if better-sqlite3 cannot be loaded.\n * @param repoRoot - Absolute path to repository root\n * @param options - Database options (e.g., inMemory for testing)\n * @returns Database instance\n */\nexport function openDb(repoRoot: string, options: DbOptions = {}): DatabaseType {\n const { inMemory = 
false } = options;\n\n // In-memory DBs are keyed by repoRoot so different repos stay isolated\n const key = inMemory ? `:memory:${repoRoot}` : join(repoRoot, DB_PATH);\n\n const cached = dbMap.get(key);\n if (cached) {\n return cached;\n }\n\n const Database = getDatabaseConstructor();\n let database: DatabaseType;\n\n if (inMemory) {\n database = new Database(':memory:');\n } else {\n const dir = dirname(key);\n mkdirSync(dir, { recursive: true });\n database = new Database(key);\n\n if (!hasExpectedVersion(database)) {\n database.close();\n try { unlinkSync(key); } catch { /* ENOENT is fine */ }\n database = new Database(key);\n }\n\n database.pragma('journal_mode = WAL');\n }\n\n createSchema(database);\n dbMap.set(key, database);\n return database;\n}\n\n/**\n * Close the SQLite database connection.\n */\nexport function closeDb(): void {\n for (const database of dbMap.values()) {\n database.close();\n }\n dbMap.clear();\n}\n","/**\n * Embedding cache operations for SQLite storage.\n */\n\nimport { createHash } from 'node:crypto';\nimport type { Database as DatabaseType } from 'better-sqlite3';\n\nimport type { CachedEmbeddingData } from './types.js';\nimport { openDb } from './connection.js';\n\n/**\n * Compute content hash for a lesson's trigger and insight.\n * Used to detect content changes for embedding cache invalidation.\n * @param trigger - The lesson trigger text\n * @param insight - The lesson insight text\n * @returns SHA-256 hash of the combined content\n */\nexport function contentHash(trigger: string, insight: string): string {\n return createHash('sha256').update(`${trigger} ${insight}`).digest('hex');\n}\n\n/**\n * Get cached embedding for a lesson.\n * @param repoRoot - Absolute path to repository root\n * @param lessonId - ID of the lesson\n * @param expectedHash - Optional content hash to validate cache freshness\n * @returns Embedding array or null if not cached\n */\nexport function getCachedEmbedding(\n repoRoot: string,\n lessonId: string,\n expectedHash?: string\n): Float32Array | null {\n const database = openDb(repoRoot);\n\n const row = database\n .prepare('SELECT embedding, content_hash FROM lessons WHERE id = ?')\n .get(lessonId) as { embedding: Buffer | null; content_hash: string | null } | undefined;\n\n if (!row || !row.embedding || !row.content_hash) {\n return null;\n }\n\n if (expectedHash && row.content_hash !== expectedHash) {\n return null;\n }\n\n return new Float32Array(\n row.embedding.buffer,\n row.embedding.byteOffset,\n row.embedding.byteLength / 4\n );\n}\n\n/**\n * Cache embedding for a lesson in SQLite.\n *\n * Uses UPDATE-only (not INSERT) — the row must already exist in the\n * lessons table. If the row hasn't been synced from JSONL yet, the\n * write is a silent no-op and the embedding will be recomputed on\n * next access. This is by-design: the cache is an optional\n * optimization, not the source of truth.\n *\n * @param repoRoot - Absolute path to repository root\n * @param lessonId - ID of the lesson\n * @param embedding - Embedding vector (Float32Array or number array)\n * @param hash - Content hash for cache validation\n */\nexport function setCachedEmbedding(\n repoRoot: string,\n lessonId: string,\n embedding: Float32Array | number[],\n hash: string\n): void {\n const database = openDb(repoRoot);\n\n const float32 = embedding instanceof Float32Array ? 
embedding : new Float32Array(embedding);\n const buffer = Buffer.from(float32.buffer, float32.byteOffset, float32.byteLength);\n\n database\n .prepare('UPDATE lessons SET embedding = ?, content_hash = ? WHERE id = ?')\n .run(buffer, hash, lessonId);\n}\n\n/** Entry returned by getCachedEmbeddingsBulk */\nexport interface CachedEmbeddingEntry {\n vector: Float32Array;\n hash: string;\n}\n\n/**\n * Bulk-read all cached embeddings in a single query.\n * Returns a Map of lessonId to {vector, hash} for every lesson\n * that has a cached embedding and content_hash.\n * Callers validate the hash themselves.\n */\nexport function getCachedEmbeddingsBulk(repoRoot: string): Map<string, CachedEmbeddingEntry> {\n const database = openDb(repoRoot);\n const rows = database\n .prepare('SELECT id, embedding, content_hash FROM lessons WHERE embedding IS NOT NULL')\n .all() as Array<{ id: string; embedding: Buffer; content_hash: string | null }>;\n\n const result = new Map<string, CachedEmbeddingEntry>();\n for (const row of rows) {\n if (!row.content_hash) continue;\n const float32 = new Float32Array(\n row.embedding.buffer,\n row.embedding.byteOffset,\n row.embedding.byteLength / 4\n );\n result.set(row.id, { vector: float32, hash: row.content_hash });\n }\n return result;\n}\n\n/**\n * Get cached insight-only embedding for a lesson.\n * Used by findSimilarLessons (insight-only hash, separate from searchVector's trigger+insight hash).\n */\nexport function getCachedInsightEmbedding(\n repoRoot: string,\n lessonId: string,\n expectedHash?: string\n): Float32Array | null {\n const database = openDb(repoRoot);\n\n const row = database\n .prepare('SELECT embedding_insight, content_hash_insight FROM lessons WHERE id = ?')\n .get(lessonId) as { embedding_insight: Buffer | null; content_hash_insight: string | null } | undefined;\n\n if (!row || !row.embedding_insight || !row.content_hash_insight) {\n return null;\n }\n\n if (expectedHash && row.content_hash_insight !== expectedHash) {\n return null;\n }\n\n return new Float32Array(\n row.embedding_insight.buffer,\n row.embedding_insight.byteOffset,\n row.embedding_insight.byteLength / 4\n );\n}\n\n/**\n * Cache insight-only embedding for a lesson in SQLite.\n * Uses UPDATE-only — the row must already exist.\n */\nexport function setCachedInsightEmbedding(\n repoRoot: string,\n lessonId: string,\n embedding: Float32Array | number[],\n hash: string\n): void {\n const database = openDb(repoRoot);\n\n const float32 = embedding instanceof Float32Array ? embedding : new Float32Array(embedding);\n const buffer = Buffer.from(float32.buffer, float32.byteOffset, float32.byteLength);\n\n database\n .prepare('UPDATE lessons SET embedding_insight = ?, content_hash_insight = ? 
WHERE id = ?')\n .run(buffer, hash, lessonId);\n}\n\n/**\n * Collect all cached embeddings from the database.\n * Used during index rebuild to preserve valid caches.\n * @param database - SQLite database instance\n * @returns Map of lesson ID to cached embedding data\n */\nexport function collectCachedEmbeddings(database: DatabaseType): Map<string, CachedEmbeddingData> {\n const cache = new Map<string, CachedEmbeddingData>();\n const rows = database\n .prepare('SELECT id, embedding, content_hash, embedding_insight, content_hash_insight FROM lessons WHERE embedding IS NOT NULL OR embedding_insight IS NOT NULL')\n .all() as Array<{ id: string; embedding: Buffer | null; content_hash: string | null; embedding_insight: Buffer | null; content_hash_insight: string | null }>;\n\n for (const row of rows) {\n if (row.embedding && row.content_hash) {\n cache.set(row.id, {\n embedding: row.embedding,\n contentHash: row.content_hash,\n embeddingInsight: row.embedding_insight,\n contentHashInsight: row.content_hash_insight,\n });\n } else if (row.embedding_insight && row.content_hash_insight) {\n // Only insight cache exists — still worth preserving\n cache.set(row.id, {\n embedding: row.embedding_insight, // placeholder, won't match hash\n contentHash: '',\n embeddingInsight: row.embedding_insight,\n contentHashInsight: row.content_hash_insight,\n });\n }\n }\n return cache;\n}\n","/**\n * SQLite index synchronization with JSONL source of truth.\n */\n\nimport { statSync } from 'node:fs';\nimport { join } from 'node:path';\nimport type { Database as DatabaseType } from 'better-sqlite3';\n\nimport type { MemoryItem } from '../../types.js';\nimport { LESSONS_PATH, readMemoryItems } from '../jsonl.js';\n\nimport type { SyncOptions } from './types.js';\nimport { openDb } from './connection.js';\nimport { collectCachedEmbeddings, contentHash } from './cache.js';\n\n/** SQL for inserting a lesson record */\nconst INSERT_LESSON_SQL = `\n INSERT INTO lessons (id, type, trigger, insight, evidence, severity, tags, source, context, supersedes, related, created, confirmed, deleted, retrieval_count, last_retrieved, embedding, content_hash, embedding_insight, content_hash_insight, invalidated_at, invalidation_reason, citation_file, citation_line, citation_commit, compaction_level, compacted_at, pattern_bad, pattern_good)\n VALUES (@id, @type, @trigger, @insight, @evidence, @severity, @tags, @source, @context, @supersedes, @related, @created, @confirmed, @deleted, @retrieval_count, @last_retrieved, @embedding, @content_hash, @embedding_insight, @content_hash_insight, @invalidated_at, @invalidation_reason, @citation_file, @citation_line, @citation_commit, @compaction_level, @compacted_at, @pattern_bad, @pattern_good)\n`;\n\n/**\n * Get the modification time of the JSONL file.\n * @param repoRoot - Absolute path to repository root\n * @returns Modification time in milliseconds or null if file doesn't exist\n */\nfunction getJsonlMtime(repoRoot: string): number | null {\n const jsonlPath = join(repoRoot, LESSONS_PATH);\n try {\n const stat = statSync(jsonlPath);\n return stat.mtimeMs;\n } catch {\n return null;\n }\n}\n\n/**\n * Get the last sync modification time from metadata.\n * @param database - SQLite database instance\n * @returns Last sync mtime or null if not set\n */\nfunction getLastSyncMtime(database: DatabaseType): number | null {\n const row = database\n .prepare('SELECT value FROM metadata WHERE key = ?')\n .get('last_sync_mtime') as { value: string } | undefined;\n return row ? 
parseFloat(row.value) : null;\n}\n\n/**\n * Set the last sync modification time in metadata.\n * @param database - SQLite database instance\n * @param mtime - Modification time to store\n */\nfunction setLastSyncMtime(database: DatabaseType, mtime: number): void {\n database\n .prepare('INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)')\n .run('last_sync_mtime', mtime.toString());\n}\n\n/**\n * Rebuild the SQLite index from JSONL source of truth.\n * Preserves cached embeddings when item content hasn't changed.\n * @param repoRoot - Absolute path to repository root\n */\nexport async function rebuildIndex(repoRoot: string): Promise<void> {\n const database = openDb(repoRoot);\n\n const { items } = await readMemoryItems(repoRoot);\n const cachedEmbeddings = collectCachedEmbeddings(database);\n database.exec('DELETE FROM lessons');\n\n if (items.length === 0) {\n const mtime = getJsonlMtime(repoRoot);\n if (mtime !== null) {\n setLastSyncMtime(database, mtime);\n }\n return;\n }\n\n const insert = database.prepare(INSERT_LESSON_SQL);\n const insertMany = database.transaction((memoryItems: MemoryItem[]) => {\n for (const item of memoryItems) {\n const newHash = contentHash(item.trigger, item.insight);\n const insightHash = contentHash(item.insight, '');\n const cached = cachedEmbeddings.get(item.id);\n const hasValidCache = cached && cached.contentHash === newHash;\n const hasValidInsightCache = cached && cached.contentHashInsight === insightHash;\n\n insert.run({\n id: item.id,\n type: item.type,\n trigger: item.trigger,\n insight: item.insight,\n evidence: item.evidence ?? null,\n severity: item.severity ?? null,\n tags: item.tags.join(','),\n source: item.source,\n context: JSON.stringify(item.context),\n supersedes: JSON.stringify(item.supersedes),\n related: JSON.stringify(item.related),\n created: item.created,\n confirmed: item.confirmed ? 1 : 0,\n deleted: item.deleted ? 1 : 0,\n retrieval_count: item.retrievalCount ?? 0,\n last_retrieved: item.lastRetrieved ?? null,\n embedding: hasValidCache ? cached.embedding : null,\n content_hash: hasValidCache ? cached.contentHash : null,\n embedding_insight: hasValidInsightCache ? cached.embeddingInsight : null,\n content_hash_insight: hasValidInsightCache ? cached.contentHashInsight : null,\n invalidated_at: item.invalidatedAt ?? null,\n invalidation_reason: item.invalidationReason ?? null,\n citation_file: item.citation?.file ?? null,\n citation_line: item.citation?.line ?? null,\n citation_commit: item.citation?.commit ?? null,\n compaction_level: item.compactionLevel ?? 0,\n compacted_at: item.compactedAt ?? null,\n pattern_bad: item.pattern?.bad ?? null,\n pattern_good: item.pattern?.good ?? 
null,\n });\n }\n });\n\n insertMany(items);\n\n const mtime = getJsonlMtime(repoRoot);\n if (mtime !== null) {\n setLastSyncMtime(database, mtime);\n }\n}\n\n/**\n * Sync SQLite index if JSONL has changed.\n * @param repoRoot - Absolute path to repository root\n * @param options - Sync options\n * @returns true if sync was performed, false otherwise\n */\nexport async function syncIfNeeded(\n repoRoot: string,\n options: SyncOptions = {}\n): Promise<boolean> {\n const { force = false } = options;\n const jsonlMtime = getJsonlMtime(repoRoot);\n if (jsonlMtime === null && !force) {\n return false;\n }\n\n const database = openDb(repoRoot);\n\n const lastSyncMtime = getLastSyncMtime(database);\n const needsRebuild = force || lastSyncMtime === null || (jsonlMtime !== null && jsonlMtime > lastSyncMtime);\n\n if (needsRebuild) {\n await rebuildIndex(repoRoot);\n return true;\n }\n\n return false;\n}\n","/**\n * Hybrid search: BM25 normalization and result merging.\n *\n * Combines vector similarity (cosine) with FTS5 keyword matching (BM25)\n * into a single blended score for improved retrieval quality.\n */\n\nimport type { MemoryItem } from '../types.js';\nimport type { ScoredLesson } from './vector.js';\n\n/** Generic scored item for hybrid merge */\nexport interface GenericScoredItem<T> {\n item: T;\n score: number;\n}\n\n/** Keyword search result with normalized BM25 score */\nexport interface ScoredKeywordResult {\n lesson: MemoryItem;\n /** BM25 rank normalized to 0-1 */\n score: number;\n}\n\n/** Options for hybrid merge */\nexport interface HybridMergeOptions {\n vectorWeight?: number;\n textWeight?: number;\n limit?: number;\n /** Filter results below this blended score */\n minScore?: number;\n}\n\nexport const DEFAULT_VECTOR_WEIGHT = 0.7;\nexport const DEFAULT_TEXT_WEIGHT = 0.3;\nexport const CANDIDATE_MULTIPLIER = 4;\nexport const MIN_HYBRID_SCORE = 0.35;\n\n/**\n * Normalize FTS5 BM25 rank to a 0-1 score.\n *\n * FTS5 ranks are negative (lower = more relevant).\n * Uses: |rank| / (1 + |rank|) so that more negative ranks\n * (more relevant) produce higher scores, making keyword\n * matches meaningful in the hybrid blend.\n *\n * Examples: -10 -> ~0.909, -1 -> 0.5, 0 -> 0, NaN -> 0\n */\nexport function normalizeBm25Rank(rank: number): number {\n if (!Number.isFinite(rank)) return 0;\n const abs = Math.abs(rank);\n return abs / (1 + abs);\n}\n\n/**\n * Generic hybrid merge that works with any item type.\n * Requires an getId function to identify unique items for union.\n *\n * Algorithm:\n * 1. Normalize weights to sum to 1.0\n * 2. Union both result sets by item ID\n * 3. Blend: score = vecW * vectorScore + txtW * textScore (missing source = 0)\n * 4. Sort descending by blended score\n */\nexport function mergeHybridScores<T>(\n vectorResults: GenericScoredItem<T>[],\n keywordResults: GenericScoredItem<T>[],\n getId: (item: T) => string,\n options?: HybridMergeOptions\n): GenericScoredItem<T>[] {\n if (vectorResults.length === 0 && keywordResults.length === 0) return [];\n\n const rawVecW = options?.vectorWeight ?? DEFAULT_VECTOR_WEIGHT;\n const rawTxtW = options?.textWeight ?? 
DEFAULT_TEXT_WEIGHT;\n const total = rawVecW + rawTxtW;\n if (total <= 0) return [];\n const vecW = rawVecW / total;\n const txtW = rawTxtW / total;\n const limit = options?.limit;\n const minScore = options?.minScore;\n\n // Union by item ID\n const merged = new Map<string, { item: T; vecScore: number; txtScore: number }>();\n\n for (const v of vectorResults) {\n merged.set(getId(v.item), { item: v.item, vecScore: v.score, txtScore: 0 });\n }\n\n for (const k of keywordResults) {\n const id = getId(k.item);\n const existing = merged.get(id);\n if (existing) {\n existing.txtScore = k.score;\n } else {\n merged.set(id, { item: k.item, vecScore: 0, txtScore: k.score });\n }\n }\n\n // Blend and sort\n const results: GenericScoredItem<T>[] = [];\n for (const entry of merged.values()) {\n results.push({\n item: entry.item,\n score: vecW * entry.vecScore + txtW * entry.txtScore,\n });\n }\n\n results.sort((a, b) => b.score - a.score);\n\n const filtered = minScore !== undefined ? results.filter((r) => r.score >= minScore) : results;\n return limit !== undefined ? filtered.slice(0, limit) : filtered;\n}\n\n/**\n * Merge vector and keyword search results into a single ranked list.\n * Delegates to the generic mergeHybridScores.\n */\nexport function mergeHybridResults(\n vectorResults: ScoredLesson[],\n keywordResults: ScoredKeywordResult[],\n options?: HybridMergeOptions\n): ScoredLesson[] {\n const genericVec = vectorResults.map((v) => ({ item: v.lesson, score: v.score }));\n const genericKw = keywordResults.map((k) => ({ item: k.lesson, score: k.score }));\n const merged = mergeHybridScores(genericVec, genericKw, (item) => item.id, options);\n return merged.map((m) => ({ lesson: m.item, score: m.score }));\n}\n","/**\n * SQLite search operations using FTS5 full-text search.\n */\n\nimport { MemoryItemSchema } from '../../types.js';\nimport type { MemoryItem, MemoryItemType } from '../../types.js';\nimport { normalizeBm25Rank, type ScoredKeywordResult } from '../../search/hybrid.js';\n\nimport type { MemoryItemRow, RetrievalStat } from './types.js';\nimport { openDb } from './connection.js';\n\n/**\n * Convert a database row to a MemoryItem object.\n * @param row - Database row\n * @returns MemoryItem object\n */\nfunction safeJsonParse<T>(value: string, fallback: T): T {\n try {\n return JSON.parse(value) as T;\n } catch {\n return fallback;\n }\n}\n\nfunction rowToMemoryItem(row: MemoryItemRow): MemoryItem | null {\n const item = {\n id: row.id,\n type: row.type,\n trigger: row.trigger,\n insight: row.insight,\n tags: row.tags ? 
row.tags.split(',').filter(Boolean) : [],\n source: row.source,\n context: safeJsonParse(row.context, {}),\n supersedes: safeJsonParse(row.supersedes, []),\n related: safeJsonParse(row.related, []),\n created: row.created,\n confirmed: row.confirmed === 1,\n } as Record<string, unknown>;\n\n if (row.evidence !== null) item.evidence = row.evidence;\n if (row.severity !== null) item.severity = row.severity;\n if (row.deleted === 1) item.deleted = true;\n if (row.retrieval_count > 0) item.retrievalCount = row.retrieval_count;\n if (row.invalidated_at !== null) item.invalidatedAt = row.invalidated_at;\n if (row.invalidation_reason !== null) item.invalidationReason = row.invalidation_reason;\n if (row.citation_file !== null) {\n item.citation = {\n file: row.citation_file,\n ...(row.citation_line !== null && { line: row.citation_line }),\n ...(row.citation_commit !== null && { commit: row.citation_commit }),\n };\n }\n if (row.compaction_level !== null && row.compaction_level !== 0) {\n item.compactionLevel = row.compaction_level;\n }\n if (row.compacted_at !== null) item.compactedAt = row.compacted_at;\n if (row.last_retrieved !== null) item.lastRetrieved = row.last_retrieved;\n if (row.pattern_bad !== null && row.pattern_good !== null) {\n item.pattern = { bad: row.pattern_bad, good: row.pattern_good };\n }\n\n const result = MemoryItemSchema.safeParse(item);\n if (!result.success) return null;\n return result.data;\n}\n\n\n/**\n * Read all non-invalidated memory items from the SQLite cache.\n * Use this instead of readMemoryItems() when the SQLite index is\n * already synced, to avoid a redundant JSONL parse + Zod validation.\n *\n * @param repoRoot - Absolute path to repository root\n * @returns Array of MemoryItem objects\n */\nexport function readAllFromSqlite(repoRoot: string): MemoryItem[] {\n const database = openDb(repoRoot);\n\n const rows = database\n .prepare('SELECT * FROM lessons WHERE invalidated_at IS NULL')\n .all() as MemoryItemRow[];\n\n return rows.map(rowToMemoryItem).filter((x): x is MemoryItem => x !== null);\n}\n\n/** FTS5 operator tokens to remove */\nconst FTS_OPERATORS = new Set(['AND', 'OR', 'NOT', 'NEAR']);\n\n/**\n * Sanitize a query string for safe use with FTS5 MATCH.\n * Strips special FTS5 syntax characters and operators.\n * @param query - Raw user query\n * @returns Sanitized query safe for FTS5\n */\nexport function sanitizeFtsQuery(query: string): string {\n // Strip FTS5 special chars: \" * ^ + - ( ) : { }\n const stripped = query.replace(/[\"*^+\\-():{}]/g, '');\n // Tokenize by whitespace, remove FTS operators, filter empty\n const tokens = stripped\n .split(/\\s+/)\n .filter((t) => t.length > 0 && !FTS_OPERATORS.has(t));\n return tokens.join(' ');\n}\n\n/**\n * Increment retrieval count for lessons.\n * @param repoRoot - Absolute path to repository root\n * @param lessonIds - IDs of retrieved lessons\n */\nexport function incrementRetrievalCount(repoRoot: string, lessonIds: string[]): void {\n if (lessonIds.length === 0) return;\n\n const database = openDb(repoRoot);\n\n const now = new Date().toISOString();\n\n const update = database.prepare(`\n UPDATE lessons\n SET retrieval_count = retrieval_count + 1,\n last_retrieved = ?\n WHERE id = ?\n `);\n\n const updateMany = database.transaction((ids: string[]) => {\n for (const id of ids) {\n update.run(now, id);\n }\n });\n\n updateMany(lessonIds);\n}\n\n/**\n * Row type for scored keyword query (includes FTS5 rank).\n */\ninterface ScoredRow extends MemoryItemRow {\n rank: number;\n}\n\n/**\n * Shared 
FTS5 query execution. Builds the SQL with optional rank column\n * and ORDER BY, then runs the query with sanitization and error handling.\n */\nfunction executeFtsQuery(\n repoRoot: string,\n query: string,\n limit: number,\n options: { includeRank: boolean; typeFilter?: MemoryItemType }\n): ScoredRow[] {\n const database = openDb(repoRoot);\n\n const sanitized = sanitizeFtsQuery(query);\n if (sanitized === '') return [];\n\n const selectCols = options.includeRank ? 'l.*, fts.rank' : 'l.*';\n const orderClause = options.includeRank ? 'ORDER BY fts.rank' : '';\n const typeClause = options.typeFilter ? 'AND l.type = ?' : '';\n\n const sql = `\n SELECT ${selectCols}\n FROM lessons l\n JOIN lessons_fts fts ON l.rowid = fts.rowid\n WHERE lessons_fts MATCH ?\n AND l.invalidated_at IS NULL\n ${typeClause}\n ${orderClause}\n LIMIT ?\n `;\n\n const params = options.typeFilter\n ? [sanitized, options.typeFilter, limit]\n : [sanitized, limit];\n\n try {\n return database.prepare(sql).all(...params) as ScoredRow[];\n } catch (err) {\n const message = err instanceof Error ? err.message : 'Unknown FTS5 error';\n console.error(`[compound-agent] search error: ${message}`);\n return [];\n }\n}\n\n/**\n * Search lessons using FTS5 full-text search.\n * @param repoRoot - Absolute path to repository root\n * @param query - FTS5 query string\n * @param limit - Maximum number of results\n * @param typeFilter - Optional memory item type to filter by\n * @returns Matching lessons\n */\nexport async function searchKeyword(\n repoRoot: string,\n query: string,\n limit: number,\n typeFilter?: MemoryItemType\n): Promise<MemoryItem[]> {\n const rows = executeFtsQuery(repoRoot, query, limit, { includeRank: false, typeFilter });\n return rows.map(rowToMemoryItem).filter((x): x is MemoryItem => x !== null);\n}\n\n/**\n * Search lessons using FTS5 with normalized BM25 scores.\n *\n * @param repoRoot - Absolute path to repository root\n * @param query - FTS5 query string\n * @param limit - Maximum number of results\n * @param typeFilter - Optional memory item type to filter by\n * @returns Scored keyword results with BM25 scores normalized to 0-1\n */\nexport async function searchKeywordScored(\n repoRoot: string,\n query: string,\n limit: number,\n typeFilter?: MemoryItemType\n): Promise<ScoredKeywordResult[]> {\n const rows = executeFtsQuery(repoRoot, query, limit, { includeRank: true, typeFilter });\n const results: ScoredKeywordResult[] = [];\n for (const row of rows) {\n const lesson = rowToMemoryItem(row);\n if (lesson) {\n results.push({ lesson, score: normalizeBm25Rank(row.rank) });\n }\n }\n return results;\n}\n\n/**\n * Get retrieval statistics for all lessons.\n * @param repoRoot - Absolute path to repository root\n * @returns Array of retrieval statistics\n */\nexport function getRetrievalStats(repoRoot: string): RetrievalStat[] {\n const database = openDb(repoRoot);\n\n const rows = database\n .prepare('SELECT id, retrieval_count, last_retrieved FROM lessons')\n .all() as Array<{ id: string; retrieval_count: number; last_retrieved: string | null }>;\n\n return rows.map((row) => ({\n id: row.id,\n count: row.retrieval_count,\n lastRetrieved: row.last_retrieved,\n }));\n}\n","/**\n * SQLite storage module - rebuildable index with FTS5 full-text search.\n *\n * SQLite is required. 
If better-sqlite3 fails to load, a clear error\n * is thrown.\n */\n\n// Types\nexport type { DbOptions, RetrievalStat, SyncOptions } from './types.js';\n\n// Connection\nexport { closeDb, DB_PATH, openDb } from './connection.js';\n\n// Cache\nexport {\n contentHash,\n getCachedEmbedding,\n getCachedEmbeddingsBulk,\n getCachedInsightEmbedding,\n setCachedEmbedding,\n setCachedInsightEmbedding,\n} from './cache.js';\nexport type { CachedEmbeddingEntry } from './cache.js';\n\n// Sync\nexport { rebuildIndex, syncIfNeeded } from './sync.js';\n\n// Availability\nexport { ensureSqliteAvailable, resetSqliteAvailability } from './availability.js';\n\n// Search\nexport {\n getRetrievalStats,\n incrementRetrievalCount,\n readAllFromSqlite,\n searchKeyword,\n searchKeywordScored,\n} from './search.js';\n","/**\n * Tombstone removal and JSONL rewrite\n *\n * Handles:\n * - Removing tombstones through JSONL rewrite\n * - Tracking compaction thresholds\n */\n\nimport { mkdir, readFile, rename, writeFile } from 'node:fs/promises';\nimport { dirname, join } from 'node:path';\n\nimport { MemoryItemSchema } from '../types.js';\nimport type { MemoryItem } from '../types.js';\n\nimport { LESSONS_PATH } from './jsonl.js';\n\n/** Number of tombstones that triggers automatic compaction */\nexport const TOMBSTONE_THRESHOLD = 100;\n\n/**\n * Result of a compaction operation\n */\nexport interface CompactResult {\n /** Number of lessons moved to archive (always 0, kept for API compat) */\n archived: number;\n /** Number of tombstones removed */\n tombstonesRemoved: number;\n /** Number of lessons remaining in index.jsonl */\n lessonsRemaining: number;\n /** Number of records dropped due to invalid schema */\n droppedInvalid: number;\n}\n\n/**\n * Parse raw JSONL lines from the lessons file.\n * Returns all lines (including invalid ones) as parsed objects or null.\n */\nasync function parseRawJsonlLines(\n repoRoot: string\n): Promise<Array<{ line: string; parsed: Record<string, unknown> | null }>> {\n const filePath = join(repoRoot, LESSONS_PATH);\n let content: string;\n try {\n content = await readFile(filePath, 'utf-8');\n } catch {\n return [];\n }\n\n const results: Array<{ line: string; parsed: Record<string, unknown> | null }> = [];\n for (const line of content.split('\\n')) {\n const trimmed = line.trim();\n if (!trimmed) continue;\n\n try {\n const parsed = JSON.parse(trimmed) as Record<string, unknown>;\n results.push({ line: trimmed, parsed });\n } catch {\n results.push({ line: trimmed, parsed: null });\n }\n }\n return results;\n}\n\n/**\n * Count the number of tombstones (deleted: true records) in the JSONL file.\n */\nexport async function countTombstones(repoRoot: string): Promise<number> {\n const lines = await parseRawJsonlLines(repoRoot);\n let count = 0;\n for (const { parsed } of lines) {\n if (parsed && parsed['deleted'] === true) {\n count++;\n }\n }\n return count;\n}\n\n/**\n * Check if compaction is needed based on tombstone count.\n */\nexport async function needsCompaction(repoRoot: string): Promise<boolean> {\n const count = await countTombstones(repoRoot);\n return count >= TOMBSTONE_THRESHOLD;\n}\n\n/**\n * Run compaction: remove tombstones and invalid records, rewrite JSONL.\n *\n * Reads the JSONL file exactly once, deduplicates in-memory,\n * then atomically replaces the main file.\n */\nexport async function compact(repoRoot: string): Promise<CompactResult> {\n const filePath = join(repoRoot, LESSONS_PATH);\n\n // 1. 
Read file ONCE\n let content: string;\n try {\n content = await readFile(filePath, 'utf-8');\n } catch {\n return { archived: 0, tombstonesRemoved: 0, lessonsRemaining: 0, droppedInvalid: 0 };\n }\n\n // 2. Parse all records in-memory with last-write-wins dedup\n const lessonMap = new Map<string, MemoryItem>();\n let tombstoneCount = 0;\n let droppedCount = 0;\n\n for (const rawLine of content.split('\\n')) {\n const trimmed = rawLine.trim();\n if (!trimmed) continue;\n\n let parsed: Record<string, unknown>;\n try {\n parsed = JSON.parse(trimmed) as Record<string, unknown>;\n } catch {\n continue;\n }\n\n if (parsed['deleted'] === true) {\n lessonMap.delete(parsed['id'] as string);\n tombstoneCount++;\n } else {\n const result = MemoryItemSchema.safeParse(parsed);\n if (result.success) {\n lessonMap.set(result.data.id, result.data);\n } else {\n droppedCount++;\n }\n }\n }\n\n // 3. Collect all remaining lessons\n const toKeep = [...lessonMap.values()];\n\n // 4. Atomic write of main JSONL with only kept lessons\n await mkdir(dirname(filePath), { recursive: true });\n const tempPath = filePath + '.tmp';\n const lines = toKeep.map((lesson) => JSON.stringify(lesson) + '\\n');\n await writeFile(tempPath, lines.join(''), 'utf-8');\n await rename(tempPath, filePath);\n\n return {\n archived: 0,\n tombstonesRemoved: tombstoneCount,\n lessonsRemaining: toKeep.length,\n droppedInvalid: droppedCount,\n };\n}\n","/**\n * Storage module - JSONL + SQLite storage layer\n *\n * JSONL is the source of truth (git-tracked).\n * SQLite is a rebuildable index with FTS5 and embedding cache.\n */\n\n// JSONL storage (source of truth)\nexport { appendLesson, appendMemoryItem, LESSONS_PATH, readLessons, readMemoryItems } from './jsonl.js';\nexport type { ParseError, ReadLessonsOptions, ReadLessonsResult, ReadMemoryItemsResult } from './jsonl.js';\n\n// SQLite storage (rebuildable index)\nexport {\n closeDb,\n contentHash,\n DB_PATH,\n ensureSqliteAvailable,\n getCachedEmbedding,\n getCachedEmbeddingsBulk,\n getCachedInsightEmbedding,\n getRetrievalStats,\n incrementRetrievalCount,\n openDb,\n readAllFromSqlite,\n rebuildIndex,\n resetSqliteAvailability,\n searchKeyword,\n searchKeywordScored,\n setCachedEmbedding,\n setCachedInsightEmbedding,\n syncIfNeeded,\n} from './sqlite/index.js';\nexport type { CachedEmbeddingEntry, DbOptions, RetrievalStat, SyncOptions } from './sqlite/index.js';\n\n// Compaction (tombstone removal)\nexport {\n compact,\n countTombstones,\n needsCompaction,\n TOMBSTONE_THRESHOLD,\n} from './compact.js';\nexport type { CompactResult } from './compact.js';\n","/**\n * Embedding model resolution using node-llama-cpp's built-in resolver.\n *\n * Uses resolveModelFile for automatic download and caching.\n * Model is stored in ~/.node-llama-cpp/models/ by default.\n */\n\nimport { existsSync } from 'node:fs';\nimport { homedir } from 'node:os';\nimport { join } from 'node:path';\nimport { getLlama, LlamaLogLevel, resolveModelFile } from 'node-llama-cpp';\n\n/**\n * HuggingFace URI for EmbeddingGemma-300M (Q4_0 quantization).\n *\n * - Size: ~278MB\n * - Dimensions: 768 (default), supports MRL truncation to 512/256/128\n * - Context: 2048 tokens\n */\nexport const MODEL_URI = 'hf:ggml-org/embeddinggemma-300M-qat-q4_0-GGUF/embeddinggemma-300M-qat-Q4_0.gguf';\n\n/**\n * Expected model filename after download.\n * node-llama-cpp uses format: hf_{org}_{filename}\n */\nexport const MODEL_FILENAME = 'hf_ggml-org_embeddinggemma-300M-qat-Q4_0.gguf';\n\n/** Default model directory used by node-llama-cpp 
*/\nconst DEFAULT_MODEL_DIR = join(homedir(), '.node-llama-cpp', 'models');\n\n/** Cached usability result (per-process) */\nlet cachedUsability: UsabilityResult | null = null;\n\n/**\n * Check if the embedding model is available locally (fs existence only).\n *\n * Use this for cheap pre-flight checks (e.g. spawnBackgroundEmbed) where\n * failure is handled gracefully. Use {@link isModelUsable} when you need\n * runtime verification that the model can actually initialize.\n *\n * @returns true if model file exists\n */\nexport function isModelAvailable(): boolean {\n return existsSync(join(DEFAULT_MODEL_DIR, MODEL_FILENAME));\n}\n\n/**\n * Result of checking if the model is usable at runtime.\n *\n * A discriminated union where `usable` determines which fields are present:\n * - usable=true: Model can initialize and create embedding context\n * - usable=false: Model cannot be used, with reason and actionable fix\n */\nexport type UsabilityResult =\n | { usable: true; reason?: undefined; action?: undefined }\n | { usable: false; reason: string; action: string };\n\n/**\n * Check if the embedding model is usable at runtime.\n *\n * Goes beyond file existence to verify the model can actually initialize:\n * 1. Checks if model file exists (fast fail)\n * 2. Attempts to load llama runtime\n * 3. Attempts to load model\n * 4. Attempts to create embedding context\n * 5. Cleans up all resources after check\n *\n * WARNING: This function allocates ~150MB of native C++ memory for the probe.\n * NEVER call at module top-level in test files. When dispose() SIGABRTs in\n * vitest workers, that memory is permanently leaked. For test skip-gating,\n * use isModelAvailable() instead (zero native allocation). Reserve this\n * function for production code paths where runtime verification is needed.\n *\n * @returns UsabilityResult with usable status and actionable error if failed\n */\nexport async function isModelUsable(): Promise<UsabilityResult> {\n // Return cached result if available (avoids double initialization)\n if (cachedUsability !== null) {\n return cachedUsability;\n }\n\n // Fast fail if model file doesn't exist\n if (!isModelAvailable()) {\n cachedUsability = {\n usable: false,\n reason: 'Embedding model file not found',\n action: 'Run: npx ca download-model',\n };\n return cachedUsability;\n }\n\n // Attempt runtime initialization\n let llama = null;\n let model = null;\n let context = null;\n\n try {\n const modelPath = join(DEFAULT_MODEL_DIR, MODEL_FILENAME);\n\n // Step 1: Get llama runtime\n llama = await getLlama({\n build: 'never', // Never compile from source in a deployed tool\n progressLogs: false, // Suppress prebuilt binary fallback warnings\n logLevel: LlamaLogLevel.error, // Only surface real errors from C++ backend\n // Set NODE_LLAMA_CPP_DEBUG=true to re-enable all output for troubleshooting\n });\n\n // Step 2: Load model\n model = await llama.loadModel({ modelPath });\n\n // Step 3: Create embedding context\n context = await model.createEmbeddingContext();\n\n // Success - cache and return\n cachedUsability = { usable: true };\n return cachedUsability;\n } catch (err) {\n const message = err instanceof Error ? 
err.message : 'Unknown error';\n cachedUsability = {\n usable: false,\n reason: `Embedding model runtime initialization failed: ${message}`,\n action: 'Check system compatibility or reinstall: npx ca download-model',\n };\n return cachedUsability;\n } finally {\n // Clean up resources in reverse order\n if (context) {\n try { await context.dispose(); } catch { /* ignore cleanup errors */ }\n }\n if (model) {\n try { await model.dispose(); } catch { /* ignore cleanup errors */ }\n }\n if (llama) {\n try { await llama.dispose(); } catch { /* ignore cleanup errors */ }\n }\n }\n}\n\n/**\n * Clear the cached usability result.\n *\n * Primarily for testing purposes. Clears the cached result so the next\n * call to isModelUsable() will perform a fresh check.\n */\nexport function clearUsabilityCache(): void {\n cachedUsability = null;\n}\n\n/**\n * Resolve the embedding model path, downloading if necessary.\n *\n * Uses node-llama-cpp's resolveModelFile for automatic download with progress.\n *\n * @param options - Optional configuration\n * @param options.cli - Show download progress in console (default: true)\n * @returns Path to the resolved model file\n *\n * @example\n * ```typescript\n * const modelPath = await resolveModel();\n * const llama = await getLlama({ build: 'never', logLevel: LlamaLogLevel.error });\n * const model = await llama.loadModel({ modelPath });\n * ```\n */\nexport async function resolveModel(options: { cli?: boolean } = {}): Promise<string> {\n const { cli = true } = options;\n return resolveModelFile(MODEL_URI, { cli });\n}\n","/**\n * Text embedding via node-llama-cpp with EmbeddingGemma model\n *\n * **Resource lifecycle:**\n * - Model is loaded lazily on first embedding call (~150MB in memory)\n * - Once loaded, the model remains in memory until `unloadEmbedding()` is called\n * - Loading is slow (~1-3s); keeping loaded improves subsequent call performance\n *\n * **Memory usage:**\n * - Embedding model: ~150MB RAM when loaded\n * - Embeddings themselves: ~3KB per embedding (768 dimensions x 4 bytes)\n *\n * @see {@link unloadEmbedding} for releasing memory\n * @see {@link getEmbedding} for the lazy-loading mechanism\n */\n\nimport type { Llama, LlamaModel } from 'node-llama-cpp';\nimport { getLlama, LlamaEmbeddingContext, LlamaLogLevel } from 'node-llama-cpp';\n\nimport { isModelAvailable, resolveModel } from './model.js';\n\n/** Singleton embedding context */\nlet embeddingContext: LlamaEmbeddingContext | null = null;\n/** Pending initialization promise (prevents concurrent duplicate loads) */\nlet pendingInit: Promise<LlamaEmbeddingContext> | null = null;\n/** Native resource refs for proper cleanup */\nlet llamaInstance: Llama | null = null;\nlet modelInstance: LlamaModel | null = null;\n\n/**\n * Get the LlamaEmbeddingContext instance for generating embeddings.\n *\n * **Lazy loading behavior:**\n * - First call loads the embedding model (~150MB) into memory\n * - Loading takes ~1-3 seconds depending on hardware\n * - Subsequent calls return the cached instance immediately\n * - Downloads model automatically if not present\n *\n * **Resource lifecycle:**\n * - Once loaded, model stays in memory until `unloadEmbedding()` is called\n * - For CLI commands: typically load once, use, then unload on exit\n * - For long-running processes: keep loaded for performance\n *\n * @returns The singleton embedding context\n * @throws Error if model download fails\n *\n * @example\n * ```typescript\n * // Direct usage (prefer embedText for simple cases)\n * const ctx = await 
getEmbedding();\n * const result = await ctx.getEmbeddingFor('some text');\n *\n * // Ensure cleanup\n * process.on('exit', () => unloadEmbedding());\n * ```\n *\n * @see {@link embedText} for simpler text-to-vector conversion\n * @see {@link unloadEmbedding} for releasing memory\n */\nexport async function getEmbedding(): Promise<LlamaEmbeddingContext> {\n if (embeddingContext) return embeddingContext;\n if (pendingInit) return pendingInit;\n\n pendingInit = (async () => {\n try {\n const modelPath = await resolveModel({ cli: true });\n llamaInstance = await getLlama({\n build: 'never', // Never compile from source in a deployed tool\n progressLogs: false, // Suppress prebuilt binary fallback warnings\n logLevel: LlamaLogLevel.error, // Only surface real errors from C++ backend\n // Set NODE_LLAMA_CPP_DEBUG=true to re-enable all output for troubleshooting\n });\n modelInstance = await llamaInstance.loadModel({ modelPath });\n embeddingContext = await modelInstance.createEmbeddingContext();\n return embeddingContext;\n } catch (err) {\n pendingInit = null; // Allow retry on failure\n throw err;\n }\n })();\n\n return pendingInit;\n}\n\n/**\n * Await disposal of all loaded embedding resources.\n *\n * This is intended for CLI shutdown paths that must wait for the native addon\n * to release worker threads before allowing the process to exit.\n */\nexport async function unloadEmbeddingResources(): Promise<void> {\n const pending = pendingInit;\n if (pending) {\n try {\n await pending;\n } catch {\n // Ignore initialization failures; dispose any partially created refs below.\n }\n }\n\n const context = embeddingContext;\n const model = modelInstance;\n const llama = llamaInstance;\n\n embeddingContext = null;\n modelInstance = null;\n llamaInstance = null;\n pendingInit = null;\n\n const disposals: Promise<unknown>[] = [];\n\n if (context) {\n disposals.push(context.dispose());\n }\n if (model) {\n disposals.push(model.dispose());\n }\n if (llama) {\n disposals.push(llama.dispose());\n }\n\n if (disposals.length > 0) {\n await Promise.allSettled(disposals);\n }\n}\n\n/**\n * Unload the embedding context to free memory (~150MB).\n *\n * **Resource lifecycle:**\n * - Disposes the underlying LlamaEmbeddingContext\n * - Releases ~150MB of RAM used by the model\n * - After unloading, subsequent embedding calls will reload the model\n *\n * **When to call:**\n * - At the end of CLI commands to ensure clean process exit\n * - In memory-constrained environments after batch processing\n * - Before process exit in graceful shutdown handlers\n * - When switching to a different model (if supported in future)\n *\n * **Best practices:**\n * - For single-operation scripts: call before exit\n * - For daemon/server processes: call in shutdown handler\n * - Not needed between embedding calls in the same process\n *\n * @example\n * ```typescript\n * // CLI command pattern\n * try {\n * const embedding = await embedText('some text');\n * // ... 
use embedding\n * } finally {\n * unloadEmbedding();\n * closeDb();\n * }\n *\n * // Graceful shutdown pattern\n * process.on('SIGTERM', () => {\n * unloadEmbedding();\n * closeDb();\n * process.exit(0);\n * });\n * ```\n *\n * @see {@link getEmbedding} for loading the model\n * @see {@link closeDb} for database cleanup (often used together)\n */\nexport function unloadEmbedding(): void {\n void unloadEmbeddingResources();\n}\n\n/**\n * Run a callback with embedding resources, guaranteeing cleanup.\n *\n * The model loads lazily on the first embedText/embedTexts call inside\n * the callback (via the existing singleton). After the callback completes\n * or throws, all native resources (~150MB) are disposed.\n *\n * Use this instead of manually pairing embedText with unloadEmbeddingResources.\n */\nexport async function withEmbedding<T>(fn: () => Promise<T>): Promise<T> {\n try {\n return await fn();\n } finally {\n await unloadEmbeddingResources();\n }\n}\n\n/**\n * Embed a single text string into a vector.\n *\n * **Lazy loading:** First call loads the embedding model (~150MB, ~1-3s).\n * Subsequent calls use the cached model and complete in milliseconds.\n *\n * @param text - The text to embed\n * @returns A 768-dimensional Float32Array vector\n * @throws Error if model download fails\n *\n * @example\n * ```typescript\n * const vector = await embedText('TypeScript error handling');\n * console.log(vector.length); // 768\n *\n * // Remember to clean up when done\n * unloadEmbedding();\n * ```\n *\n * @see {@link embedTexts} for batch embedding\n * @see {@link unloadEmbedding} for releasing memory\n */\nexport async function embedText(text: string): Promise<Float32Array> {\n const ctx = await getEmbedding();\n const result = await ctx.getEmbeddingFor(text);\n return new Float32Array(result.vector);\n}\n\n/**\n * Embed multiple texts into vectors.\n *\n * **Lazy loading:** First call loads the embedding model (~150MB, ~1-3s).\n * Subsequent calls use the cached model.\n *\n * **Note:** Texts are embedded sequentially (node-llama-cpp uses a mutex lock).\n * The only advantage over a manual loop is shared model initialization.\n *\n * @param texts - Array of texts to embed\n * @returns Array of 768-dimensional vectors, same order as input\n * @throws Error if model download fails\n *\n * @example\n * ```typescript\n * const texts = ['first text', 'second text'];\n * const vectors = await embedTexts(texts);\n * console.log(vectors.length); // 2\n * console.log(vectors[0].length); // 768\n *\n * // Remember to clean up when done\n * unloadEmbedding();\n * ```\n *\n * @see {@link embedText} for single text embedding\n * @see {@link unloadEmbedding} for releasing memory\n */\nexport async function embedTexts(texts: string[]): Promise<Float32Array[]> {\n if (texts.length === 0) return [];\n\n const ctx = await getEmbedding();\n const results: Float32Array[] = [];\n\n for (const text of texts) {\n const result = await ctx.getEmbeddingFor(text);\n results.push(new Float32Array(result.vector));\n }\n\n return results;\n}\n\n// Re-export isModelAvailable for test utilities\nexport { isModelAvailable };\n","/**\n * Embeddings module - Text embedding via EmbeddingGemma\n *\n * Provides text embedding for semantic search.\n * Model is downloaded automatically on first use (~150MB).\n */\n\n// Embedding functions\nexport { embedText, embedTexts, getEmbedding, isModelAvailable, unloadEmbedding, unloadEmbeddingResources, withEmbedding } from './nomic.js';\n\n// Model resolution\nexport { 
clearUsabilityCache, isModelUsable, MODEL_FILENAME, MODEL_URI, resolveModel } from './model.js';\nexport type { UsabilityResult } from './model.js';\n","/**\n * Clustering module for grouping similar memory items.\n *\n * Uses single-linkage agglomerative clustering with cosine similarity.\n */\n\nimport { cosineSimilarity } from '../memory/search/index.js';\nimport type { MemoryItem } from '../memory/index.js';\nimport type { ClusterResult } from './types.js';\n\n/** Default similarity threshold for clustering */\nconst DEFAULT_THRESHOLD = 0.75;\n\n/**\n * Build a pairwise cosine similarity matrix from embedding vectors.\n *\n * @param embeddings - Array of embedding vectors\n * @returns NxN similarity matrix\n */\nexport function buildSimilarityMatrix(embeddings: ArrayLike<number>[]): number[][] {\n const n = embeddings.length;\n const matrix: number[][] = Array.from({ length: n }, () => new Array<number>(n).fill(0));\n\n for (let i = 0; i < n; i++) {\n matrix[i]![i] = 1.0;\n for (let j = i + 1; j < n; j++) {\n const sim = cosineSimilarity(embeddings[i]!, embeddings[j]!);\n matrix[i]![j] = sim;\n matrix[j]![i] = sim;\n }\n }\n\n return matrix;\n}\n\n/**\n * Cluster memory items by embedding similarity using single-linkage\n * agglomerative clustering.\n *\n * @param items - Memory items to cluster\n * @param embeddings - Embedding vectors (same order as items)\n * @param threshold - Minimum similarity to merge clusters (default: 0.75)\n * @returns Clusters of similar items and noise (unclustered items)\n */\nexport function clusterBySimilarity(\n items: MemoryItem[],\n embeddings: ArrayLike<number>[],\n threshold: number = DEFAULT_THRESHOLD\n): ClusterResult {\n const n = items.length;\n if (n === 0) return { clusters: [], noise: [] };\n\n const matrix = buildSimilarityMatrix(embeddings);\n\n // Union-Find for single-linkage clustering\n const parent = Array.from({ length: n }, (_, i) => i);\n\n function find(x: number): number {\n while (parent[x] !== x) {\n parent[x] = parent[parent[x]!]!; // path compression\n x = parent[x]!;\n }\n return x;\n }\n\n function union(a: number, b: number): void {\n const rootA = find(a);\n const rootB = find(b);\n if (rootA !== rootB) parent[rootA] = rootB;\n }\n\n // Merge pairs above threshold\n for (let i = 0; i < n; i++) {\n for (let j = i + 1; j < n; j++) {\n if (matrix[i]![j]! 
>= threshold) {\n union(i, j);\n }\n }\n }\n\n // Group items by their root\n const groups = new Map<number, MemoryItem[]>();\n for (let i = 0; i < n; i++) {\n const root = find(i);\n let group = groups.get(root);\n if (!group) {\n group = [];\n groups.set(root, group);\n }\n group.push(items[i]!);\n }\n\n const clusters: MemoryItem[][] = [];\n const noise: MemoryItem[] = [];\n for (const group of groups.values()) {\n if (group.length === 1) {\n noise.push(group[0]!);\n } else {\n clusters.push(group);\n }\n }\n return { clusters, noise };\n}\n","/**\n * Types for the compounding module.\n *\n * CctPattern represents a cross-cutting pattern synthesized\n * from multiple similar lessons.\n */\n\nimport { createHash } from 'node:crypto';\nimport { z } from 'zod';\n\nimport type { MemoryItem } from '../memory/index.js';\n\n/** Relative path to CCT patterns file from repo root */\nexport const CCT_PATTERNS_PATH = '.claude/lessons/cct-patterns.jsonl';\n\n/** Schema for a cross-cutting pattern */\nexport const CctPatternSchema = z.object({\n id: z.string().regex(/^CCT-[a-f0-9]{8}$/),\n name: z.string().min(1),\n description: z.string().min(1),\n frequency: z.number().int().positive(),\n testable: z.boolean(),\n testApproach: z.string().optional(),\n sourceIds: z.array(z.string()).min(1),\n created: z.string(), // ISO8601\n});\n\n/** Inferred type from CctPatternSchema */\nexport type CctPattern = z.infer<typeof CctPatternSchema>;\n\n/** Result from clustering operation */\nexport interface ClusterResult {\n /** Groups of similar items */\n clusters: MemoryItem[][];\n /** Items that didn't fit any cluster */\n noise: MemoryItem[];\n}\n\n/**\n * Generate a CCT pattern ID from a cluster ID string.\n * Format: \"CCT-\" + first 8 hex chars of SHA-256 hash.\n */\nexport function generateCctId(input: string): string {\n const hash = createHash('sha256').update(input).digest('hex');\n return `CCT-${hash.slice(0, 8)}`;\n}\n","/**\n * I/O module for CctPattern persistence.\n *\n * Append-only JSONL storage, following the same pattern as\n * src/memory/storage/jsonl.ts.\n */\n\nimport { appendFile, mkdir, readFile } from 'node:fs/promises';\nimport { dirname, join } from 'node:path';\n\nimport { CCT_PATTERNS_PATH, CctPatternSchema, type CctPattern } from './types.js';\n\n/**\n * Read all CCT patterns from the JSONL file.\n *\n * @param repoRoot - Repository root directory\n * @returns Array of CctPattern objects\n */\nexport async function readCctPatterns(repoRoot: string): Promise<CctPattern[]> {\n const filePath = join(repoRoot, CCT_PATTERNS_PATH);\n\n let content: string;\n try {\n content = await readFile(filePath, 'utf-8');\n } catch (err) {\n if ((err as NodeJS.ErrnoException).code === 'ENOENT') {\n return [];\n }\n throw err;\n }\n\n const patterns: CctPattern[] = [];\n const lines = content.split('\\n');\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed) continue;\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(trimmed);\n } catch {\n continue; // Skip malformed JSONL lines\n }\n const result = CctPatternSchema.safeParse(parsed);\n if (result.success) {\n patterns.push(result.data);\n }\n }\n\n return patterns;\n}\n\n/**\n * Append CCT patterns to the JSONL file (append-only).\n *\n * @param repoRoot - Repository root directory\n * @param patterns - Patterns to append\n */\nexport async function writeCctPatterns(repoRoot: string, patterns: CctPattern[]): Promise<void> {\n const filePath = join(repoRoot, CCT_PATTERNS_PATH);\n await mkdir(dirname(filePath), { 
recursive: true });\n\n const lines = patterns.map((p) => JSON.stringify(p) + '\\n').join('');\n await appendFile(filePath, lines, 'utf-8');\n}\n","/**\n * Synthesis module for extracting cross-cutting patterns from clusters.\n *\n * Takes a cluster of similar memory items and produces a CctPattern\n * summarizing the common theme.\n */\n\nimport type { MemoryItem } from '../memory/index.js';\nimport { generateCctId, type CctPattern } from './types.js';\n\n/**\n * Synthesize a CctPattern from a cluster of similar memory items.\n *\n * @param cluster - Group of similar memory items\n * @param clusterId - Identifier for this cluster (used for ID generation)\n * @returns A CctPattern summarizing the cluster\n */\nexport function synthesizePattern(cluster: MemoryItem[], clusterId: string): CctPattern {\n const id = generateCctId(clusterId);\n const frequency = cluster.length;\n const sourceIds = cluster.map((item) => item.id);\n\n // Collect all tags with frequency counts\n const tagCounts = new Map<string, number>();\n for (const item of cluster) {\n for (const tag of item.tags) {\n tagCounts.set(tag, (tagCounts.get(tag) ?? 0) + 1);\n }\n }\n\n // Sort tags by frequency (descending)\n const sortedTags = [...tagCounts.entries()]\n .sort((a, b) => b[1] - a[1])\n .map(([tag]) => tag);\n\n // Build name from top tags or first insight\n const name = sortedTags.length > 0\n ? sortedTags.slice(0, 3).join(', ')\n : cluster[0]!.insight.slice(0, 50);\n\n // Build description from all insights\n const description = cluster.map((item) => item.insight).join('; ');\n\n // Determine testability: true if any item has high severity or evidence\n const hasHighSeverity = cluster.some(\n (item) => 'severity' in item && item.severity === 'high'\n );\n const hasEvidence = cluster.some(\n (item) => 'evidence' in item && item.evidence\n );\n const testable = hasHighSeverity || hasEvidence;\n\n // Generate test approach when testable\n const testApproach = testable\n ? `Verify pattern: ${name}. 
Check ${frequency} related lesson(s).`\n : undefined;\n\n return {\n id,\n name,\n description,\n frequency,\n testable,\n ...(testApproach !== undefined && { testApproach }),\n sourceIds,\n created: new Date().toISOString(),\n };\n}\n","/**\n * Compounding module barrel export.\n *\n * Provides clustering, synthesis, and I/O for cross-cutting patterns.\n */\n\nexport { buildSimilarityMatrix, clusterBySimilarity } from './clustering.js';\nexport { readCctPatterns, writeCctPatterns } from './io.js';\nexport { synthesizePattern } from './synthesis.js';\nexport { CCT_PATTERNS_PATH, CctPatternSchema, generateCctId } from './types.js';\nexport type { CctPattern, ClusterResult } from './types.js';\n","/**\n * Vector search with cosine similarity\n *\n * Embeds query text and ranks lessons by semantic similarity.\n * Uses SQLite cache to avoid recomputing embeddings.\n */\n\nimport { readCctPatterns, type CctPattern } from '../../compound/index.js';\nimport { embedText } from '../embeddings/index.js';\nimport { isModelAvailable } from '../embeddings/model.js';\nimport { contentHash, getCachedEmbeddingsBulk, getCachedInsightEmbedding, readAllFromSqlite, setCachedEmbedding, setCachedInsightEmbedding, syncIfNeeded } from '../storage/index.js';\nimport type { MemoryItem } from '../types.js';\n\n/**\n * In-memory embedding cache for CCT patterns.\n * CCT patterns don't have rows in the SQLite lessons table,\n * so setCachedEmbedding (UPDATE-only) is a no-op for them.\n * This Map caches embeddings keyed by \"id:contentHash\".\n */\nconst cctEmbeddingCache = new Map<string, Float32Array>();\n\n/** Clear the CCT embedding cache. Exported for testing. */\nexport function clearCctEmbeddingCache(): void {\n cctEmbeddingCache.clear();\n}\n\n/**\n * Calculate cosine similarity between two vectors.\n * Returns value between -1 (opposite) and 1 (identical).\n */\nexport function cosineSimilarity(a: ArrayLike<number>, b: ArrayLike<number>): number {\n if (a.length !== b.length) {\n throw new Error('Vectors must have same length');\n }\n\n let dotProduct = 0;\n let normA = 0;\n let normB = 0;\n\n for (let i = 0; i < a.length; i++) {\n dotProduct += a[i]! * b[i]!;\n normA += a[i]! * a[i]!;\n normB += b[i]! 
* b[i]!;\n }\n\n const magnitude = Math.sqrt(normA) * Math.sqrt(normB);\n if (magnitude === 0) return 0;\n\n return dotProduct / magnitude;\n}\n\n/**\n * Memory item with similarity score.\n * The `lesson` field holds any MemoryItem type (not just Lesson).\n * Field name kept for backward compatibility.\n */\nexport interface ScoredLesson {\n lesson: MemoryItem;\n score: number;\n}\n\n/** Options for vector search */\nexport interface SearchVectorOptions {\n /** Maximum number of results to return (default: 10) */\n limit?: number;\n}\n\n/** Default number of results to return */\nconst DEFAULT_LIMIT = 10;\n\n/**\n * Search lessons by vector similarity to query text.\n * Returns top N lessons sorted by similarity score (descending).\n * Uses embedding cache to avoid recomputing embeddings.\n */\n/**\n * Convert a CctPattern to a MemoryItem-like shape for search results.\n */\nfunction cctToMemoryItem(pattern: CctPattern): MemoryItem {\n return {\n id: pattern.id,\n type: 'lesson',\n trigger: pattern.name,\n insight: pattern.description,\n tags: [],\n source: 'manual',\n context: { tool: 'compound', intent: 'synthesis' },\n created: pattern.created,\n confirmed: true,\n supersedes: [],\n related: pattern.sourceIds,\n };\n}\n\nexport async function searchVector(\n repoRoot: string,\n query: string,\n options?: SearchVectorOptions\n): Promise<ScoredLesson[]> {\n const limit = options?.limit ?? DEFAULT_LIMIT;\n // Ensure SQLite cache is fresh, then read from it (avoids redundant JSONL parse)\n await syncIfNeeded(repoRoot);\n const items = readAllFromSqlite(repoRoot);\n\n // Read CCT patterns if available\n let cctPatterns: CctPattern[] = [];\n try {\n cctPatterns = await readCctPatterns(repoRoot);\n } catch {\n // File doesn't exist or is unreadable — proceed without CCT patterns\n }\n\n if (items.length === 0 && cctPatterns.length === 0) return [];\n\n // Embed the query\n const queryVector = await embedText(query);\n\n // Bulk-read all cached embeddings in one query (instead of N individual reads)\n const cachedEmbeddings = getCachedEmbeddingsBulk(repoRoot);\n\n // Score each item, skipping invalidated ones\n const scored: ScoredLesson[] = [];\n for (const item of items) {\n // Skip invalidated items\n if (item.invalidatedAt) continue;\n\n try {\n const itemText = `${item.trigger} ${item.insight}`;\n const hash = contentHash(item.trigger, item.insight);\n\n // Try bulk cache first\n const cached = cachedEmbeddings.get(item.id);\n let itemVector: Float32Array;\n\n if (cached && cached.hash === hash) {\n itemVector = cached.vector;\n } else {\n // Cache miss or stale - compute and store\n itemVector = await embedText(itemText);\n setCachedEmbedding(repoRoot, item.id, itemVector, hash);\n }\n\n const score = cosineSimilarity(queryVector, itemVector);\n scored.push({ lesson: item, score });\n } catch {\n // Skip items that fail embedding — return partial results\n continue;\n }\n }\n\n // Score CCT patterns (use in-memory cache since they lack SQLite rows)\n for (const pattern of cctPatterns) {\n try {\n const text = `${pattern.name} ${pattern.description}`;\n const hash = contentHash(pattern.name, pattern.description);\n const cacheKey = `${pattern.id}:${hash}`;\n\n let vec = cctEmbeddingCache.get(cacheKey);\n if (!vec) {\n vec = await embedText(text);\n cctEmbeddingCache.set(cacheKey, vec);\n }\n\n const score = cosineSimilarity(queryVector, vec);\n scored.push({ lesson: cctToMemoryItem(pattern), score });\n } catch {\n continue;\n }\n }\n\n // Sort by score descending and take top N\n 
scored.sort((a, b) => b.score - a.score);\n return scored.slice(0, limit);\n}\n\nexport interface SimilarLesson {\n item: MemoryItem;\n score: number;\n}\n\nexport interface FindSimilarOptions {\n threshold?: number;\n excludeId?: string;\n /** Pre-loaded items to search. When provided, skips readMemoryItems(). */\n items?: MemoryItem[];\n}\n\nconst DEFAULT_THRESHOLD = 0.80;\n\n/**\n * Find lessons semantically similar to the given text.\n * Embeds using insight text only (not trigger) to avoid noise from generic triggers.\n * Does NOT include CCT patterns.\n */\nexport async function findSimilarLessons(\n repoRoot: string,\n text: string,\n options?: FindSimilarOptions\n): Promise<SimilarLesson[]> {\n const threshold = options?.threshold ?? DEFAULT_THRESHOLD;\n const excludeId = options?.excludeId;\n\n if (!isModelAvailable()) return [];\n\n let items: MemoryItem[];\n if (options?.items) {\n items = options.items;\n } else {\n await syncIfNeeded(repoRoot);\n items = readAllFromSqlite(repoRoot);\n }\n if (items.length === 0) return [];\n\n const queryVector = await embedText(text);\n\n const scored: SimilarLesson[] = [];\n for (const item of items) {\n if (item.invalidatedAt) continue;\n if (excludeId && item.id === excludeId) continue;\n\n try {\n // Use insight ONLY for embedding (NOT trigger + insight).\n // Stored in separate columns to avoid cache conflicts with searchVector.\n const hash = contentHash(item.insight, '');\n let itemVector = getCachedInsightEmbedding(repoRoot, item.id, hash);\n\n if (!itemVector) {\n itemVector = await embedText(item.insight);\n setCachedInsightEmbedding(repoRoot, item.id, itemVector, hash);\n }\n\n const score = cosineSimilarity(queryVector, itemVector);\n if (score >= threshold) {\n scored.push({ item, score });\n }\n } catch {\n continue;\n }\n }\n\n scored.sort((a, b) => b.score - a.score);\n return scored;\n}\n","/**\n * Shared utility functions for the Learning Agent.\n */\n\n/** Milliseconds per day for time calculations */\nexport const MS_PER_DAY = 24 * 60 * 60 * 1000;\n\n/**\n * Calculate the age of a lesson in days from its created date.\n *\n * @param lesson - Object with a created field (ISO8601 string)\n * @returns Age in days (integer, rounded down)\n */\nexport function getLessonAgeDays(lesson: { created: string }): number {\n const created = new Date(lesson.created).getTime();\n const now = Date.now();\n return Math.floor((now - created) / MS_PER_DAY);\n}\n","/**\n * Multi-factor memory item ranking system\n *\n * Combines vector similarity with semantic boosts:\n * - Severity: high=1.5, medium=1.0, low=0.8\n * - Recency: 1.2 for items ≤30 days old\n * - Confirmation: 1.3 for confirmed items\n */\n\nimport type { MemoryItem } from '../types.js';\nimport { getLessonAgeDays } from '../../utils.js';\n\nimport type { ScoredLesson } from './vector.js';\n\n/** Lesson/memory item with final ranked score */\nexport interface RankedLesson extends ScoredLesson {\n finalScore?: number;\n}\n\nconst RECENCY_THRESHOLD_DAYS = 30;\nconst HIGH_SEVERITY_BOOST = 1.5;\nconst MEDIUM_SEVERITY_BOOST = 1.0;\nconst LOW_SEVERITY_BOOST = 0.8;\nconst RECENCY_BOOST = 1.2;\nconst CONFIRMATION_BOOST = 1.3;\n\n/**\n * Maximum combined boost multiplier.\n *\n * Without clamping, the max boost is 1.5 * 1.2 * 1.3 = 2.34x, which lets\n * a 0.4 similarity item outrank a 0.9 similarity item. 
With a 1.8 cap,\n * an item needs at least ~0.53 similarity with all boosts to beat a 0.95\n * unboosted match, keeping semantic relevance as the primary ranking signal.\n */\nconst MAX_COMBINED_BOOST = 1.8;\n\n/**\n * Calculate severity boost based on item severity.\n * Items without severity get 1.0 (medium boost).\n */\nexport function severityBoost(item: MemoryItem): number {\n switch (item.severity) {\n case 'high':\n return HIGH_SEVERITY_BOOST;\n case 'medium':\n return MEDIUM_SEVERITY_BOOST;\n case 'low':\n return LOW_SEVERITY_BOOST;\n default:\n return MEDIUM_SEVERITY_BOOST;\n }\n}\n\n/**\n * Calculate recency boost based on item age.\n * Items ≤30 days old get 1.2, older get 1.0.\n */\nexport function recencyBoost(item: MemoryItem): number {\n const ageDays = getLessonAgeDays(item);\n return ageDays <= RECENCY_THRESHOLD_DAYS ? RECENCY_BOOST : 1.0;\n}\n\n/**\n * Calculate confirmation boost.\n * Confirmed items get 1.3, unconfirmed get 1.0.\n */\nexport function confirmationBoost(item: MemoryItem): number {\n return item.confirmed ? CONFIRMATION_BOOST : 1.0;\n}\n\n/**\n * Calculate combined score for a memory item.\n * score = vectorSimilarity * min(severity * recency * confirmation, MAX_COMBINED_BOOST)\n */\nexport function calculateScore(item: MemoryItem, vectorSimilarity: number): number {\n const boost = Math.min(\n severityBoost(item) * recencyBoost(item) * confirmationBoost(item),\n MAX_COMBINED_BOOST,\n );\n return vectorSimilarity * boost;\n}\n\n/**\n * Rank lessons by combined score.\n * Returns new array sorted by finalScore descending.\n *\n * Works with ScoredLesson[] (uses .lesson field).\n */\nexport function rankLessons(lessons: ScoredLesson[]): RankedLesson[] {\n return lessons\n .map((scored) => ({\n ...scored,\n finalScore: calculateScore(scored.lesson, scored.score),\n }))\n .sort((a, b) => (b.finalScore ?? 0) - (a.finalScore ?? 0));\n}\n\n","/**\n * Pre-warm lesson embedding cache.\n *\n * Embeds all lessons that are missing or have stale cached embeddings.\n * Called after `ca init` or index rebuild so the first `ca search` is fast.\n */\n\nimport { isModelAvailable } from '../embeddings/model.js';\nimport { embedText } from '../embeddings/index.js';\nimport {\n contentHash,\n getCachedEmbeddingsBulk,\n readAllFromSqlite,\n setCachedEmbedding,\n syncIfNeeded,\n} from '../storage/index.js';\n\nexport interface PreWarmResult {\n embedded: number;\n skipped: number;\n}\n\n/**\n * Pre-warm lesson embeddings so the first search is fast.\n *\n * 1. Checks model availability (returns early if unavailable)\n * 2. Syncs SQLite from JSONL\n * 3. Reads all non-invalidated items\n * 4. Finds items with missing or stale cached embeddings\n * 5. 
Embeds and caches them\n *\n * @param repoRoot - Absolute path to repository root\n * @returns Counts of embedded and skipped items\n */\nexport async function preWarmLessonEmbeddings(repoRoot: string): Promise<PreWarmResult> {\n if (!isModelAvailable()) {\n return { embedded: 0, skipped: 0 };\n }\n\n await syncIfNeeded(repoRoot);\n const items = readAllFromSqlite(repoRoot);\n\n if (items.length === 0) {\n return { embedded: 0, skipped: 0 };\n }\n\n const cached = getCachedEmbeddingsBulk(repoRoot);\n\n const toEmbed: Array<{ id: string; text: string; hash: string }> = [];\n for (const item of items) {\n const hash = contentHash(item.trigger, item.insight);\n const entry = cached.get(item.id);\n if (!entry || entry.hash !== hash) {\n toEmbed.push({ id: item.id, text: `${item.trigger} ${item.insight}`, hash });\n }\n }\n\n if (toEmbed.length === 0) {\n return { embedded: 0, skipped: items.length };\n }\n\n let embedded = 0;\n for (const { id, text, hash } of toEmbed) {\n const vector = await embedText(text);\n setCachedEmbedding(repoRoot, id, vector, hash);\n embedded++;\n }\n\n return { embedded, skipped: items.length - embedded };\n}\n","/**\n * Search module - Vector similarity and ranking\n *\n * Provides semantic search with multi-factor ranking:\n * - Vector similarity (cosine)\n * - Severity boost\n * - Recency boost\n * - Confirmation boost\n */\n\n// Vector search\nexport { cosineSimilarity, findSimilarLessons, searchVector } from './vector.js';\nexport type { FindSimilarOptions, ScoredLesson, SearchVectorOptions, SimilarLesson } from './vector.js';\n\n// Ranking\nexport {\n calculateScore,\n confirmationBoost,\n rankLessons,\n recencyBoost,\n severityBoost,\n} from './ranking.js';\nexport type { RankedLesson } from './ranking.js';\n\n// Pre-warm\nexport { preWarmLessonEmbeddings } from './prewarm.js';\nexport type { PreWarmResult } from './prewarm.js';\n\n// Hybrid search\nexport {\n CANDIDATE_MULTIPLIER,\n DEFAULT_TEXT_WEIGHT,\n DEFAULT_VECTOR_WEIGHT,\n MIN_HYBRID_SCORE,\n mergeHybridResults,\n mergeHybridScores,\n normalizeBm25Rank,\n} from './hybrid.js';\nexport type { GenericScoredItem, HybridMergeOptions, ScoredKeywordResult } from './hybrid.js';\n","/**\n * SQLite schema definition for knowledge database.\n *\n * The knowledge database stores documentation chunks with FTS5 search.\n * When KNOWLEDGE_SCHEMA_VERSION changes, the DB file is deleted and recreated.\n */\n\nimport type { Database as DatabaseType } from 'better-sqlite3';\n\n/**\n * Schema version for the knowledge SQLite cache.\n * Bump this when making incompatible schema changes.\n */\nexport const KNOWLEDGE_SCHEMA_VERSION = 2;\n\n/** SQL schema for knowledge database with FTS5 full-text search */\nconst SCHEMA_SQL = `\n CREATE TABLE IF NOT EXISTS chunks (\n id TEXT PRIMARY KEY,\n file_path TEXT NOT NULL,\n start_line INTEGER NOT NULL,\n end_line INTEGER NOT NULL,\n content_hash TEXT NOT NULL,\n text TEXT NOT NULL,\n embedding BLOB,\n model TEXT,\n updated_at TEXT NOT NULL\n );\n\n CREATE VIRTUAL TABLE IF NOT EXISTS chunks_fts USING fts5(\n text,\n content='chunks', content_rowid='rowid'\n );\n\n CREATE TRIGGER IF NOT EXISTS chunks_ai AFTER INSERT ON chunks BEGIN\n INSERT INTO chunks_fts(rowid, text)\n VALUES (new.rowid, new.text);\n END;\n\n CREATE TRIGGER IF NOT EXISTS chunks_ad AFTER DELETE ON chunks BEGIN\n INSERT INTO chunks_fts(chunks_fts, rowid, text)\n VALUES ('delete', old.rowid, old.text);\n END;\n\n CREATE TRIGGER IF NOT EXISTS chunks_au AFTER UPDATE ON chunks BEGIN\n INSERT INTO chunks_fts(chunks_fts, 
rowid, text)\n VALUES ('delete', old.rowid, old.text);\n INSERT INTO chunks_fts(rowid, text)\n VALUES (new.rowid, new.text);\n END;\n\n CREATE INDEX IF NOT EXISTS idx_chunks_file_path ON chunks(file_path);\n\n CREATE TABLE IF NOT EXISTS metadata (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL\n );\n`;\n\n/**\n * Create the knowledge database schema and set the version pragma.\n * @param database - SQLite database instance\n */\nexport function createKnowledgeSchema(database: DatabaseType): void {\n database.exec(SCHEMA_SQL);\n database.pragma(`user_version = ${KNOWLEDGE_SCHEMA_VERSION}`);\n}\n","/**\n * Knowledge SQLite database connection management.\n *\n * Separate singleton map from the lessons DB -- completely independent.\n */\n\nimport { mkdirSync, unlinkSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport type { Database as DatabaseType } from 'better-sqlite3';\n\nimport type { KnowledgeDbOptions } from './types.js';\nimport { getDatabaseConstructor } from '../sqlite/availability.js';\nimport { createKnowledgeSchema, KNOWLEDGE_SCHEMA_VERSION } from './schema.js';\n\n/** Relative path to knowledge database file from repo root */\nexport const KNOWLEDGE_DB_PATH = '.claude/.cache/knowledge.sqlite';\n\n/** Knowledge database connections keyed by resolved DB path */\nconst knowledgeDbMap = new Map<string, DatabaseType>();\n\n/**\n * Open the knowledge SQLite database connection.\n * If the database has an older schema version, it is deleted and recreated.\n * @param repoRoot - Absolute path to repository root\n * @param options - Database options (e.g., inMemory for testing)\n * @returns Database instance\n */\nexport function openKnowledgeDb(\n repoRoot: string,\n options: KnowledgeDbOptions = {}\n): DatabaseType {\n const { inMemory = false } = options;\n\n const key = inMemory ? 
`:memory:${repoRoot}` : join(repoRoot, KNOWLEDGE_DB_PATH);\n\n const cached = knowledgeDbMap.get(key);\n if (cached) {\n return cached;\n }\n\n const Database = getDatabaseConstructor();\n let database: DatabaseType;\n\n if (inMemory) {\n database = new Database(':memory:');\n } else {\n const dir = dirname(key);\n mkdirSync(dir, { recursive: true });\n database = new Database(key);\n\n const version = database.pragma('user_version', { simple: true }) as number;\n if (version !== 0 && version !== KNOWLEDGE_SCHEMA_VERSION) {\n database.close();\n try { unlinkSync(key); } catch { /* ENOENT is fine */ }\n database = new Database(key);\n }\n\n database.pragma('journal_mode = WAL');\n }\n\n createKnowledgeSchema(database);\n knowledgeDbMap.set(key, database);\n return database;\n}\n\n/**\n * Close all knowledge SQLite database connections.\n */\nexport function closeKnowledgeDb(): void {\n for (const database of knowledgeDbMap.values()) {\n database.close();\n }\n knowledgeDbMap.clear();\n}\n","import { createHash } from 'node:crypto';\n\nexport interface Chunk {\n /** Unique ID: SHA-256 of filePath + startLine + endLine */\n id: string;\n /** Relative path from repo root */\n filePath: string;\n /** 1-indexed start line */\n startLine: number;\n /** 1-indexed end line (inclusive) */\n endLine: number;\n /** The chunk text content */\n text: string;\n /** SHA-256 of text content for cache invalidation */\n contentHash: string;\n}\n\nexport interface ChunkOptions {\n /** Target chunk size in characters (default: 1600 ~= 400 tokens) */\n targetSize?: number;\n /** Overlap size in characters (default: 320 ~= 80 tokens) */\n overlapSize?: number;\n}\n\n/** Supported file extensions for chunking */\nexport const SUPPORTED_EXTENSIONS: ReadonlySet<string> = new Set([\n '.md', '.txt', '.rst', '.ts', '.py', '.js', '.tsx', '.jsx',\n]);\n\n/** Code file extensions (subset of SUPPORTED_EXTENSIONS) */\nexport const CODE_EXTENSIONS: ReadonlySet<string> = new Set([\n '.ts', '.tsx', '.js', '.jsx', '.py',\n]);\n\n/** Generate chunk ID from file path and line range */\nexport function generateChunkId(filePath: string, startLine: number, endLine: number): string {\n return createHash('sha256').update(`${filePath}:${startLine}:${endLine}`).digest('hex').slice(0, 16);\n}\n\n/** Generate content hash */\nexport function chunkContentHash(text: string): string {\n return createHash('sha256').update(text).digest('hex');\n}\n","/**\n * Embedding cache operations for knowledge chunks.\n */\n\nimport type { Database as DatabaseType } from 'better-sqlite3';\n\nimport type { CachedEmbeddingData } from '../sqlite/types.js';\nimport { openKnowledgeDb } from './connection.js';\nimport { chunkContentHash } from '../../knowledge/types.js';\n\nexport { chunkContentHash };\n\n/**\n * Get cached embedding for a knowledge chunk.\n * @param repoRoot - Absolute path to repository root\n * @param chunkId - ID of the chunk\n * @param expectedHash - Optional content hash to validate cache freshness\n * @returns Float32Array embedding or null if not cached\n */\nexport function getCachedChunkEmbedding(\n repoRoot: string,\n chunkId: string,\n expectedHash?: string\n): Float32Array | null {\n const database = openKnowledgeDb(repoRoot);\n\n const row = database\n .prepare('SELECT embedding, content_hash FROM chunks WHERE id = ?')\n .get(chunkId) as { embedding: Buffer | null; content_hash: string | null } | undefined;\n\n if (!row || !row.embedding || !row.content_hash) {\n return null;\n }\n\n if (expectedHash && row.content_hash !== 
expectedHash) {\n return null;\n }\n\n return new Float32Array(\n row.embedding.buffer,\n row.embedding.byteOffset,\n row.embedding.byteLength / 4\n );\n}\n\n/**\n * Cache embedding for a knowledge chunk (UPDATE-only).\n * The chunk row must already exist. If it doesn't, this is a silent no-op.\n * @param repoRoot - Absolute path to repository root\n * @param chunkId - ID of the chunk\n * @param embedding - Embedding vector\n * @param hash - Content hash for cache validation\n */\nexport function setCachedChunkEmbedding(\n repoRoot: string,\n chunkId: string,\n embedding: Float32Array | number[],\n hash: string\n): void {\n const database = openKnowledgeDb(repoRoot);\n\n const float32 = embedding instanceof Float32Array ? embedding : new Float32Array(embedding);\n const buffer = Buffer.from(float32.buffer, float32.byteOffset, float32.byteLength);\n\n database\n .prepare('UPDATE chunks SET embedding = ?, content_hash = ? WHERE id = ?')\n .run(buffer, hash, chunkId);\n}\n\n/**\n * Collect all cached chunk embeddings from the database.\n * @param database - SQLite database instance\n * @returns Map of chunk ID to cached embedding data\n */\nexport function collectCachedChunkEmbeddings(\n database: DatabaseType\n): Map<string, CachedEmbeddingData> {\n const cache = new Map<string, CachedEmbeddingData>();\n const rows = database\n .prepare('SELECT id, embedding, content_hash FROM chunks WHERE embedding IS NOT NULL')\n .all() as Array<{ id: string; embedding: Buffer; content_hash: string | null }>;\n\n for (const row of rows) {\n if (row.embedding && row.content_hash) {\n cache.set(row.id, { embedding: row.embedding, contentHash: row.content_hash });\n }\n }\n return cache;\n}\n","/**\n * Knowledge chunk search operations using FTS5 full-text search.\n */\n\nimport type { KnowledgeChunk, ScoredChunk } from './types.js';\nimport { openKnowledgeDb } from './connection.js';\nimport { sanitizeFtsQuery } from '../sqlite/search.js';\nimport { normalizeBm25Rank } from '../../search/hybrid.js';\n\n/** Internal row type from SQLite query */\ninterface ChunkRow {\n id: string;\n file_path: string;\n start_line: number;\n end_line: number;\n content_hash: string;\n text: string;\n model: string | null;\n updated_at: string;\n}\n\n/** Row type with FTS5 rank score */\ninterface ScoredChunkRow extends ChunkRow {\n rank: number;\n}\n\nfunction rowToChunk(row: ChunkRow): KnowledgeChunk {\n const chunk: KnowledgeChunk = {\n id: row.id,\n filePath: row.file_path,\n startLine: row.start_line,\n endLine: row.end_line,\n contentHash: row.content_hash,\n text: row.text,\n updatedAt: row.updated_at,\n };\n if (row.model !== null) {\n chunk.model = row.model;\n }\n return chunk;\n}\n\n/**\n * Search knowledge chunks with normalized BM25 scores.\n * @param repoRoot - Absolute path to repository root\n * @param query - Search query string\n * @param limit - Maximum number of results\n * @returns Scored chunks with BM25 scores normalized to 0-1\n */\nexport function searchChunksKeywordScored(\n repoRoot: string,\n query: string,\n limit: number\n): ScoredChunk[] {\n const database = openKnowledgeDb(repoRoot);\n\n const sanitized = sanitizeFtsQuery(query);\n if (sanitized === '') return [];\n\n try {\n // ORDER BY fts.rank: BM25 ranks are negative (lower = more relevant)\n const rows = database\n .prepare(\n `SELECT c.*, fts.rank\n FROM chunks c\n JOIN chunks_fts fts ON c.rowid = fts.rowid\n WHERE chunks_fts MATCH ?\n ORDER BY fts.rank\n LIMIT ?`\n )\n .all(sanitized, limit) as ScoredChunkRow[];\n\n return rows.map((row) 
=> ({\n chunk: rowToChunk(row),\n score: normalizeBm25Rank(row.rank),\n }));\n } catch (err) {\n const message = err instanceof Error ? err.message : 'Unknown FTS5 error';\n console.error(`[compound-agent] knowledge scored search error: ${message}`);\n return [];\n }\n}\n","/**\n * Knowledge chunk sync operations: upsert, delete, metadata tracking.\n */\n\nimport type { KnowledgeChunk } from './types.js';\nimport { openKnowledgeDb } from './connection.js';\n\n/**\n * Upsert chunks into the knowledge database.\n * Uses INSERT OR REPLACE for conflict resolution on id.\n * @param repoRoot - Absolute path to repository root\n * @param chunks - Chunks to upsert\n * @param embeddings - Optional map of chunk ID to embedding vector\n */\nexport function upsertChunks(\n repoRoot: string,\n chunks: KnowledgeChunk[],\n embeddings?: Map<string, Float32Array>\n): void {\n if (chunks.length === 0) return;\n\n const database = openKnowledgeDb(repoRoot);\n\n const insert = database.prepare(`\n INSERT OR REPLACE INTO chunks (id, file_path, start_line, end_line, content_hash, text, embedding, model, updated_at)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)\n `);\n\n const upsertMany = database.transaction((items: KnowledgeChunk[]) => {\n for (const chunk of items) {\n const emb = embeddings?.get(chunk.id);\n const embBuffer = emb\n ? Buffer.from(emb.buffer, emb.byteOffset, emb.byteLength)\n : null;\n\n insert.run(\n chunk.id,\n chunk.filePath,\n chunk.startLine,\n chunk.endLine,\n chunk.contentHash,\n chunk.text,\n embBuffer,\n chunk.model ?? null,\n chunk.updatedAt\n );\n }\n });\n\n upsertMany(chunks);\n}\n\n/**\n * Delete all chunks for the given file paths.\n * @param repoRoot - Absolute path to repository root\n * @param filePaths - File paths whose chunks should be removed\n */\nexport function deleteChunksByFilePath(repoRoot: string, filePaths: string[]): void {\n if (filePaths.length === 0) return;\n\n const database = openKnowledgeDb(repoRoot);\n\n const del = database.prepare('DELETE FROM chunks WHERE file_path = ?');\n\n const deleteMany = database.transaction((paths: string[]) => {\n for (const path of paths) {\n del.run(path);\n }\n });\n\n deleteMany(filePaths);\n}\n\n/**\n * Get all distinct file paths currently indexed in the knowledge database.\n * @param repoRoot - Absolute path to repository root\n * @returns Array of file paths\n */\nexport function getIndexedFilePaths(repoRoot: string): string[] {\n const database = openKnowledgeDb(repoRoot);\n\n const rows = database\n .prepare('SELECT DISTINCT file_path FROM chunks')\n .all() as Array<{ file_path: string }>;\n\n return rows.map((r) => r.file_path);\n}\n\n/**\n * Get the last index time from metadata.\n * @param repoRoot - Absolute path to repository root\n * @returns ISO timestamp or null if never indexed\n */\nexport function getLastIndexTime(repoRoot: string): string | null {\n const database = openKnowledgeDb(repoRoot);\n\n const row = database\n .prepare(\"SELECT value FROM metadata WHERE key = 'last_index_time'\")\n .get() as { value: string } | undefined;\n\n return row?.value ?? 
null;\n}\n\n/**\n * Get total chunk count across all files.\n * @param repoRoot - Absolute path to repository root\n */\nexport function getChunkCount(repoRoot: string): number {\n const database = openKnowledgeDb(repoRoot);\n const row = database.prepare('SELECT COUNT(*) as cnt FROM chunks').get() as { cnt: number };\n return row.cnt;\n}\n\n/**\n * Get chunk count for a specific file path.\n * @param repoRoot - Absolute path to repository root\n * @param filePath - Relative file path\n */\nexport function getChunkCountByFilePath(repoRoot: string, filePath: string): number {\n const database = openKnowledgeDb(repoRoot);\n const row = database\n .prepare('SELECT COUNT(*) as cnt FROM chunks WHERE file_path = ?')\n .get(filePath) as { cnt: number };\n return row.cnt;\n}\n\n/**\n * Set the last index time in metadata.\n * @param repoRoot - Absolute path to repository root\n * @param time - ISO timestamp\n */\nexport function setLastIndexTime(repoRoot: string, time: string): void {\n const database = openKnowledgeDb(repoRoot);\n\n database\n .prepare(\"INSERT OR REPLACE INTO metadata (key, value) VALUES ('last_index_time', ?)\")\n .run(time);\n}\n","/**\n * Knowledge SQLite storage module - documentation chunks with FTS5 search.\n */\n\n// Types\nexport type { KnowledgeChunk, KnowledgeDbOptions, ScoredChunk } from './types.js';\n\n// Connection\nexport { openKnowledgeDb, closeKnowledgeDb, KNOWLEDGE_DB_PATH } from './connection.js';\n\n// Schema\nexport { KNOWLEDGE_SCHEMA_VERSION } from './schema.js';\n\n// Cache\nexport {\n chunkContentHash,\n collectCachedChunkEmbeddings,\n getCachedChunkEmbedding,\n setCachedChunkEmbedding,\n} from './cache.js';\n\n// Search\nexport { searchChunksKeywordScored } from './search.js';\n\n// Sync\nexport {\n upsertChunks,\n deleteChunksByFilePath,\n getIndexedFilePaths,\n getChunkCount,\n getChunkCountByFilePath,\n getLastIndexTime,\n setLastIndexTime,\n} from './sync.js';\n","import { extname } from 'node:path';\n\nimport {\n chunkContentHash,\n generateChunkId,\n CODE_EXTENSIONS,\n type Chunk,\n type ChunkOptions,\n} from './types.js';\n\nconst DEFAULT_TARGET_SIZE = 1600;\nconst DEFAULT_OVERLAP_SIZE = 320;\n\n/** Check if content looks binary (contains null bytes). 
*/\nfunction isBinary(content: string): boolean {\n return content.includes('\\0');\n}\n\n/**\n * Split content into logical sections based on file type.\n * Returns arrays of line groups, where each group is an array of\n * { lineNumber (1-indexed), text } objects.\n */\nfunction splitIntoSections(\n fileLines: string[],\n ext: string,\n): { lineNumber: number; text: string }[][] {\n if (ext === '.md') {\n return splitMarkdown(fileLines);\n }\n if (ext === '.rst') {\n return splitParagraphs(fileLines);\n }\n if (CODE_EXTENSIONS.has(ext)) {\n return splitCode(fileLines);\n }\n // Plain text: split on double newlines (paragraph boundaries)\n return splitParagraphs(fileLines);\n}\n\n/**\n * Split markdown into sections at H2+ headers and paragraph boundaries.\n * Keeps fenced code blocks intact.\n */\nfunction splitMarkdown(\n fileLines: string[],\n): { lineNumber: number; text: string }[][] {\n const sections: { lineNumber: number; text: string }[][] = [];\n let current: { lineNumber: number; text: string }[] = [];\n let inCodeBlock = false;\n\n for (let i = 0; i < fileLines.length; i++) {\n const line = fileLines[i]!;\n const lineObj = { lineNumber: i + 1, text: line };\n\n // Track fenced code blocks\n if (line.trimStart().startsWith('```')) {\n inCodeBlock = !inCodeBlock;\n current.push(lineObj);\n continue;\n }\n\n // Split on H2+ headers when not inside code block\n if (!inCodeBlock && /^#{2,}\\s/.test(line) && current.length > 0) {\n sections.push(current);\n current = [lineObj];\n continue;\n }\n\n // Split on blank lines (paragraph boundary) when not in code block\n // A blank line after non-blank content marks a paragraph break\n if (\n !inCodeBlock &&\n line.trim() === '' &&\n current.length > 0 &&\n current.some((l) => l.text.trim() !== '')\n ) {\n // Include this blank line in current section, then start fresh\n current.push(lineObj);\n sections.push(current);\n current = [];\n continue;\n }\n\n current.push(lineObj);\n }\n\n if (current.length > 0) {\n sections.push(current);\n }\n\n return sections;\n}\n\n/** Split code at blank lines between top-level definitions. */\nfunction splitCode(\n fileLines: string[],\n): { lineNumber: number; text: string }[][] {\n const sections: { lineNumber: number; text: string }[][] = [];\n let current: { lineNumber: number; text: string }[] = [];\n\n for (let i = 0; i < fileLines.length; i++) {\n const line = fileLines[i]!;\n const lineObj = { lineNumber: i + 1, text: line };\n\n if (line.trim() === '' && current.length > 0) {\n // Check if there's a next non-blank line (forward scan, no slice copy)\n let hasNextNonBlank = false;\n for (let j = i + 1; j < fileLines.length; j++) {\n if (fileLines[j]!.trim() !== '') {\n hasNextNonBlank = true;\n break;\n }\n }\n if (hasNextNonBlank) {\n sections.push(current);\n current = [lineObj];\n continue;\n }\n }\n\n current.push(lineObj);\n }\n\n if (current.length > 0) {\n sections.push(current);\n }\n\n return sections;\n}\n\n/** Split plain text on paragraph boundaries (blank lines). 
*/\nfunction splitParagraphs(\n fileLines: string[],\n): { lineNumber: number; text: string }[][] {\n const sections: { lineNumber: number; text: string }[][] = [];\n let current: { lineNumber: number; text: string }[] = [];\n\n for (let i = 0; i < fileLines.length; i++) {\n const line = fileLines[i]!;\n const lineObj = { lineNumber: i + 1, text: line };\n\n if (line.trim() === '' && current.length > 0) {\n sections.push(current);\n current = [lineObj];\n continue;\n }\n\n current.push(lineObj);\n }\n\n if (current.length > 0) {\n sections.push(current);\n }\n\n return sections;\n}\n\n/** Get text from a section (array of line objects). */\nfunction sectionText(section: { text: string }[]): string {\n return section.map((l) => l.text).join('\\n');\n}\n\n/**\n * Chunk a file into semantic pieces with overlap.\n *\n * @param filePath - Relative path from repo root\n * @param content - File text content\n * @param options - Chunking options (targetSize, overlapSize)\n * @returns Array of Chunk objects\n */\nexport function chunkFile(\n filePath: string,\n content: string,\n options?: ChunkOptions,\n): Chunk[] {\n // Empty or whitespace-only\n if (content.trim() === '') return [];\n\n // Binary detection\n if (isBinary(content)) return [];\n\n const targetSize = options?.targetSize ?? DEFAULT_TARGET_SIZE;\n const overlapSize = options?.overlapSize ?? DEFAULT_OVERLAP_SIZE;\n\n const fileLines = content.split('\\n');\n const ext = extname(filePath).toLowerCase();\n\n const sections = splitIntoSections(fileLines, ext);\n\n // Merge small sections until reaching targetSize, then emit a chunk\n const chunks: Chunk[] = [];\n let accumulated: { lineNumber: number; text: string }[] = [];\n let accumulatedLength = 0;\n\n function emitChunk(\n lines: { lineNumber: number; text: string }[],\n overlapLines: { lineNumber: number; text: string }[],\n ): { lineNumber: number; text: string }[] {\n if (lines.length === 0) return [];\n\n const allLines = [...overlapLines, ...lines];\n const text = allLines.map((l) => l.text).join('\\n');\n const startLine = allLines[0]!.lineNumber;\n const endLine = allLines[allLines.length - 1]!.lineNumber;\n\n chunks.push({\n id: generateChunkId(filePath, startLine, endLine),\n filePath,\n startLine,\n endLine,\n text,\n contentHash: chunkContentHash(text),\n });\n\n // Compute overlap: take lines from the end of `lines` that fit overlapSize\n if (overlapSize <= 0) return [];\n const overlapResult: { lineNumber: number; text: string }[] = [];\n let overlapLen = 0;\n for (let i = lines.length - 1; i >= 0; i--) {\n const lineLen = lines[i]!.text.length + 1; // +1 for newline\n if (overlapLen + lineLen > overlapSize && overlapResult.length > 0) break;\n overlapResult.unshift(lines[i]!);\n overlapLen += lineLen;\n }\n return overlapResult;\n }\n\n let overlapLines: { lineNumber: number; text: string }[] = [];\n\n for (const section of sections) {\n const sectionLen = sectionText(section).length;\n\n // If adding this section exceeds targetSize and we have accumulated content, emit\n if (accumulatedLength > 0 && accumulatedLength + sectionLen > targetSize) {\n overlapLines = emitChunk(accumulated, overlapLines);\n accumulated = [];\n accumulatedLength = 0;\n }\n\n accumulated.push(...section);\n accumulatedLength += sectionLen;\n\n // If this single section alone exceeds targetSize, emit it immediately\n if (accumulatedLength > targetSize) {\n overlapLines = emitChunk(accumulated, overlapLines);\n accumulated = [];\n accumulatedLength = 0;\n }\n }\n\n // Emit remaining accumulated 
content\n if (accumulated.length > 0) {\n emitChunk(accumulated, overlapLines);\n }\n\n return chunks;\n}\n","/**\n * Core embedding function for knowledge chunks.\n *\n * Embeds unembedded (or all) knowledge chunks using the local embedding model.\n * Uses batch embedding and transactional writes for performance.\n */\n\nimport { embedTexts } from '../embeddings/nomic.js';\nimport { openKnowledgeDb } from '../storage/sqlite-knowledge/connection.js';\n\nconst BATCH_SIZE = 16;\n\nexport interface EmbedChunksOptions {\n /** Only embed chunks with no embedding (default: true) */\n onlyMissing?: boolean;\n}\n\nexport interface EmbedChunksResult {\n chunksEmbedded: number;\n chunksSkipped: number;\n durationMs: number;\n}\n\n/**\n * Count chunks that have no embedding stored.\n * @param repoRoot - Absolute path to repository root\n */\nexport function getUnembeddedChunkCount(repoRoot: string): number {\n const db = openKnowledgeDb(repoRoot);\n const row = db\n .prepare('SELECT COUNT(*) as count FROM chunks WHERE embedding IS NULL')\n .get() as { count: number };\n return row.count;\n}\n\n/**\n * Embed knowledge chunks using the local embedding model.\n *\n * Processes chunks in batches of BATCH_SIZE for efficient embedding and\n * wraps each batch's DB writes in a transaction (1 fsync per batch).\n *\n * @param repoRoot - Absolute path to repository root\n * @param options - Embedding options\n * @returns Stats about the embedding run\n */\nexport async function embedChunks(\n repoRoot: string,\n options?: EmbedChunksOptions\n): Promise<EmbedChunksResult> {\n const start = Date.now();\n const onlyMissing = options?.onlyMissing ?? true;\n const db = openKnowledgeDb(repoRoot);\n\n const query = onlyMissing\n ? 'SELECT id, text, content_hash FROM chunks WHERE embedding IS NULL'\n : 'SELECT id, text, content_hash FROM chunks';\n const rows = db\n .prepare(query)\n .all() as Array<{ id: string; text: string; content_hash: string }>;\n\n // Count already-embedded chunks for reporting\n const totalRow = db.prepare('SELECT COUNT(*) as count FROM chunks').get() as { count: number };\n const chunksSkipped = totalRow.count - rows.length;\n\n let chunksEmbedded = 0;\n\n const updateStmt = db.prepare(\n 'UPDATE chunks SET embedding = ?, content_hash = ? WHERE id = ?'\n );\n const writeBatch = db.transaction((batch: Array<{ id: string; content_hash: string; vector: Float32Array }>) => {\n for (const item of batch) {\n const buffer = Buffer.from(item.vector.buffer, item.vector.byteOffset, item.vector.byteLength);\n updateStmt.run(buffer, item.content_hash, item.id);\n }\n });\n\n for (let i = 0; i < rows.length; i += BATCH_SIZE) {\n const batch = rows.slice(i, i + BATCH_SIZE);\n const texts = batch.map(r => r.text);\n const vectors = await embedTexts(texts);\n if (vectors.length !== texts.length) {\n throw new Error(`embedTexts returned ${vectors.length} vectors for ${texts.length} inputs`);\n }\n const enriched = batch.map((r, j) => ({ ...r, vector: vectors[j]! 
}));\n writeBatch(enriched);\n chunksEmbedded += batch.length;\n }\n\n return {\n chunksEmbedded,\n chunksSkipped,\n durationMs: Date.now() - start,\n };\n}\n","/**\n * Knowledge indexing pipeline.\n *\n * Walks a docs directory, chunks files, embeds chunks (if model available),\n * and stores in the knowledge SQLite database.\n */\n\nimport { createHash } from 'node:crypto';\nimport { readdir, readFile } from 'node:fs/promises';\nimport { extname, join, relative } from 'node:path';\n\nimport {\n openKnowledgeDb,\n} from '../storage/sqlite-knowledge/connection.js';\nimport {\n upsertChunks,\n deleteChunksByFilePath,\n getChunkCountByFilePath,\n getIndexedFilePaths,\n setLastIndexTime,\n} from '../storage/sqlite-knowledge/sync.js';\nimport type { KnowledgeChunk } from '../storage/sqlite-knowledge/types.js';\n\nimport { chunkFile } from './chunking.js';\nimport { SUPPORTED_EXTENSIONS } from './types.js';\n\nexport interface IndexOptions {\n /** Force re-index all files (ignore cache) */\n force?: boolean;\n /** Directory to index (default: 'docs') */\n docsDir?: string;\n /** Embed chunks after indexing (default: false) */\n embed?: boolean;\n}\n\nexport interface IndexResult {\n filesIndexed: number;\n filesSkipped: number;\n filesErrored: number;\n chunksCreated: number;\n chunksDeleted: number;\n chunksEmbedded: number;\n durationMs: number;\n}\n\n/** Compute SHA-256 hash of file content for change detection */\nfunction fileHash(content: string): string {\n return createHash('sha256').update(content).digest('hex');\n}\n\n/** Build metadata key for file hash */\nfunction fileHashKey(relativePath: string): string {\n return 'file_hash:' + relativePath;\n}\n\n/** Get stored file hash from metadata table */\nfunction getStoredFileHash(repoRoot: string, relativePath: string): string | null {\n const db = openKnowledgeDb(repoRoot);\n const row = db\n .prepare('SELECT value FROM metadata WHERE key = ?')\n .get(fileHashKey(relativePath)) as { value: string } | undefined;\n return row?.value ?? null;\n}\n\n/** Store file hash in metadata table */\nfunction setFileHash(repoRoot: string, relativePath: string, hash: string): void {\n const db = openKnowledgeDb(repoRoot);\n db.prepare('INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)')\n .run(fileHashKey(relativePath), hash);\n}\n\n/** Remove file hash from metadata table */\nfunction removeFileHash(repoRoot: string, relativePath: string): void {\n const db = openKnowledgeDb(repoRoot);\n db.prepare('DELETE FROM metadata WHERE key = ?').run(fileHashKey(relativePath));\n}\n\n/** Recursively walk directory and return relative paths of supported files */\nasync function walkSupportedFiles(baseDir: string, repoRoot: string): Promise<string[]> {\n const results: string[] = [];\n\n let entries;\n try {\n entries = await readdir(baseDir, { recursive: true, withFileTypes: true });\n } catch {\n // Directory doesn't exist or can't be read\n return results;\n }\n\n for (const entry of entries) {\n if (!entry.isFile()) continue;\n const ext = extname(entry.name).toLowerCase();\n if (!SUPPORTED_EXTENSIONS.has(ext)) continue;\n\n // Build the full path. With recursive readdir, parentPath gives\n // the directory containing the entry.\n const fullPath = join(entry.parentPath ?? 
entry.path, entry.name);\n const relPath = relative(repoRoot, fullPath);\n results.push(relPath);\n }\n\n return results;\n}\n\n/**\n * Embed indexed chunks using the local model.\n * Uses dynamic imports to avoid loading llama-cpp when not needed.\n *\n * @throws Error if model is not usable (caller explicitly requested --embed)\n * @returns Number of chunks embedded\n */\nasync function tryEmbedChunks(repoRoot: string): Promise<number> {\n const { isModelUsable } = await import('../embeddings/model.js');\n const usability = await isModelUsable();\n if (!usability.usable) {\n throw new Error(`Embedding failed: ${usability.reason}. ${usability.action}`);\n }\n const { embedChunks } = await import('./embed-chunks.js');\n const embedResult = await embedChunks(repoRoot);\n return embedResult.chunksEmbedded;\n}\n\n/**\n * Index documentation files into the knowledge database.\n *\n * @param repoRoot - Absolute path to repository root\n * @param options - Indexing options\n * @returns Statistics about the indexing operation\n */\nexport async function indexDocs(\n repoRoot: string,\n options: IndexOptions = {},\n): Promise<IndexResult> {\n const start = Date.now();\n const docsDir = options.docsDir ?? 'docs';\n const force = options.force ?? false;\n\n const stats: IndexResult = {\n filesIndexed: 0,\n filesSkipped: 0,\n filesErrored: 0,\n chunksCreated: 0,\n chunksDeleted: 0,\n chunksEmbedded: 0,\n durationMs: 0,\n };\n\n const docsPath = join(repoRoot, docsDir);\n const filePaths = await walkSupportedFiles(docsPath, repoRoot);\n\n // Process each file\n for (const relPath of filePaths) {\n const fullPath = join(repoRoot, relPath);\n let content: string;\n try {\n content = await readFile(fullPath, 'utf-8');\n } catch {\n stats.filesErrored++;\n continue;\n }\n\n const hash = fileHash(content);\n const storedHash = getStoredFileHash(repoRoot, relPath);\n\n // Skip if unchanged and not forced\n if (!force && storedHash === hash) {\n stats.filesSkipped++;\n continue;\n }\n\n // Chunk the file\n const chunks = chunkFile(relPath, content);\n\n // Convert to KnowledgeChunk format\n const now = new Date().toISOString();\n const knowledgeChunks: KnowledgeChunk[] = chunks.map((chunk) => ({\n id: chunk.id,\n filePath: chunk.filePath,\n startLine: chunk.startLine,\n endLine: chunk.endLine,\n contentHash: chunk.contentHash,\n text: chunk.text,\n updatedAt: now,\n }));\n\n // Atomically replace chunks for this file\n const db = openKnowledgeDb(repoRoot);\n db.transaction(() => {\n deleteChunksByFilePath(repoRoot, [relPath]);\n if (knowledgeChunks.length > 0) {\n upsertChunks(repoRoot, knowledgeChunks);\n }\n setFileHash(repoRoot, relPath, hash);\n })();\n\n stats.filesIndexed++;\n stats.chunksCreated += knowledgeChunks.length;\n }\n\n // Clean up stale files: find DB paths not in current file set\n const indexedPaths = getIndexedFilePaths(repoRoot);\n const currentPathSet = new Set(filePaths);\n const stalePaths = indexedPaths.filter((p) => !currentPathSet.has(p));\n\n if (stalePaths.length > 0) {\n // Count chunks that will be deleted\n for (const path of stalePaths) {\n stats.chunksDeleted += getChunkCountByFilePath(repoRoot, path);\n }\n\n deleteChunksByFilePath(repoRoot, stalePaths);\n\n // Clean up file hashes for stale files\n for (const path of stalePaths) {\n removeFileHash(repoRoot, path);\n }\n }\n\n // Update last index time\n setLastIndexTime(repoRoot, new Date().toISOString());\n\n // Embed chunks if requested\n if (options.embed) {\n stats.chunksEmbedded = await tryEmbedChunks(repoRoot);\n 
}\n\n stats.durationMs = Date.now() - start;\n return stats;\n}\n","/**\n * PID-based lock file for embedding processes.\n *\n * Prevents concurrent embedding when background embed (ca init/setup)\n * and post-commit hook run simultaneously.\n *\n * Lock file: {repoRoot}/.claude/.cache/embed.lock\n * Content: { pid: number, startedAt: string } (ISO timestamp)\n */\n\nimport {\n existsSync,\n mkdirSync,\n readFileSync,\n unlinkSync,\n writeFileSync,\n} from 'node:fs';\nimport { join } from 'node:path';\n\ninterface LockAcquired {\n acquired: true;\n release: () => void;\n}\n\ninterface LockBusy {\n acquired: false;\n holder: number;\n}\n\nexport type LockResult = LockAcquired | LockBusy;\n\n/** Max lock age before considered expired (1 hour). */\nconst LOCK_MAX_AGE_MS = 60 * 60 * 1000;\n\ninterface LockContent {\n pid: number;\n startedAt: string;\n}\n\nfunction lockPath(repoRoot: string): string {\n return join(repoRoot, '.claude', '.cache', 'embed.lock');\n}\n\nfunction lockDir(repoRoot: string): string {\n return join(repoRoot, '.claude', '.cache');\n}\n\n/** Check if a process is alive via kill(pid, 0). */\nfunction isProcessAlive(pid: number): boolean {\n try {\n process.kill(pid, 0);\n return true;\n } catch {\n return false;\n }\n}\n\n/** Read and parse lock file. Returns null on any error or invalid shape. */\nfunction readLock(filePath: string): LockContent | null {\n try {\n const raw = readFileSync(filePath, 'utf-8');\n const parsed: unknown = JSON.parse(raw);\n if (\n typeof parsed === 'object' && parsed !== null &&\n typeof (parsed as Record<string, unknown>).pid === 'number' &&\n typeof (parsed as Record<string, unknown>).startedAt === 'string'\n ) {\n return parsed as LockContent;\n }\n return null;\n } catch {\n return null;\n }\n}\n\n/**\n * Acquire the embed lock for this process.\n *\n * Uses writeFileSync with 'wx' flag for atomic exclusive creation.\n * On EEXIST: reads holder PID and checks staleness via process.kill(pid, 0).\n * If stale (holder dead): overwrites lock. If alive: returns acquired: false.\n */\nexport function acquireEmbedLock(repoRoot: string): LockResult {\n const dir = lockDir(repoRoot);\n const file = lockPath(repoRoot);\n const content: LockContent = { pid: process.pid, startedAt: new Date().toISOString() };\n\n mkdirSync(dir, { recursive: true });\n\n try {\n writeFileSync(file, JSON.stringify(content), { flag: 'wx' });\n return { acquired: true, release: () => releaseLock(file) };\n } catch (err: unknown) {\n if ((err as NodeJS.ErrnoException).code !== 'EEXIST') throw err;\n\n // Lock file exists -- check if holder is alive and lock is not expired\n const existing = readLock(file);\n if (existing && isProcessAlive(existing.pid)) {\n const lockAge = Date.now() - new Date(existing.startedAt).getTime();\n if (lockAge < LOCK_MAX_AGE_MS) {\n return { acquired: false, holder: existing.pid };\n }\n // Lock expired -- fall through to overwrite\n }\n\n // Stale lock -- delete then re-create atomically with 'wx'\n try { unlinkSync(file); } catch { /* already gone */ }\n try {\n writeFileSync(file, JSON.stringify(content), { flag: 'wx' });\n return { acquired: true, release: () => releaseLock(file) };\n } catch {\n // Another process won the race\n const winner = readLock(file);\n return { acquired: false, holder: winner?.pid ?? -1 };\n }\n }\n}\n\n/** Check if an embed lock is currently held by a live process. 
*/\nexport function isEmbedLocked(repoRoot: string): boolean {\n const file = lockPath(repoRoot);\n if (!existsSync(file)) return false;\n\n const content = readLock(file);\n if (!content) return false;\n\n return isProcessAlive(content.pid);\n}\n\nfunction releaseLock(file: string): void {\n try {\n unlinkSync(file);\n } catch {\n // Silently ignore -- lock may already be removed\n }\n}\n","/**\n * Embedding status file: tracks state of background embedding process.\n *\n * Status file lives at {repoRoot}/.claude/.cache/embed-status.json\n */\n\nimport { mkdirSync, readFileSync, writeFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\n\nexport type EmbedStatus =\n | { state: 'idle' }\n | { state: 'running'; startedAt: string }\n | { state: 'completed'; chunksEmbedded: number; completedAt: string; durationMs: number }\n | { state: 'failed'; error: string; durationMs: number };\n\nconst STATUS_FILE = '.claude/.cache/embed-status.json';\n\nfunction statusPath(repoRoot: string): string {\n return join(repoRoot, STATUS_FILE);\n}\n\n/** Write embedding status to disk. Creates parent directories if needed. */\nexport function writeEmbedStatus(repoRoot: string, status: EmbedStatus): void {\n const filePath = statusPath(repoRoot);\n mkdirSync(dirname(filePath), { recursive: true });\n writeFileSync(filePath, JSON.stringify(status, null, 2), 'utf-8');\n}\n\nconst VALID_STATES = new Set(['idle', 'running', 'completed', 'failed']);\n\n/** Read embedding status from disk. Returns null on missing file, parse error, or invalid shape. */\nexport function readEmbedStatus(repoRoot: string): EmbedStatus | null {\n try {\n const raw = readFileSync(statusPath(repoRoot), 'utf-8');\n const parsed = JSON.parse(raw) as Record<string, unknown>;\n if (!parsed || typeof parsed !== 'object' || !VALID_STATES.has(parsed.state as string)) {\n return null;\n }\n return parsed as EmbedStatus;\n } catch {\n return null;\n }\n}\n","/**\n * Background embedding: spawn a detached worker or run embedding in-process.\n *\n * spawnBackgroundEmbed(repoRoot) - spawns detached child process (sync, non-blocking)\n * runBackgroundEmbed(repoRoot) - worker entry point that does the actual embedding\n */\n\nimport { spawn } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { fileURLToPath } from 'node:url';\n\nimport { isModelAvailable, withEmbedding } from '../embeddings/index.js';\nimport { closeKnowledgeDb } from '../storage/sqlite-knowledge/index.js';\nimport { acquireEmbedLock, isEmbedLocked } from './embed-lock.js';\nimport { writeEmbedStatus } from './embed-status.js';\nimport { embedChunks, getUnembeddedChunkCount } from './embed-chunks.js';\n\nexport interface SpawnEmbedResult {\n spawned: boolean;\n reason?: string;\n pid?: number;\n}\n\n/**\n * Resolve the CLI entry point for spawning the embed-worker subprocess.\n *\n * Strategy: walk up from this module to find dist/cli.js (works in both\n * bundled output and dev). 
Falls back to npx ca if not found.\n */\nfunction resolveCliInvocation(): { command: string; args: string[] } {\n let dir = dirname(fileURLToPath(import.meta.url));\n for (let i = 0; i < 10; i++) {\n const candidate = join(dir, 'dist', 'cli.js');\n if (existsSync(candidate)) {\n return { command: process.execPath, args: [candidate] };\n }\n const parent = dirname(dir);\n if (parent === dir) break;\n dir = parent;\n }\n return { command: 'npx', args: ['ca'] };\n}\n\n/**\n * Spawn a detached background process to embed chunks.\n * Synchronous -- fires and forgets.\n *\n * Pre-flight checks (lock, model, count) are advisory only. The worker\n * acquires its own lock, so TOCTOU here cannot cause double-embedding --\n * at worst we spawn a worker that exits immediately.\n */\nexport function spawnBackgroundEmbed(repoRoot: string): SpawnEmbedResult {\n if (isEmbedLocked(repoRoot)) {\n return { spawned: false, reason: 'Embedding already in progress' };\n }\n if (!isModelAvailable()) {\n return { spawned: false, reason: 'Model not available' };\n }\n if (getUnembeddedChunkCount(repoRoot) === 0) {\n return { spawned: false, reason: 'All chunks already embedded' };\n }\n\n const cli = resolveCliInvocation();\n const child = spawn(cli.command, [...cli.args, 'embed-worker', repoRoot], {\n detached: true,\n stdio: 'ignore',\n });\n child.unref();\n\n return { spawned: true, pid: child.pid };\n}\n\n/**\n * Worker entry point: acquire lock, embed chunks, write status, clean up.\n */\nexport async function runBackgroundEmbed(repoRoot: string): Promise<void> {\n const lock = acquireEmbedLock(repoRoot);\n if (!lock.acquired) return;\n\n // Open DB after lock to avoid leaking connection on contention\n const { openKnowledgeDb } = await import('../storage/sqlite-knowledge/index.js');\n openKnowledgeDb(repoRoot);\n\n const start = Date.now();\n writeEmbedStatus(repoRoot, { state: 'running', startedAt: new Date().toISOString() });\n\n try {\n const result = await withEmbedding(async () => embedChunks(repoRoot, { onlyMissing: true }));\n writeEmbedStatus(repoRoot, {\n state: 'completed',\n chunksEmbedded: result.chunksEmbedded,\n completedAt: new Date().toISOString(),\n durationMs: result.durationMs,\n });\n } catch (err) {\n const msg = err instanceof Error ? 
err.message : 'Unknown error';\n writeEmbedStatus(repoRoot, {\n state: 'failed',\n error: msg,\n durationMs: Date.now() - start,\n });\n } finally {\n closeKnowledgeDb();\n lock.release();\n }\n}\n\n/**\n * Index docs/ and spawn background embedding if docs/ exists.\n * Shared helper for init and setup commands.\n *\n * @returns SpawnEmbedResult or null if docs/ doesn't exist\n */\nexport async function indexAndSpawnEmbed(repoRoot: string): Promise<SpawnEmbedResult | null> {\n const docsPath = join(repoRoot, 'docs');\n if (!existsSync(docsPath)) return null;\n const { indexDocs } = await import('./indexing.js');\n await indexDocs(repoRoot);\n return spawnBackgroundEmbed(repoRoot);\n}\n","/**\n * Package version - lightweight module to avoid circular dependency chains.\n */\n\nimport { createRequire } from 'node:module';\n\nconst _require = createRequire(import.meta.url);\nconst _pkg = _require('../package.json') as { version: string };\n\nexport const VERSION: string = _pkg.version;\n","/**\n * Compound Agent - Repository-scoped learning system for Claude Code\n *\n * This package helps Claude Code learn from mistakes and avoid repeating them.\n * It captures lessons during coding sessions and retrieves relevant lessons\n * when planning new work.\n *\n * ## Quick Start\n *\n * ```typescript\n * import { appendLesson, retrieveForPlan, loadSessionLessons } from 'compound-agent';\n *\n * // At session start, load high-severity lessons\n * const criticalLessons = await loadSessionLessons(repoRoot);\n *\n * // When planning, retrieve relevant lessons\n * const { lessons, message } = await retrieveForPlan(repoRoot, planText);\n *\n * // When capturing a lesson\n * await appendLesson(repoRoot, lesson);\n * ```\n *\n * ## Setup\n *\n * Run `npx ca init` in your project root to configure hooks and AGENTS.md.\n *\n * ## Resource Management\n *\n * This library manages two heavyweight resources that require cleanup:\n *\n * ### SQLite Database\n * - **Acquired:** Lazily on first database operation (search, rebuild, etc.)\n * - **Memory:** Minimal (~few KB for connection, index cached by OS)\n * - **Cleanup:** Call `closeDb()` before process exit\n *\n * ### Embedding Model\n * - **Acquired:** Lazily on first embedding call (embedText, embedTexts, searchVector)\n * - **Memory:** ~150MB RAM for the EmbeddingGemma model\n * - **Cleanup:** Call `unloadEmbedding()` before process exit\n *\n * ### Recommended Cleanup Pattern\n *\n * ```typescript\n * import { closeDb, unloadEmbedding } from 'compound-agent';\n *\n * // For CLI commands - use try/finally\n * async function main() {\n * try {\n * // ... your code that uses compound-agent\n * } finally {\n * unloadEmbedding();\n * closeDb();\n * }\n * }\n *\n * // For long-running processes - use shutdown handlers\n * process.on('SIGTERM', () => {\n * unloadEmbedding();\n * closeDb();\n * process.exit(0);\n * });\n * process.on('SIGINT', () => {\n * unloadEmbedding();\n * closeDb();\n * process.exit(0);\n * });\n * ```\n *\n * **Note:** Failing to clean up will not corrupt data, but may cause:\n * - Memory leaks in long-running processes\n * - Unclean process exits (warnings in some environments)\n *\n * @see {@link closeDb} for database cleanup\n * @see {@link unloadEmbedding} for embedding model cleanup\n * @module compound-agent\n */\n\n/** Package version, read from package.json. 
*/\nexport { VERSION } from './version.js';\n\n// Storage API (JSONL source of truth + SQLite index)\nexport {\n appendLesson,\n appendMemoryItem,\n closeDb,\n DB_PATH,\n LESSONS_PATH,\n readLessons,\n readMemoryItems,\n rebuildIndex,\n searchKeyword,\n} from './memory/storage/index.js';\nexport type { ParseError, ReadLessonsOptions, ReadLessonsResult, ReadMemoryItemsResult } from './memory/storage/index.js';\n\n// Embeddings API\nexport {\n embedText,\n embedTexts,\n getEmbedding,\n isModelAvailable,\n isModelUsable,\n MODEL_FILENAME,\n MODEL_URI,\n resolveModel,\n unloadEmbedding,\n} from './memory/embeddings/index.js';\nexport type { UsabilityResult } from './memory/embeddings/index.js';\n\n// Search API (vector similarity + ranking + hybrid)\nexport {\n calculateScore,\n CANDIDATE_MULTIPLIER,\n confirmationBoost,\n cosineSimilarity,\n DEFAULT_TEXT_WEIGHT,\n DEFAULT_VECTOR_WEIGHT,\n mergeHybridResults,\n normalizeBm25Rank,\n rankLessons,\n recencyBoost,\n searchVector,\n severityBoost,\n} from './memory/search/index.js';\nexport type { HybridMergeOptions, RankedLesson, ScoredKeywordResult, ScoredLesson, SearchVectorOptions } from './memory/search/index.js';\n\n// Capture API (quality filters + trigger detection)\nexport {\n detectSelfCorrection,\n detectTestFailure,\n detectUserCorrection,\n isActionable,\n isNovel,\n isSpecific,\n shouldPropose,\n} from './memory/capture/index.js';\nexport type {\n ActionabilityResult,\n CorrectionSignal,\n DetectedCorrection,\n DetectedSelfCorrection,\n DetectedTestFailure,\n EditEntry,\n EditHistory,\n NoveltyOptions,\n NoveltyResult,\n ProposeResult,\n SpecificityResult,\n TestResult,\n} from './memory/capture/index.js';\n\n// Retrieval API (session + plan time)\nexport { formatLessonsCheck, loadSessionLessons, retrieveForPlan } from './memory/retrieval/index.js';\nexport type { PlanRetrievalResult } from './memory/retrieval/index.js';\n\n// Knowledge API (docs embeddings + search)\nexport {\n closeKnowledgeDb,\n collectCachedChunkEmbeddings,\n getCachedChunkEmbedding,\n KNOWLEDGE_DB_PATH,\n KNOWLEDGE_SCHEMA_VERSION,\n openKnowledgeDb,\n searchChunksKeywordScored,\n setCachedChunkEmbedding,\n} from './memory/storage/sqlite-knowledge/index.js';\nexport type { KnowledgeChunk, KnowledgeDbOptions, ScoredChunk } from './memory/storage/sqlite-knowledge/index.js';\n\nexport {\n chunkFile,\n indexDocs,\n searchKnowledge,\n searchKnowledgeVector,\n embedChunks,\n getUnembeddedChunkCount,\n acquireEmbedLock,\n isEmbedLocked,\n writeEmbedStatus,\n readEmbedStatus,\n spawnBackgroundEmbed,\n runBackgroundEmbed,\n indexAndSpawnEmbed,\n} from './memory/knowledge/index.js';\nexport type {\n IndexOptions,\n IndexResult,\n KnowledgeSearchOptions,\n EmbedChunksOptions,\n EmbedChunksResult,\n LockResult,\n EmbedStatus,\n SpawnEmbedResult,\n} from './memory/knowledge/index.js';\n\n// Context recovery API\nexport { getPrimeContext } from './commands/index.js';\n\n// Audit API\nexport { runAudit, AuditFindingSchema, AuditReportSchema } from './audit/index.js';\nexport type { AuditFinding, AuditReport, AuditOptions } from './audit/index.js';\n\n// Compound API (clustering, synthesis, pattern I/O)\nexport {\n buildSimilarityMatrix,\n CCT_PATTERNS_PATH,\n CctPatternSchema,\n clusterBySimilarity,\n readCctPatterns,\n synthesizePattern,\n writeCctPatterns,\n} from './compound/index.js';\nexport type { CctPattern, ClusterResult } from './compound/index.js';\n\n// Lint detection API\nexport { detectLinter, LinterInfoSchema, LinterNameSchema } from './lint/index.js';\nexport type 
{ LinterInfo, LinterName } from './lint/index.js';\n\n// Types and schemas\nexport {\n generateId,\n LessonItemSchema,\n LessonSchema,\n MemoryItemRecordSchema,\n MemoryItemSchema,\n MemoryItemTypeSchema,\n PatternItemSchema,\n PreferenceItemSchema,\n SolutionItemSchema,\n} from './memory/types.js';\nexport type {\n Context,\n Lesson,\n LessonRecord,\n LessonType,\n MemoryItem,\n MemoryItemRecord,\n MemoryItemType,\n PatternItem,\n Preference,\n Severity,\n Solution,\n Source,\n} from './memory/types.js';\n","/**\n * Quality filters for lesson capture\n *\n * Filters to ensure lessons are:\n * - Novel (not duplicate)\n * - Specific (not vague)\n *\n * Actionability check is available but not part of the capture gate.\n * Strategy: capture aggressively, prune later.\n */\n\nimport { isModelAvailable } from '../embeddings/model.js';\nimport { findSimilarLessons } from '../search/index.js';\nimport { syncIfNeeded } from '../storage/index.js';\n\n/** Cosine similarity threshold for near-duplicate detection */\nconst DUPLICATE_THRESHOLD = 0.98;\n\n/** Result of novelty check */\nexport interface NoveltyResult {\n novel: boolean;\n reason?: string;\n existingId?: string;\n}\n\n/** Options for novelty check */\nexport interface NoveltyOptions {\n threshold?: number;\n}\n\n/**\n * Check if an insight is novel (not a near-duplicate of existing lessons).\n * Uses semantic embeddings with cosine similarity.\n * Falls back to novel: true when model is unavailable or on error.\n */\nexport async function isNovel(\n repoRoot: string,\n insight: string,\n options: NoveltyOptions = {}\n): Promise<NoveltyResult> {\n const threshold = options.threshold ?? DUPLICATE_THRESHOLD;\n\n if (!isModelAvailable()) {\n return { novel: true };\n }\n\n try {\n await syncIfNeeded(repoRoot);\n const similar = await findSimilarLessons(repoRoot, insight, { threshold });\n const top = similar[0];\n if (top) {\n return {\n novel: false,\n reason: `Near-duplicate of existing lesson: \"${top.item.insight.slice(0, 50)}...\"`,\n existingId: top.item.id,\n };\n }\n return { novel: true };\n } catch (err) {\n if (process.env['CA_DEBUG']) {\n process.stderr.write(`[CA_DEBUG] isNovel catch: ${err instanceof Error ? 
err.message : String(err)}\\n`);\n }\n return { novel: true };\n }\n}\n\n/** Minimum word count for a specific insight */\nconst MIN_WORD_COUNT = 4;\n\n/** Vague patterns that indicate non-specific advice */\nconst VAGUE_PATTERNS = [\n /\\bwrite better\\b/i,\n /\\bbe careful\\b/i,\n /\\bremember to\\b/i,\n /\\bmake sure\\b/i,\n /\\btry to\\b/i,\n /\\bdouble check\\b/i,\n];\n\n/** Generic \"always/never\" phrases (short, lacking specificity) */\nconst GENERIC_IMPERATIVE_PATTERN = /^(always|never)\\s+\\w+(\\s+\\w+){0,2}$/i;\n\n/** Result of specificity check */\nexport interface SpecificityResult {\n specific: boolean;\n reason?: string;\n}\n\n/**\n * Check if an insight is specific enough to be useful.\n * Rejects vague, generic advice that doesn't provide actionable guidance.\n */\nexport function isSpecific(insight: string): SpecificityResult {\n // Check minimum length first\n const words = insight.trim().split(/\\s+/).filter((w) => w.length > 0);\n if (words.length < MIN_WORD_COUNT) {\n return { specific: false, reason: 'Insight is too short to be actionable' };\n }\n\n // Check for vague patterns\n for (const pattern of VAGUE_PATTERNS) {\n if (pattern.test(insight)) {\n return { specific: false, reason: 'Insight matches a vague pattern' };\n }\n }\n\n // Check for generic \"Always X\" or \"Never X\" phrases\n if (GENERIC_IMPERATIVE_PATTERN.test(insight)) {\n return { specific: false, reason: 'Insight matches a vague pattern' };\n }\n\n return { specific: true };\n}\n\n/** Action word patterns that indicate actionable guidance */\nconst ACTION_PATTERNS = [\n /\\buse\\s+.+\\s+instead\\s+of\\b/i, // \"use X instead of Y\"\n /\\bprefer\\s+.+\\s+(over|to)\\b/i, // \"prefer X over Y\" or \"prefer X to Y\"\n /\\balways\\s+.+\\s+when\\b/i, // \"always X when Y\"\n /\\bnever\\s+.+\\s+without\\b/i, // \"never X without Y\"\n /\\bavoid\\s+(using\\s+)?\\w+/i, // \"avoid X\" or \"avoid using X\"\n /\\bcheck\\s+.+\\s+before\\b/i, // \"check X before Y\"\n /^(run|use|add|remove|install|update|configure|set|enable|disable)\\s+/i, // Imperative commands at start\n];\n\n/** Result of actionability check */\nexport interface ActionabilityResult {\n actionable: boolean;\n reason?: string;\n}\n\n/**\n * Check if an insight contains actionable guidance.\n * Returns false for pure observations or questions.\n */\nexport function isActionable(insight: string): ActionabilityResult {\n // Check for action patterns\n for (const pattern of ACTION_PATTERNS) {\n if (pattern.test(insight)) {\n return { actionable: true };\n }\n }\n\n return { actionable: false, reason: 'Insight lacks clear action guidance' };\n}\n\n/** Result of combined quality check */\nexport interface ProposeResult {\n shouldPropose: boolean;\n reason?: string;\n}\n\n/**\n * Combined quality check for lesson proposals.\n * Returns true only if insight is novel AND specific.\n * Actionability gate removed: capture aggressively, prune later.\n */\nexport async function shouldPropose(\n repoRoot: string,\n insight: string\n): Promise<ProposeResult> {\n // Check specificity first (fast, no DB)\n const specificResult = isSpecific(insight);\n if (!specificResult.specific) {\n return { shouldPropose: false, reason: specificResult.reason };\n }\n\n // Check novelty (requires DB lookup)\n const noveltyResult = await isNovel(repoRoot, insight);\n if (!noveltyResult.novel) {\n return { shouldPropose: false, reason: noveltyResult.reason };\n }\n\n return { shouldPropose: true };\n}\n","/**\n * Trigger detection for automatic memory capture\n *\n * Detects 
patterns that indicate potential learning opportunities:\n * - User corrections\n * - Self-corrections\n * - Test failures\n *\n * Also infers memory item type from insight text:\n * - pattern: \"use X instead of Y\", \"prefer X over Y\"\n * - solution: \"when X, do Y\", \"if X then Y\", \"to fix X\"\n * - preference: \"always X\", \"never X\"\n * - lesson: default for unclassified insights\n */\n\nimport type { Context, MemoryItemType } from '../types.js';\n\n/** Signal data for correction detection */\nexport interface CorrectionSignal {\n messages: string[];\n context: Context;\n}\n\n/** Detected correction result */\nexport interface DetectedCorrection {\n trigger: string;\n correctionMessage: string;\n context: Context;\n}\n\n/** User correction patterns */\nconst USER_CORRECTION_PATTERNS = [\n /\\bno\\b[,.]?\\s/i, // \"no, ...\" or \"no ...\"\n /\\bwrong\\b/i, // \"wrong\"\n /\\bactually\\b/i, // \"actually...\"\n /\\bnot that\\b/i, // \"not that\"\n /\\bi meant\\b/i, // \"I meant\"\n];\n\n/**\n * Detect user correction signals in conversation.\n *\n * Looks for patterns that indicate the user is correcting Claude's\n * understanding or actions.\n *\n * @param signals - Messages and context to analyze\n * @returns Detected correction or null if none found\n */\nexport function detectUserCorrection(signals: CorrectionSignal): DetectedCorrection | null {\n const { messages, context } = signals;\n\n if (messages.length < 2) {\n return null;\n }\n\n // Check later messages for correction patterns\n for (let i = 1; i < messages.length; i++) {\n const message = messages[i];\n if (!message) continue;\n\n for (const pattern of USER_CORRECTION_PATTERNS) {\n if (pattern.test(message)) {\n return {\n trigger: `User correction during ${context.intent}`,\n correctionMessage: message,\n context,\n };\n }\n }\n }\n\n return null;\n}\n\n/** Edit history entry */\nexport interface EditEntry {\n file: string;\n success: boolean;\n timestamp: number;\n}\n\n/** Edit history for self-correction detection */\nexport interface EditHistory {\n edits: EditEntry[];\n}\n\n/** Detected self-correction */\nexport interface DetectedSelfCorrection {\n file: string;\n trigger: string;\n}\n\n/**\n * Detect self-correction patterns in edit history.\n *\n * Looks for edit→fail→re-edit patterns on the same file,\n * which indicate Claude had to correct its own work.\n *\n * @param history - Edit history to analyze\n * @returns Detected self-correction or null if none found\n */\nexport function detectSelfCorrection(history: EditHistory): DetectedSelfCorrection | null {\n const { edits } = history;\n\n if (edits.length < 3) {\n return null;\n }\n\n // Look for edit→fail→re-edit pattern on same file\n for (let i = 0; i <= edits.length - 3; i++) {\n const first = edits[i];\n const second = edits[i + 1];\n const third = edits[i + 2];\n\n if (!first || !second || !third) continue;\n\n // Pattern: success → fail → success on same file\n if (\n first.file === second.file &&\n second.file === third.file &&\n first.success &&\n !second.success &&\n third.success\n ) {\n return {\n file: first.file,\n trigger: `Self-correction on ${first.file}`,\n };\n }\n }\n\n return null;\n}\n\n/** Test result for failure detection */\nexport interface TestResult {\n passed: boolean;\n output: string;\n testFile: string;\n}\n\n/** Detected test failure */\nexport interface DetectedTestFailure {\n testFile: string;\n errorOutput: string;\n trigger: string;\n}\n\n/**\n * Detect test failure patterns.\n *\n * When tests fail, this creates a 
potential learning opportunity\n * if the failure is later fixed.\n *\n * @param testResult - Test result to analyze\n * @returns Detected test failure or null if tests passed\n */\nexport function detectTestFailure(testResult: TestResult): DetectedTestFailure | null {\n if (testResult.passed) {\n return null;\n }\n\n // Extract first meaningful error line for trigger\n const lines = testResult.output.split('\\n').filter((line) => line.trim().length > 0);\n const errorLine = lines.find((line) => /error|fail|assert/i.test(line)) ?? lines[0] ?? '';\n\n return {\n testFile: testResult.testFile,\n errorOutput: testResult.output,\n trigger: `Test failure in ${testResult.testFile}: ${errorLine.slice(0, 100)}`,\n };\n}\n\n/** Patterns indicating a code pattern (bad -> good transformation) */\nconst PATTERN_INDICATORS = [\n /\\buse\\s+.+\\s+instead\\s+of\\b/i,\n /\\bprefer\\s+.+\\s+(over|to)\\b/i,\n];\n\n/** Patterns indicating a solution (problem -> resolution) */\nconst SOLUTION_INDICATORS = [\n /\\bwhen\\s+.+,\\s/i,\n /\\bif\\s+.+\\bthen\\b/i,\n /\\bif\\s+.+,\\s/i,\n /\\bto\\s+fix\\b/i,\n];\n\n/** Patterns indicating a preference (user workflow choice) */\nconst PREFERENCE_INDICATORS = [\n /\\balways\\s+/i,\n /\\bnever\\s+/i,\n];\n\n/**\n * Infer the memory item type from insight text.\n *\n * Rules (checked in priority order):\n * - \"use X instead of Y\" / \"prefer X over Y\" → pattern\n * - \"when X, do Y\" / \"if X then Y\" / \"to fix X\" → solution\n * - \"always X\" / \"never X\" → preference\n * - Default → lesson\n *\n * @param insight - The insight text to classify\n * @returns The inferred memory item type\n */\nexport function inferMemoryItemType(insight: string): MemoryItemType {\n for (const pattern of PATTERN_INDICATORS) {\n if (pattern.test(insight)) return 'pattern';\n }\n\n for (const pattern of SOLUTION_INDICATORS) {\n if (pattern.test(insight)) return 'solution';\n }\n\n for (const pattern of PREFERENCE_INDICATORS) {\n if (pattern.test(insight)) return 'preference';\n }\n\n return 'lesson';\n}\n","/**\n * Trigger detection integration\n *\n * Orchestrates detection -> quality filter -> memory item proposal flow.\n * Infers memory item type from insight content.\n * Provides a high-level API for CLI and hooks.\n */\n\nimport * as fs from 'node:fs/promises';\n\nimport { z } from 'zod';\n\nimport { ContextSchema } from '../types.js';\nimport type { MemoryItemType, Source } from '../types.js';\nimport { shouldPropose } from './quality.js';\nimport {\n detectUserCorrection,\n detectSelfCorrection,\n detectTestFailure,\n inferMemoryItemType,\n} from './triggers.js';\nimport type {\n CorrectionSignal,\n EditHistory,\n TestResult,\n} from './triggers.js';\n\n/** Detection input types */\nexport type DetectionType = 'user' | 'self' | 'test';\n\n/** Input for user correction detection */\nexport interface UserDetectionInput {\n type: 'user';\n data: CorrectionSignal;\n}\n\n/** Input for self correction detection */\nexport interface SelfDetectionInput {\n type: 'self';\n data: EditHistory;\n}\n\n/** Input for test failure detection */\nexport interface TestDetectionInput {\n type: 'test';\n data: TestResult;\n}\n\n/** Union type for all detection inputs */\nexport type DetectionInput = UserDetectionInput | SelfDetectionInput | TestDetectionInput;\n\n/** Result of successful detection */\nexport interface DetectionResult {\n trigger: string;\n source: Source;\n proposedInsight: string;\n memoryItemType: MemoryItemType;\n}\n\n/**\n * Detect triggers and propose lessons.\n *\n * Runs the 
appropriate detector based on input type, then filters\n * through quality checks. Returns a proposal if detection passes\n * all quality filters.\n *\n * @param repoRoot - Repository root path\n * @param input - Detection input with type and data\n * @returns Detection result with proposed insight, or null\n */\nexport async function detectAndPropose(\n repoRoot: string,\n input: DetectionInput\n): Promise<DetectionResult | null> {\n const detected = runDetector(input);\n if (!detected) {\n return null;\n }\n\n const { trigger, source, proposedInsight } = detected;\n\n // Run quality filters on proposed insight\n const quality = await shouldPropose(repoRoot, proposedInsight);\n if (!quality.shouldPropose) {\n return null;\n }\n\n // Infer memory item type from insight content\n const memoryItemType = inferMemoryItemType(proposedInsight);\n\n return { trigger, source, proposedInsight, memoryItemType };\n}\n\n/** Internal detection result before quality filtering */\ninterface RawDetection {\n trigger: string;\n source: Source;\n proposedInsight: string;\n}\n\n/**\n * Run the appropriate detector based on input type.\n */\nfunction runDetector(input: DetectionInput): RawDetection | null {\n switch (input.type) {\n case 'user':\n return detectUserCorrectionFlow(input.data);\n case 'self':\n return detectSelfCorrectionFlow(input.data);\n case 'test':\n return detectTestFailureFlow(input.data);\n }\n}\n\n/**\n * Detect user correction and extract insight.\n */\nfunction detectUserCorrectionFlow(data: CorrectionSignal): RawDetection | null {\n const result = detectUserCorrection(data);\n if (!result) {\n return null;\n }\n\n return {\n trigger: result.trigger,\n source: 'user_correction',\n proposedInsight: result.correctionMessage,\n };\n}\n\n/**\n * Detect self correction and extract insight.\n */\nfunction detectSelfCorrectionFlow(data: EditHistory): RawDetection | null {\n const result = detectSelfCorrection(data);\n if (!result) {\n return null;\n }\n\n return {\n trigger: result.trigger,\n source: 'self_correction',\n // Self-corrections need context to form useful insights\n proposedInsight: `Check ${result.file} for common errors before editing`,\n };\n}\n\n/**\n * Detect test failure and extract insight.\n */\nfunction detectTestFailureFlow(data: TestResult): RawDetection | null {\n const result = detectTestFailure(data);\n if (!result) {\n return null;\n }\n\n return {\n trigger: result.trigger,\n source: 'test_failure',\n proposedInsight: result.errorOutput,\n };\n}\n\n/** Zod schema for CorrectionSignal */\nconst CorrectionSignalSchema = z.object({\n messages: z.array(z.string()),\n context: ContextSchema,\n});\n\n/** Zod schema for EditEntry */\nconst EditEntrySchema = z.object({\n file: z.string(),\n success: z.boolean(),\n timestamp: z.number(),\n});\n\n/** Zod schema for EditHistory */\nconst EditHistorySchema = z.object({\n edits: z.array(EditEntrySchema),\n});\n\n/** Zod schema for TestResult */\nconst TestResultSchema = z.object({\n passed: z.boolean(),\n output: z.string(),\n testFile: z.string(),\n});\n\n/** Zod discriminated union for DetectionInput */\nconst DetectionInputSchema = z.discriminatedUnion('type', [\n z.object({ type: z.literal('user'), data: CorrectionSignalSchema }),\n z.object({ type: z.literal('self'), data: EditHistorySchema }),\n z.object({ type: z.literal('test'), data: TestResultSchema }),\n]);\n\n/**\n * Parse detection input from a JSON file.\n *\n * @param filePath - Path to JSON input file\n * @returns Parsed detection input\n * @throws ZodError if 
file content doesn't match expected schema\n */\nexport async function parseInputFile(filePath: string): Promise<DetectionInput> {\n const content = await fs.readFile(filePath, 'utf-8');\n const data: unknown = JSON.parse(content);\n return DetectionInputSchema.parse(data);\n}\n","/**\n * Session-start lesson retrieval\n *\n * Loads high-severity lessons at the start of a session.\n * No vector search - just filter by severity and recency.\n */\n\nimport { incrementRetrievalCount, readMemoryItems } from '../storage/index.js';\nimport type { MemoryItem, Severity } from '../types.js';\n\n/** Default number of lessons to load at session start */\nconst DEFAULT_LIMIT = 5;\n\n/** A memory item with severity field present */\ntype LessonWithSeverity = MemoryItem & { severity: Severity };\n\n/**\n * Type guard to check if a memory item has severity set\n */\nfunction hasSeverity(item: MemoryItem): item is MemoryItem & { severity: Severity } {\n return item.severity !== undefined;\n}\n\n/**\n * Load high-severity lessons for session start.\n *\n * Returns confirmed, high-severity lessons sorted by recency.\n * These are the most important lessons to surface at the start\n * of a coding session.\n *\n * @param repoRoot - Repository root directory\n * @param limit - Maximum number of lessons to return (default: 5)\n * @returns Array of high-severity lessons, most recent first\n */\nexport async function loadSessionLessons(\n repoRoot: string,\n limit: number = DEFAULT_LIMIT\n): Promise<LessonWithSeverity[]> {\n const { items } = await readMemoryItems(repoRoot);\n\n // Filter for high-severity, confirmed items of any type (excluding invalidated)\n const highSeverityLessons = items.filter(\n (item): item is MemoryItem & { severity: Severity } =>\n hasSeverity(item) &&\n item.severity === 'high' &&\n item.confirmed &&\n !item.invalidatedAt\n );\n\n // Sort by recency (most recent first)\n highSeverityLessons.sort((a, b) => {\n const dateA = new Date(a.created).getTime();\n const dateB = new Date(b.created).getTime();\n return dateB - dateA;\n });\n\n // Return top N and track surfaced lessons as retrieved.\n const topLessons = highSeverityLessons.slice(0, limit);\n if (topLessons.length > 0) {\n incrementRetrievalCount(repoRoot, topLessons.map((lesson) => lesson.id));\n }\n\n return topLessons;\n}\n","/**\n * Plan-time lesson retrieval\n *\n * Retrieves relevant lessons when planning an implementation.\n * Uses vector search to find semantically similar lessons.\n */\n\nimport { CANDIDATE_MULTIPLIER, DEFAULT_TEXT_WEIGHT, MIN_HYBRID_SCORE, mergeHybridResults, rankLessons, searchVector, type RankedLesson, type ScoredLesson } from '../search/index.js';\nimport { incrementRetrievalCount, searchKeywordScored } from '../storage/index.js';\n\n/** Default number of lessons to retrieve */\nconst DEFAULT_LIMIT = 5;\n\n/** Result of plan-time retrieval */\nexport interface PlanRetrievalResult {\n lessons: RankedLesson[];\n message: string;\n}\n\n/**\n * Retrieve relevant lessons for a plan.\n *\n * Uses hybrid search (vector similarity + FTS5 keyword matching)\n * then applies ranking boosts for severity, recency, and confirmation.\n *\n * Falls back to keyword-only search when the embedding model is unavailable.\n *\n * @param repoRoot - Repository root directory\n * @param planText - The plan text to search against\n * @param limit - Maximum number of lessons to return (default: 5)\n * @returns Ranked lessons and formatted message\n */\nexport async function retrieveForPlan(\n repoRoot: string,\n planText: 
string,\n limit: number = DEFAULT_LIMIT\n): Promise<PlanRetrievalResult> {\n const candidateLimit = limit * CANDIDATE_MULTIPLIER;\n\n // Attempt hybrid search: vector similarity + keyword matching.\n // If vector search fails (model unavailable/broken), fall back to keyword-only.\n let vectorResults: ScoredLesson[] = [];\n let vectorFailed = false;\n const keywordResultsPromise = searchKeywordScored(repoRoot, planText, candidateLimit);\n\n try {\n vectorResults = await searchVector(repoRoot, planText, { limit: candidateLimit });\n } catch {\n vectorFailed = true;\n console.error('[compound-agent] Vector search unavailable, falling back to keyword-only search');\n }\n\n const keywordResults = await keywordResultsPromise;\n\n let merged: ScoredLesson[];\n if (vectorFailed) {\n // Keyword-only: use text scores directly (no vector blending, no minScore filter\n // since keyword-only scores are lower than hybrid blended scores)\n merged = mergeHybridResults([], keywordResults, {\n vectorWeight: 0,\n textWeight: DEFAULT_TEXT_WEIGHT,\n });\n } else {\n merged = mergeHybridResults(vectorResults, keywordResults, { minScore: MIN_HYBRID_SCORE });\n }\n\n // Apply ranking boosts (severity, recency, confirmation)\n const ranked = rankLessons(merged);\n\n // Take top N after ranking\n const topLessons = ranked.slice(0, limit);\n\n // Track actual plan-time retrieval usage only for surfaced lessons.\n if (topLessons.length > 0) {\n incrementRetrievalCount(repoRoot, topLessons.map((item) => item.lesson.id));\n }\n\n // Format the Lessons Check message\n const message = formatLessonsCheck(topLessons);\n\n return { lessons: topLessons, message };\n}\n\n/**\n * Format a \"Lessons Check\" message for display.\n *\n * This message is intended to be shown at plan-time to remind\n * the developer of relevant lessons before implementation.\n *\n * @param lessons - Ranked lessons to include in the message\n * @returns Formatted message string\n */\nexport function formatLessonsCheck(lessons: ScoredLesson[]): string {\n const header = 'Lessons Check\\n' + '─'.repeat(40);\n\n if (lessons.length === 0) {\n return `${header}\\nNo relevant lessons found for this plan.`;\n }\n\n const lessonLines = lessons.map((l, i) => {\n const bullet = `${i + 1}.`;\n const insight = l.lesson.insight;\n return `${bullet} ${insight}`;\n });\n\n return `${header}\\n${lessonLines.join('\\n')}`;\n}\n","export { chunkFile } from './chunking.js';\nexport {\n chunkContentHash,\n generateChunkId,\n SUPPORTED_EXTENSIONS,\n} from './types.js';\nexport type { Chunk, ChunkOptions } from './types.js';\n\nexport { indexDocs } from './indexing.js';\nexport type { IndexOptions, IndexResult } from './indexing.js';\n\nexport { searchKnowledge, searchKnowledgeVector } from './search.js';\nexport type { KnowledgeSearchOptions } from './search.js';\n\nexport { embedChunks, getUnembeddedChunkCount } from './embed-chunks.js';\nexport type { EmbedChunksOptions, EmbedChunksResult } from './embed-chunks.js';\n\nexport { acquireEmbedLock, isEmbedLocked } from './embed-lock.js';\nexport type { LockResult } from './embed-lock.js';\n\nexport { writeEmbedStatus, readEmbedStatus } from './embed-status.js';\nexport type { EmbedStatus } from './embed-status.js';\n\nexport { spawnBackgroundEmbed, runBackgroundEmbed, indexAndSpawnEmbed } from './embed-background.js';\nexport type { SpawnEmbedResult } from './embed-background.js';\n","/**\n * Knowledge chunk search: vector similarity and hybrid (vector + FTS5).\n */\n\nimport type { KnowledgeChunk } from 
'../storage/sqlite-knowledge/types.js';\nimport type { GenericScoredItem } from '../search/hybrid.js';\nimport { openKnowledgeDb } from '../storage/sqlite-knowledge/connection.js';\nimport { searchChunksKeywordScored } from '../storage/sqlite-knowledge/search.js';\nimport { embedText } from '../embeddings/nomic.js';\nimport { cosineSimilarity } from '../search/vector.js';\nimport { mergeHybridScores, CANDIDATE_MULTIPLIER, MIN_HYBRID_SCORE } from '../search/hybrid.js';\nimport { isModelUsable } from '../embeddings/model.js';\n\nexport interface KnowledgeSearchOptions {\n limit?: number;\n}\n\nconst DEFAULT_KNOWLEDGE_LIMIT = 6;\n\n/** Lightweight row for phase-1 similarity scoring (no text payload) */\ninterface EmbeddingRow {\n id: string;\n embedding: Buffer;\n}\n\n/** Full row for phase-2 hydration of top-k results */\ninterface ChunkDataRow {\n id: string;\n file_path: string;\n start_line: number;\n end_line: number;\n content_hash: string;\n text: string;\n model: string | null;\n updated_at: string;\n}\n\n/**\n * Vector search over knowledge chunks (two-phase for memory efficiency).\n *\n * Phase 1: Load only IDs + embeddings, compute similarity, select top-k.\n * Phase 2: Hydrate full chunk data for top-k results only.\n */\nexport async function searchKnowledgeVector(\n repoRoot: string,\n query: string,\n options?: KnowledgeSearchOptions\n): Promise<GenericScoredItem<KnowledgeChunk>[]> {\n const limit = options?.limit ?? DEFAULT_KNOWLEDGE_LIMIT;\n const database = openKnowledgeDb(repoRoot);\n\n // Phase 1: IDs + embeddings only (avoids loading all text into memory)\n const embRows = database\n .prepare('SELECT id, embedding FROM chunks WHERE embedding IS NOT NULL')\n .all() as EmbeddingRow[];\n\n if (embRows.length === 0) return [];\n\n const queryVector = await embedText(query);\n\n const scored: { id: string; score: number }[] = [];\n for (const row of embRows) {\n const embFloat = new Float32Array(\n row.embedding.buffer,\n row.embedding.byteOffset,\n row.embedding.byteLength / 4\n );\n scored.push({ id: row.id, score: cosineSimilarity(queryVector, embFloat) });\n }\n\n scored.sort((a, b) => b.score - a.score);\n const topK = scored.slice(0, limit);\n if (topK.length === 0) return [];\n\n // Phase 2: Hydrate full data for top-k only\n // Safe: placeholders is a string of '?' 
characters, not user input\n const placeholders = topK.map(() => '?').join(',');\n const sql = `SELECT id, file_path, start_line, end_line, content_hash, text, model, updated_at FROM chunks WHERE id IN (${placeholders})`;\n const dataRows = database\n .prepare(sql)\n .all(...topK.map((r) => r.id)) as ChunkDataRow[];\n\n const dataMap = new Map(dataRows.map((r) => [r.id, r]));\n const results: GenericScoredItem<KnowledgeChunk>[] = [];\n\n for (const { id, score } of topK) {\n const row = dataMap.get(id);\n if (!row) continue;\n const chunk: KnowledgeChunk = {\n id: row.id,\n filePath: row.file_path,\n startLine: row.start_line,\n endLine: row.end_line,\n contentHash: row.content_hash,\n text: row.text,\n updatedAt: row.updated_at,\n };\n if (row.model !== null) {\n chunk.model = row.model;\n }\n results.push({ item: chunk, score });\n }\n\n return results;\n}\n\n/**\n * Hybrid search combining vector + FTS5 keyword on knowledge.sqlite.\n *\n * When embedding model is usable: parallel vector + keyword search, merged.\n * When model unavailable: FTS5-only fallback.\n */\nexport async function searchKnowledge(\n repoRoot: string,\n query: string,\n options?: KnowledgeSearchOptions\n): Promise<GenericScoredItem<KnowledgeChunk>[]> {\n const limit = options?.limit ?? DEFAULT_KNOWLEDGE_LIMIT;\n const candidateLimit = limit * CANDIDATE_MULTIPLIER;\n\n const usability = await isModelUsable();\n\n if (usability.usable) {\n // Hybrid: parallel vector + keyword\n const [vectorResults, keywordResults] = await Promise.all([\n searchKnowledgeVector(repoRoot, query, { limit: candidateLimit }),\n Promise.resolve(searchChunksKeywordScored(repoRoot, query, candidateLimit)),\n ]);\n\n // When no embeddings stored, vector results are empty and hybrid merge\n // would suppress keyword-only results below MIN_HYBRID_SCORE. 
Fall back\n // to keyword results directly.\n if (vectorResults.length === 0) {\n return keywordResults\n .map((k) => ({ item: k.chunk, score: k.score }))\n .slice(0, limit);\n }\n\n const genericKw: GenericScoredItem<KnowledgeChunk>[] = keywordResults.map((k) => ({\n item: k.chunk,\n score: k.score,\n }));\n\n const merged = mergeHybridScores(\n vectorResults,\n genericKw,\n (item) => item.id,\n { limit, minScore: MIN_HYBRID_SCORE }\n );\n\n return merged;\n }\n\n // FTS-only fallback\n const keywordResults = searchChunksKeywordScored(repoRoot, query, limit);\n return keywordResults.map((k) => ({ item: k.chunk, score: k.score }));\n}\n","/**\n * CLI utility functions.\n *\n * Pure functions extracted from cli.ts for testability.\n */\n\n/**\n * Format bytes to human-readable string.\n *\n * @param bytes - Number of bytes\n * @returns Formatted string (e.g., \"1.5 KB\", \"2.0 MB\")\n */\nexport function formatBytes(bytes: number): string {\n if (bytes === 0) return '0 B';\n if (bytes < 1024) return `${bytes} B`;\n const kb = bytes / 1024;\n if (kb < 1024) return `${kb.toFixed(1)} KB`;\n const mb = kb / 1024;\n return `${mb.toFixed(1)} MB`;\n}\n\n/**\n * Parse limit option and validate it's a positive integer.\n *\n * @param value - String value from command option\n * @param name - Option name for error message\n * @returns Parsed integer\n * @throws Error if value is not a valid positive integer\n */\nexport function parseLimit(value: string, name: string): number {\n const parsed = parseInt(value, 10);\n if (Number.isNaN(parsed) || parsed <= 0) {\n throw new Error(`Invalid ${name}: must be a positive integer`);\n }\n return parsed;\n}\n\n/**\n * Get repository root from environment variable or current directory.\n *\n * @returns Repository root path for lesson storage\n */\nexport function getRepoRoot(): string {\n return process.env['COMPOUND_AGENT_ROOT'] || process.cwd();\n}\n\n// ============================================================================\n// Beads shared utilities\n// ============================================================================\n\n/** Extract the short suffix from a full beads ID (e.g., 'learning_agent-b8c' → 'b8c'). */\nexport function shortId(fullId: string): string {\n return fullId.replace(/^[^-]+-/, '');\n}\n\n/** Strict pattern for valid beads epic/task IDs. */\nexport const EPIC_ID_PATTERN = /^[a-zA-Z0-9_-]+$/;\n\n/** Validate an epic ID, throwing if invalid. */\nexport function validateEpicId(epicId: string): void {\n if (!EPIC_ID_PATTERN.test(epicId)) {\n throw new Error(`Invalid epic ID: \"${epicId}\" (must be alphanumeric with hyphens/underscores)`);\n }\n}\n\nexport interface BeadsDep {\n id: string;\n title: string;\n status: string;\n}\n\n/** Parse dependencies from `bd show --json` output. */\nexport function parseBdShowDeps(raw: string): BeadsDep[] {\n const data = JSON.parse(raw);\n const issue = Array.isArray(data) ? data[0] : data;\n if (!issue) return [];\n const depsArray = issue.depends_on ?? issue.dependencies ?? [];\n return depsArray.map((dep: { id?: string; title?: string; status?: string }) => ({\n id: dep.id ?? '',\n title: dep.title ?? '',\n status: dep.status ?? 
'open',\n }));\n}\n","/**\n * One-shot setup command - Configure everything for compound-agent.\n *\n * Combines: init + Claude hooks + optionally model download.\n */\n\nimport { existsSync } from 'node:fs';\nimport { mkdir, readFile, rm, writeFile } from 'node:fs/promises';\nimport { dirname, join } from 'node:path';\nimport type { Command } from 'commander';\n\nimport { getRepoRoot } from '../cli-utils.js';\nimport { isModelAvailable, resolveModel } from '../memory/embeddings/index.js';\nimport { LESSONS_PATH } from '../memory/storage/index.js';\nimport { getGlobalOpts, out } from '../commands/index.js';\nimport { playInstallBanner } from './banner.js';\nimport { checkBeadsAvailable, runFullBeadsCheck, type BeadsCheckResult } from './beads-check.js';\nimport { printBeadsFullStatus, printGitignoreStatus, printPnpmConfigStatus, printScopeStatus, printSetupGitHooksStatus, printSqliteStatus, runStatus } from './display-utils.js';\nimport {\n addAllCompoundAgentHooks,\n getClaudeSettingsPath,\n hasAllCompoundAgentHooks,\n readClaudeSettings,\n writeClaudeSettings,\n} from './claude-helpers.js';\nimport { ensureGitignore, type GitignoreResult } from './gitignore.js';\nimport { installGeminiAdapter } from './gemini.js';\nimport { installPreCommitHook, installPostCommitHook, type HookInstallResult } from './hooks.js';\nimport {\n createPluginManifest,\n ensureClaudeMdReference,\n ensurePnpmBuildConfig,\n GENERATED_MARKER,\n installAgentRoleSkills,\n installAgentTemplates,\n installDocTemplates,\n installPhaseSkills,\n installResearchDocs,\n installWorkflowCommands,\n updateAgentsMd,\n verifySqlite,\n type PnpmConfigResult,\n type SqliteVerifyResult,\n} from './primitives.js';\nimport { checkUserScope, type ScopeCheckResult } from './scope-check.js';\nimport { LEGACY_ROOT_SLASH_COMMANDS } from './templates.js';\nimport { AGENT_TEMPLATES, AGENT_ROLE_SKILLS, DOC_TEMPLATES, WORKFLOW_COMMANDS, PHASE_SKILLS } from './templates/index.js';\nimport { VERSION } from '../version.js';\nimport { runUninstall } from './uninstall.js';\nimport { runUpgrade, detectExistingInstall, type UpgradeResult } from './upgrade.js';\n\n/** Result of one-shot setup */\ninterface SetupResult {\n lessonsDir: string;\n agentsMd: boolean;\n hooks: boolean;\n gitHooks: HookInstallResult['status'] | 'skipped';\n postCommitHook: HookInstallResult['status'] | 'skipped';\n model: 'downloaded' | 'already_exists' | 'failed' | 'skipped';\n pnpmConfig: PnpmConfigResult;\n sqlite: SqliteVerifyResult;\n beads: BeadsCheckResult;\n scope: ScopeCheckResult;\n upgrade: UpgradeResult | null;\n gitignore: GitignoreResult;\n}\n\n/**\n * Ensure lessons directory and index file exist.\n */\nasync function ensureLessonsDirectory(repoRoot: string): Promise<string> {\n const lessonsDir = dirname(join(repoRoot, LESSONS_PATH));\n await mkdir(lessonsDir, { recursive: true });\n\n const indexPath = join(repoRoot, LESSONS_PATH);\n if (!existsSync(indexPath)) {\n await writeFile(indexPath, '', 'utf-8');\n }\n\n return lessonsDir;\n}\n\n/**\n * Configure Claude Code settings: hooks in settings.json.\n */\nasync function configureClaudeSettings(): Promise<{ hooks: boolean }> {\n const settingsPath = getClaudeSettingsPath(false);\n let settings: Record<string, unknown>;\n try {\n settings = await readClaudeSettings(settingsPath);\n } catch {\n // File exists but has malformed JSON — warn and skip to avoid data loss\n console.error(`Warning: Could not parse ${settingsPath} — skipping hook installation.\\nFix the JSON syntax and re-run setup.`);\n return { 
hooks: false };\n }\n\n const hadHooks = hasAllCompoundAgentHooks(settings);\n addAllCompoundAgentHooks(settings);\n await writeClaudeSettings(settingsPath, settings);\n\n return {\n hooks: !hadHooks,\n };\n}\n\n/**\n * Run one-shot setup.\n */\nexport async function runSetup(options: { skipModel?: boolean; skipHooks?: boolean }): Promise<SetupResult> {\n const repoRoot = getRepoRoot();\n\n // Pre-flight checks\n const scope = checkUserScope(repoRoot);\n const beads = checkBeadsAvailable();\n\n // Upgrade detection\n let upgrade: UpgradeResult | null = null;\n if (detectExistingInstall(repoRoot)) {\n upgrade = await runUpgrade(repoRoot);\n }\n\n // 0. Ensure pnpm native build config (before anything that needs native addons)\n const pnpmConfig = await ensurePnpmBuildConfig(repoRoot);\n\n // 0b. Verify SQLite loads (auto-rebuild if needed for pnpm)\n const sqlite = verifySqlite(repoRoot, pnpmConfig);\n\n // 1. Initialize lessons directory\n const lessonsDir = await ensureLessonsDirectory(repoRoot);\n\n // 2. Update AGENTS.md\n const agentsMdUpdated = await updateAgentsMd(repoRoot);\n\n // 3. Ensure CLAUDE.md reference\n await ensureClaudeMdReference(repoRoot);\n\n // 4. Create plugin manifest\n await createPluginManifest(repoRoot);\n\n // 5. Install agent templates\n await installAgentTemplates(repoRoot);\n\n // 6. Install workflow commands (includes utility commands)\n await installWorkflowCommands(repoRoot);\n\n // 7. Install phase skills\n await installPhaseSkills(repoRoot);\n\n // 8. Install agent role skills\n await installAgentRoleSkills(repoRoot);\n\n // 9. Install documentation templates\n await installDocTemplates(repoRoot);\n\n // 9b. Install research docs\n await installResearchDocs(repoRoot);\n\n // 9c. Clean deprecated paths from prior versions\n await cleanDeprecatedPaths(repoRoot, false);\n\n // 10. Install pre-commit git hook\n let gitHooks: HookInstallResult['status'] | 'skipped' = 'skipped';\n if (!options.skipHooks) {\n gitHooks = (await installPreCommitHook(repoRoot)).status;\n }\n\n // 10b. Install post-commit git hook (auto-indexes docs/)\n let postCommitHook: HookInstallResult['status'] | 'skipped' = 'skipped';\n if (!options.skipHooks) {\n postCommitHook = (await installPostCommitHook(repoRoot)).status;\n }\n\n // 11. Configure Claude settings (hooks in settings.json)\n const { hooks } = await configureClaudeSettings();\n\n // 11b. Install Gemini CLI compatibility hooks\n await installGeminiAdapter({ dryRun: false, json: true });\n\n // 12. Ensure .gitignore has required patterns\n const gitignore = await ensureGitignore(repoRoot);\n\n // 13. Download model (unless skipped)\n let modelStatus: 'downloaded' | 'already_exists' | 'failed' | 'skipped' = 'skipped';\n if (!options.skipModel) {\n try {\n const alreadyExisted = isModelAvailable();\n if (!alreadyExisted) {\n await resolveModel({ cli: false });\n modelStatus = 'downloaded';\n } else {\n modelStatus = 'already_exists';\n }\n } catch {\n modelStatus = 'failed';\n }\n }\n\n // 14. Trigger background embedding if docs/ exists and model available\n if (modelStatus === 'downloaded' || modelStatus === 'already_exists') {\n try {\n const { indexAndSpawnEmbed } = await import('../memory/knowledge/embed-background.js');\n const spawnResult = await indexAndSpawnEmbed(repoRoot);\n if (spawnResult?.spawned) {\n out.info('Embedding in progress (background). 
You can start working.');\n }\n } catch {\n // Non-fatal: don't break setup if background embedding fails to spawn\n }\n }\n\n return {\n lessonsDir,\n agentsMd: agentsMdUpdated,\n hooks,\n gitHooks,\n postCommitHook,\n model: modelStatus,\n pnpmConfig,\n sqlite,\n beads,\n scope,\n upgrade,\n gitignore,\n };\n}\n\n\n/** Paths deprecated since v1.5 (worktree feature removed — superseded by Claude Code native worktrees). */\nconst DEPRECATED_PATHS = [\n ['.claude', 'skills', 'compound', 'set-worktree'],\n ['.claude', 'commands', 'compound', 'set-worktree.md'],\n ['.gemini', 'skills', 'compound-set-worktree'],\n ['.gemini', 'commands', 'compound', 'set-worktree.toml'],\n];\n\nasync function cleanDeprecatedPaths(repoRoot: string, dryRun: boolean): Promise<number> {\n let count = 0;\n for (const segments of DEPRECATED_PATHS) {\n const fullPath = join(repoRoot, ...segments);\n if (existsSync(fullPath)) {\n const rel = segments.join('/');\n if (!dryRun) {\n await rm(fullPath, { recursive: true, force: true });\n console.log(` Removed deprecated: ${rel}`);\n } else {\n console.log(` [dry-run] Would remove deprecated: ${rel}`);\n }\n count++;\n }\n }\n return count;\n}\n\n/**\n * Update generated files with latest templates.\n * Files inside compound/ subdirectories are always managed and overwritten.\n */\nexport async function runUpdate(repoRoot: string, dryRun: boolean): Promise<{\n updated: number;\n added: number;\n configUpdated: boolean;\n upgrade: UpgradeResult;\n gitignore: GitignoreResult;\n}> {\n // Run upgrade pipeline (deprecated commands, headers, doc version)\n const upgrade = await runUpgrade(repoRoot, dryRun);\n\n let updated = 0;\n let added = 0;\n\n async function processFile(filePath: string, content: string): Promise<void> {\n if (!existsSync(filePath)) {\n if (!dryRun) {\n await mkdir(dirname(filePath), { recursive: true });\n await writeFile(filePath, content, 'utf-8');\n }\n added++;\n } else {\n const existing = await readFile(filePath, 'utf-8');\n // Strip any legacy marker for comparison\n const cleanExisting = existing.startsWith(GENERATED_MARKER)\n ? 
existing.slice(GENERATED_MARKER.length)\n : existing;\n if (cleanExisting !== content) {\n if (!dryRun) await writeFile(filePath, content, 'utf-8');\n updated++;\n }\n }\n }\n\n for (const [filename, content] of Object.entries(AGENT_TEMPLATES)) {\n await processFile(join(repoRoot, '.claude', 'agents', 'compound', filename), content);\n }\n for (const [filename, content] of Object.entries(WORKFLOW_COMMANDS)) {\n await processFile(join(repoRoot, '.claude', 'commands', 'compound', filename), content);\n }\n for (const [phase, content] of Object.entries(PHASE_SKILLS)) {\n await processFile(join(repoRoot, '.claude', 'skills', 'compound', phase, 'SKILL.md'), content);\n }\n for (const [name, content] of Object.entries(AGENT_ROLE_SKILLS)) {\n await processFile(join(repoRoot, '.claude', 'skills', 'compound', 'agents', name, 'SKILL.md'), content);\n }\n for (const [filename, template] of Object.entries(DOC_TEMPLATES)) {\n const content = template\n .replace('{{VERSION}}', VERSION)\n .replace('{{DATE}}', new Date().toISOString().slice(0, 10));\n await processFile(join(repoRoot, 'docs', 'compound', filename), content);\n }\n\n if (!dryRun) await installResearchDocs(repoRoot, { force: true });\n\n // Migration: clean up legacy root-level slash commands (v1.0; only marker-tagged files)\n for (const filename of LEGACY_ROOT_SLASH_COMMANDS) {\n const filePath = join(repoRoot, '.claude', 'commands', filename);\n if (existsSync(filePath)) {\n const content = await readFile(filePath, 'utf-8');\n if (content.startsWith(GENERATED_MARKER)) {\n if (!dryRun) await rm(filePath);\n }\n }\n }\n\n // Migration: remove deprecated worktree files (superseded by Claude Code native worktrees)\n updated += await cleanDeprecatedPaths(repoRoot, dryRun);\n\n // Migration: remove old monolithic HOW_TO_COMPOUND.md (replaced by split docs)\n const oldDocPath = join(repoRoot, 'docs', 'compound', 'HOW_TO_COMPOUND.md');\n if (existsSync(oldDocPath)) {\n const oldContent = await readFile(oldDocPath, 'utf-8');\n // Only remove if it has the version frontmatter (was generated by us)\n if (oldContent.startsWith('---\\nversion:')) {\n if (!dryRun) await rm(oldDocPath);\n updated++;\n }\n }\n\n // Ensure hooks config is current\n let configUpdated = false;\n if (!dryRun) {\n const { hooks } = await configureClaudeSettings();\n await installGeminiAdapter({ dryRun: false, json: true });\n configUpdated = hooks;\n }\n\n // Ensure .gitignore has required patterns\n const gitignore = dryRun ? 
{ added: [] } : await ensureGitignore(repoRoot);\n\n return { updated, added, configUpdated, upgrade, gitignore };\n}\n\n\nconst POST_COMMIT_STATUS_MSG: Record<string, string> = {\n skipped: 'Skipped (--skip-hooks)',\n not_git_repo: 'Skipped (not a git repository)',\n installed: 'Installed (auto-indexes docs/ on commit)',\n appended: 'Appended to existing post-commit hook',\n already_installed: 'Already configured',\n};\n\nfunction printPostCommitHookStatus(status: HookInstallResult['status'] | 'skipped'): void {\n console.log(` Post-commit hook: ${POST_COMMIT_STATUS_MSG[status]}`);\n}\n\nconst MODEL_STATUS_MSG: Record<string, string> = {\n skipped: 'Skipped (--skip-model)',\n downloaded: 'Downloaded',\n already_exists: 'Already exists',\n failed: 'Download failed (run `ca download-model` manually)',\n};\n\nasync function printSetupResult(result: SetupResult, quiet: boolean, repoRoot: string): Promise<void> {\n if (!quiet) {\n if (result.upgrade?.isUpgrade) {\n console.log(` ${result.upgrade.message}`);\n console.log(' Tip: Run with --update to regenerate managed files with latest templates.');\n }\n if (process.stdout.isTTY) await playInstallBanner();\n }\n out.success('Compound agent setup complete');\n console.log(` Lessons directory: ${result.lessonsDir}`);\n console.log(` AGENTS.md: ${result.agentsMd ? 'Updated' : 'Already configured'}`);\n console.log(` Claude hooks: ${result.hooks ? 'Installed' : 'Already configured'}`);\n printSetupGitHooksStatus(result.gitHooks);\n printPostCommitHookStatus(result.postCommitHook);\n printPnpmConfigStatus(result.pnpmConfig);\n printSqliteStatus(result.sqlite);\n printGitignoreStatus(result.gitignore);\n console.log(` Model: ${MODEL_STATUS_MSG[result.model]}`);\n const fullBeads = runFullBeadsCheck(repoRoot);\n printBeadsFullStatus(fullBeads);\n printScopeStatus(result.scope);\n console.log('\\nNext steps:\\n 1. Restart Claude Code to load hooks\\n 2. Use `npx ca search` and `npx ca learn` commands');\n}\n\n/**\n * Register the one-shot setup action as the default subcommand of setup.\n * Using a default subcommand prevents its options (--uninstall, --dry-run)\n * from being consumed by the parent when other subcommands like \"claude\"\n * define the same flags.\n */\nexport function registerSetupAllCommand(setupCommand: Command): void {\n setupCommand.description('One-shot setup: init + hooks + model');\n\n setupCommand\n .command('all', { isDefault: true })\n .description('Run full setup (default)')\n .option('--skip-model', 'Skip embedding model download')\n .option('--skip-hooks', 'Skip git hooks installation')\n .option('--uninstall', 'Remove all generated files and configuration')\n .option('--update', 'Regenerate managed files in compound/ directories')\n .option('--status', 'Show installation status')\n .option('--dry-run', 'Show what would change without changing')\n .action(async function (this: Command, options: {\n skipModel?: boolean;\n skipHooks?: boolean;\n uninstall?: boolean;\n update?: boolean;\n status?: boolean;\n dryRun?: boolean;\n }) {\n const repoRoot = getRepoRoot();\n const dryRun = options.dryRun ?? false;\n\n if (options.uninstall) {\n const prefix = dryRun ? '[dry-run] Would have: ' : '';\n const actions = await runUninstall(repoRoot, dryRun);\n if (actions.length === 0) {\n console.log('Nothing to uninstall.');\n } else {\n for (const action of actions) {\n console.log(` ${prefix}${action}`);\n }\n out.success(dryRun ? 
'Dry run complete (no changes made)' : 'Uninstall complete');\n }\n return;\n }\n\n if (options.update) {\n if (!dryRun && process.stdout.isTTY) await playInstallBanner();\n const result = await runUpdate(repoRoot, dryRun);\n const prefix = dryRun ? '[dry-run] ' : '';\n if (result.upgrade.isUpgrade) {\n console.log(` ${prefix}${result.upgrade.message}`);\n }\n if (result.updated === 0 && result.added === 0) {\n console.log(`${prefix}All generated files are up to date.`);\n } else {\n if (result.updated > 0) console.log(` ${prefix}Updated: ${result.updated} file(s)`);\n if (result.added > 0) console.log(` ${prefix}Added: ${result.added} file(s)`);\n }\n if (result.gitignore.added.length > 0) {\n console.log(` ${prefix}.gitignore: Added [${result.gitignore.added.join(', ')}]`);\n }\n if (result.configUpdated) console.log(` ${prefix}Config: hooks updated`);\n const fullBeads = runFullBeadsCheck(repoRoot);\n printBeadsFullStatus(fullBeads);\n const scope = checkUserScope(repoRoot);\n printScopeStatus(scope);\n return;\n }\n\n if (options.status) {\n await runStatus(repoRoot);\n return;\n }\n\n // Default: full setup\n const result = await runSetup({ skipModel: options.skipModel, skipHooks: options.skipHooks });\n const { quiet } = getGlobalOpts(this);\n await printSetupResult(result, quiet, repoRoot);\n });\n}\n","/**\n * Shared display/print utilities for setup commands.\n */\n\nimport { existsSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport { runFullBeadsCheck, type BeadsFullCheck } from './beads-check.js';\nimport {\n getClaudeSettingsPath,\n hasAllCompoundAgentHooks,\n readClaudeSettings,\n} from './claude-helpers.js';\nimport { ensureSqliteAvailable } from '../memory/storage/index.js';\nimport type { GitignoreResult } from './gitignore.js';\nimport type { HookInstallResult } from './hooks.js';\nimport type { PnpmConfigResult, SqliteVerifyResult } from './primitives.js';\nimport { checkUserScope, type ScopeCheckResult } from './scope-check.js';\n\nexport function printGitignoreStatus(result: GitignoreResult): void {\n if (result.added.length > 0) {\n console.log(` .gitignore: Added [${result.added.join(', ')}]`);\n } else {\n console.log(' .gitignore: Already configured');\n }\n}\n\nexport function printSetupGitHooksStatus(gitHooks: HookInstallResult['status'] | 'skipped'): void {\n if (gitHooks === 'skipped') {\n console.log(' Git hooks: Skipped (--skip-hooks)');\n return;\n }\n if (gitHooks === 'not_git_repo') {\n console.log(' Git hooks: Skipped (not a git repository)');\n return;\n }\n if (gitHooks === 'installed') {\n console.log(' Git hooks: Installed');\n return;\n }\n if (gitHooks === 'appended') {\n console.log(' Git hooks: Appended to existing pre-commit hook');\n return;\n }\n console.log(' Git hooks: Already configured');\n}\n\nexport function printPnpmConfigStatus(result: PnpmConfigResult): void {\n if (!result.isPnpm) return;\n if (result.alreadyConfigured) {\n console.log(' pnpm config: onlyBuiltDependencies already configured');\n } else if (result.added.length > 0) {\n console.log(` pnpm config: Added onlyBuiltDependencies [${result.added.join(', ')}]`);\n }\n}\n\nconst SQLITE_STATUS_MSG: Record<SqliteVerifyResult['action'], string> = {\n already_ok: 'OK',\n rebuilt: 'OK (rebuilt native module)',\n installed_and_rebuilt: 'OK (installed + rebuilt native module)',\n failed: 'FAILED',\n};\n\nexport function printSqliteStatus(result: SqliteVerifyResult): void {\n const msg = SQLITE_STATUS_MSG[result.action];\n console.log(` SQLite: ${msg}`);\n if 
(result.error) {\n console.log(` ${result.error}`);\n }\n}\n\nexport function printBeadsFullStatus(check: BeadsFullCheck): void {\n console.log(` Beads CLI: ${check.cliAvailable ? 'OK' : 'not found'}`);\n if (check.cliAvailable) {\n console.log(` Beads repo: ${check.initialized ? 'OK' : 'not initialized (run: bd init)'}`);\n if (check.initialized) {\n console.log(` Beads health: ${check.healthy ? 'OK' : `issues found${check.healthMessage ? ` — ${check.healthMessage}` : ''}`}`);\n }\n } else if (check.healthMessage) {\n console.log(` ${check.healthMessage}`);\n }\n}\n\nexport function printScopeStatus(scope: ScopeCheckResult): void {\n if (scope.isUserScope) {\n console.log(' Scope: user-scope (reduced compounding value)');\n } else {\n console.log(' Scope: OK (repository scope)');\n }\n}\n\n/**\n * Show installation status (used by `ca setup --status`).\n */\nexport async function runStatus(repoRoot: string): Promise<void> {\n const agentsDir = join(repoRoot, '.claude', 'agents', 'compound');\n const commandsDir = join(repoRoot, '.claude', 'commands', 'compound');\n const skillsDir = join(repoRoot, '.claude', 'skills', 'compound');\n const pluginPath = join(repoRoot, '.claude', 'plugin.json');\n\n console.log('Compound Agent Status:');\n console.log(` Agent templates: ${existsSync(agentsDir) ? 'installed' : 'not installed'}`);\n console.log(` Workflow commands: ${existsSync(commandsDir) ? 'installed' : 'not installed'}`);\n console.log(` Phase skills: ${existsSync(skillsDir) ? 'installed' : 'not installed'}`);\n console.log(` Plugin manifest: ${existsSync(pluginPath) ? 'installed' : 'not installed'}`);\n\n const settingsPath = getClaudeSettingsPath(false);\n let hooksInstalled = false;\n try {\n const settings = await readClaudeSettings(settingsPath);\n hooksInstalled = hasAllCompoundAgentHooks(settings);\n } catch {\n // No settings\n }\n console.log(` Hooks: ${hooksInstalled ? 'installed' : 'not installed'}`);\n\n let sqliteOk = false;\n try {\n ensureSqliteAvailable();\n sqliteOk = true;\n } catch { /* not loadable */ }\n console.log(` SQLite: ${sqliteOk ? 'OK' : 'not available (run: pnpm rebuild better-sqlite3)'}`);\n\n const fullBeads = runFullBeadsCheck(repoRoot);\n printBeadsFullStatus(fullBeads);\n const scope = checkUserScope(repoRoot);\n printScopeStatus(scope);\n}\n","/**\n * Phase check state machine.\n *\n * Manages cook-it phase state in .claude/.ca-phase-state.json.\n */\n\nimport { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from 'node:fs';\nimport { join } from 'node:path';\nimport type { Command } from 'commander';\n\nimport { EPIC_ID_PATTERN, getRepoRoot } from '../cli-utils.js';\n\nconst STATE_DIR = '.claude';\nconst STATE_FILE = '.ca-phase-state.json';\n\n/** Max age for phase state before it's considered stale (72 hours). 
*/\nexport const PHASE_STATE_MAX_AGE_MS = 72 * 60 * 60 * 1000;\n\nexport const PHASES = ['spec-dev', 'plan', 'work', 'review', 'compound'] as const;\nexport type PhaseName = (typeof PHASES)[number];\n\nexport const GATES = ['post-plan', 'gate-3', 'gate-4', 'final'] as const;\nexport type GateName = (typeof GATES)[number];\n\nconst PHASE_INDEX: Record<PhaseName, number> = {\n 'spec-dev': 1,\n plan: 2,\n work: 3,\n review: 4,\n compound: 5,\n};\n\nexport interface PhaseState {\n cookit_active: boolean;\n epic_id: string;\n current_phase: PhaseName;\n phase_index: number;\n skills_read: string[];\n gates_passed: GateName[];\n started_at: string;\n}\n\nfunction getStatePath(repoRoot: string): string {\n return join(repoRoot, STATE_DIR, STATE_FILE);\n}\n\nfunction isPhaseName(value: unknown): value is PhaseName {\n return typeof value === 'string' && (PHASES as readonly string[]).includes(value);\n}\n\nfunction isGateName(value: unknown): value is GateName {\n return typeof value === 'string' && (GATES as readonly string[]).includes(value);\n}\n\nfunction isIsoDate(value: unknown): value is string {\n if (typeof value !== 'string') return false;\n return !Number.isNaN(Date.parse(value));\n}\n\nfunction isStringArray(value: unknown): value is string[] {\n return Array.isArray(value) && value.every((item) => typeof item === 'string');\n}\n\n/** Migrate legacy lfg_active field to cookit_active. */\nfunction migrateLegacyFields(raw: Record<string, unknown>): void {\n if (raw.cookit_active === undefined && typeof raw.lfg_active === 'boolean') {\n raw.cookit_active = raw.lfg_active;\n delete raw.lfg_active;\n }\n}\n\nfunction validatePhaseState(raw: unknown): raw is PhaseState {\n if (typeof raw !== 'object' || raw === null) return false;\n const state = raw as Record<string, unknown>;\n migrateLegacyFields(state);\n\n return (\n typeof state.cookit_active === 'boolean' &&\n typeof state.epic_id === 'string' &&\n isPhaseName(state.current_phase) &&\n typeof state.phase_index === 'number' &&\n state.phase_index >= 1 &&\n state.phase_index <= 5 &&\n isStringArray(state.skills_read) &&\n Array.isArray(state.gates_passed) &&\n state.gates_passed.every((gate) => isGateName(gate)) &&\n isIsoDate(state.started_at)\n );\n}\n\nexport function expectedGateForPhase(phaseIndex: number): GateName | null {\n if (phaseIndex === 2) return 'post-plan';\n if (phaseIndex === 3) return 'gate-3';\n if (phaseIndex === 4) return 'gate-4';\n if (phaseIndex === 5) return 'final';\n return null;\n}\n\nexport function initPhaseState(repoRoot: string, epicId: string): PhaseState {\n const dir = join(repoRoot, STATE_DIR);\n mkdirSync(dir, { recursive: true });\n\n const state: PhaseState = {\n cookit_active: true,\n epic_id: epicId,\n current_phase: 'spec-dev',\n phase_index: PHASE_INDEX['spec-dev'],\n skills_read: [],\n gates_passed: [],\n started_at: new Date().toISOString(),\n };\n writeFileSync(getStatePath(repoRoot), JSON.stringify(state, null, 2), 'utf-8');\n return state;\n}\n\nexport function getPhaseState(repoRoot: string): PhaseState | null {\n try {\n const path = getStatePath(repoRoot);\n if (!existsSync(path)) return null;\n const raw = readFileSync(path, 'utf-8');\n const parsed = JSON.parse(raw) as unknown;\n if (!validatePhaseState(parsed)) return null;\n // TTL check: discard and clean up stale state from abandoned cook-it runs\n const age = Date.now() - new Date(parsed.started_at).getTime();\n if (age > PHASE_STATE_MAX_AGE_MS) {\n cleanPhaseState(repoRoot);\n return null;\n }\n return parsed;\n } catch {\n 
return null;\n }\n}\n\nexport function updatePhaseState(repoRoot: string, partial: Partial<PhaseState>): PhaseState | null {\n const current = getPhaseState(repoRoot);\n if (current === null) return null;\n\n const updated: PhaseState = {\n ...current,\n ...partial,\n };\n\n if (!validatePhaseState(updated)) return null;\n\n writeFileSync(getStatePath(repoRoot), JSON.stringify(updated, null, 2), 'utf-8');\n return updated;\n}\n\nexport function startPhase(repoRoot: string, phase: PhaseName): PhaseState | null {\n return updatePhaseState(repoRoot, {\n current_phase: phase,\n phase_index: PHASE_INDEX[phase],\n });\n}\n\nexport function cleanPhaseState(repoRoot: string): void {\n try {\n const path = getStatePath(repoRoot);\n if (existsSync(path)) unlinkSync(path);\n } catch {\n // Silent cleanup\n }\n}\n\nexport function recordGatePassed(repoRoot: string, gate: GateName): PhaseState | null {\n const current = getPhaseState(repoRoot);\n if (current === null) return null;\n\n const gatesPassed = current.gates_passed.includes(gate)\n ? current.gates_passed\n : [...current.gates_passed, gate];\n const updated: PhaseState = { ...current, gates_passed: gatesPassed };\n\n // Final gate closes the active loop state.\n if (gate === 'final') {\n cleanPhaseState(repoRoot);\n return updated;\n }\n\n writeFileSync(getStatePath(repoRoot), JSON.stringify(updated, null, 2), 'utf-8');\n return updated;\n}\n\nfunction printStatusHuman(state: PhaseState | null): void {\n if (state === null) {\n console.log('No active cook-it session.');\n return;\n }\n console.log('Active cook-it Session');\n console.log(` Epic: ${state.epic_id}`);\n console.log(` Phase: ${state.current_phase} (${state.phase_index}/5)`);\n console.log(` Skills read: ${state.skills_read.length === 0 ? '(none)' : state.skills_read.join(', ')}`);\n console.log(` Gates passed: ${state.gates_passed.length === 0 ? '(none)' : state.gates_passed.join(', ')}`);\n console.log(` Started: ${state.started_at}`);\n}\n\n// eslint-disable-next-line max-lines-per-function -- command router registers multiple subcommands\nfunction registerPhaseSubcommands(\n phaseCheck: Command,\n getDryRun: () => boolean,\n repoRoot: () => string\n): void {\n phaseCheck\n .command('init <epic-id>')\n .description('Initialize phase state for an epic')\n .action((epicId: string) => {\n if (!EPIC_ID_PATTERN.test(epicId)) {\n console.error(`Invalid epic ID: \"${epicId}\"`);\n process.exitCode = 1;\n return;\n }\n if (getDryRun()) { console.log(`[dry-run] Would initialize phase state for epic ${epicId} in ${repoRoot()}`); return; }\n initPhaseState(repoRoot(), epicId);\n console.log(`Phase state initialized for ${epicId}. Current phase: spec-dev (1/5).`);\n });\n\n phaseCheck\n .command('start <phase>')\n .description('Start or resume a phase')\n .action((phase: string) => {\n if (!isPhaseName(phase)) {\n console.error(`Invalid phase: \"${phase}\". Valid phases: ${PHASES.join(', ')}`);\n process.exitCode = 1;\n return;\n }\n if (getDryRun()) { console.log(`[dry-run] Would start phase ${phase}`); return; }\n const state = startPhase(repoRoot(), phase);\n if (state === null) {\n console.error('No active phase state. Run: ca phase-check init <epic-id>');\n process.exitCode = 1;\n return;\n }\n console.log(`Phase updated: ${state.current_phase} (${state.phase_index}/5).`);\n });\n\n phaseCheck\n .command('gate <gate-name>')\n .description('Record a phase gate as passed')\n .action((gateName: string) => {\n if (!isGateName(gateName)) {\n console.error(`Invalid gate: \"${gateName}\". 
Valid gates: ${GATES.join(', ')}`);\n process.exitCode = 1;\n return;\n }\n if (getDryRun()) { console.log(`[dry-run] Would record gate ${gateName}`); return; }\n const state = recordGatePassed(repoRoot(), gateName);\n if (state === null) {\n console.error('No active phase state. Run: ca phase-check init <epic-id>');\n process.exitCode = 1;\n return;\n }\n if (gateName === 'final') {\n console.log('Final gate recorded. Phase state cleaned.');\n return;\n }\n console.log(`Gate recorded: ${gateName}.`);\n });\n\n phaseCheck\n .command('status')\n .description('Show current phase state')\n .option('--json', 'Output raw JSON')\n .action((options: { json?: boolean }) => {\n const state = getPhaseState(repoRoot());\n if (options.json) { console.log(JSON.stringify(state ?? { cookit_active: false })); return; }\n printStatusHuman(state);\n });\n\n phaseCheck\n .command('clean')\n .description('Remove phase state file')\n .action(() => {\n if (getDryRun()) { console.log('[dry-run] Would delete phase state file'); return; }\n cleanPhaseState(repoRoot());\n console.log('Phase state cleaned.');\n });\n}\n\nexport function registerPhaseCheckCommand(program: Command): void {\n const phaseCheck = program\n .command('phase-check')\n .description('Manage cook-it phase state')\n .option('--dry-run', 'Show what would be done without making changes');\n\n const getDryRun = (): boolean => phaseCheck.opts<{ dryRun?: boolean }>().dryRun ?? false;\n const repoRoot = (): string => getRepoRoot();\n\n registerPhaseSubcommands(phaseCheck, getDryRun, repoRoot);\n\n program\n .command('phase-clean')\n .description('Remove phase state file (alias for `phase-check clean`)')\n .action(() => { cleanPhaseState(repoRoot()); console.log('Phase state cleaned.'); });\n}\n","/**\n * Shared primitives for setup commands.\n * Used by both init.ts and setup-all.ts to avoid duplication.\n */\n\nimport { execFileSync } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { mkdir, readdir, readFile, writeFile } from 'node:fs/promises';\nimport { dirname, join } from 'node:path';\nimport { fileURLToPath } from 'node:url';\n\nimport { VERSION } from '../version.js';\nimport {\n AGENTS_MD_TEMPLATE,\n CLAUDE_MD_REFERENCE,\n CLAUDE_REF_START_MARKER,\n COMPOUND_AGENT_SECTION_HEADER,\n PLUGIN_MANIFEST,\n} from './templates.js';\nimport { AGENT_TEMPLATES, AGENT_ROLE_SKILLS, DOC_TEMPLATES, WORKFLOW_COMMANDS, PHASE_SKILLS, PHASE_SKILL_REFERENCES } from './templates/index.js';\n\n/**\n * @deprecated Kept for backward compatibility with all.ts --update detection.\n * New installs use path-based detection (file inside compound/ = managed).\n */\nexport const GENERATED_MARKER = '<!-- generated by compound-agent -->\\n';\n\n/**\n * Check if AGENTS.md already has the Compound Agent section.\n */\nexport function hasCompoundAgentSection(content: string): boolean {\n return content.includes(COMPOUND_AGENT_SECTION_HEADER);\n}\n\n/**\n * Check if CLAUDE.md already has the Compound Agent reference.\n */\nexport function hasClaudeMdReference(content: string): boolean {\n return content.includes('Compound Agent') || content.includes(CLAUDE_REF_START_MARKER);\n}\n\n/**\n * Create or update AGENTS.md with Compound Agent section.\n */\nexport async function updateAgentsMd(repoRoot: string): Promise<boolean> {\n const agentsPath = join(repoRoot, 'AGENTS.md');\n let content = '';\n let existed = false;\n\n if (existsSync(agentsPath)) {\n content = await readFile(agentsPath, 'utf-8');\n existed = true;\n if (hasCompoundAgentSection(content)) 
{\n return false; // Already has section, no update needed\n }\n }\n\n // Append the template\n const newContent = existed ? content.trimEnd() + '\\n' + AGENTS_MD_TEMPLATE : AGENTS_MD_TEMPLATE.trim() + '\\n';\n await writeFile(agentsPath, newContent, 'utf-8');\n return true;\n}\n\n/**\n * Ensure CLAUDE.md has a reference to AGENTS.md for Compound Agent workflow.\n * Creates CLAUDE.md if it doesn't exist, appends reference if not present.\n * Uses markers for clean uninstall support.\n */\nexport async function ensureClaudeMdReference(repoRoot: string): Promise<boolean> {\n const claudeMdPath = join(repoRoot, '.claude', 'CLAUDE.md');\n\n // Ensure .claude directory exists\n await mkdir(join(repoRoot, '.claude'), { recursive: true });\n\n if (!existsSync(claudeMdPath)) {\n // Create new CLAUDE.md with reference\n const content = `# Project Instructions\n${CLAUDE_MD_REFERENCE}`;\n await writeFile(claudeMdPath, content, 'utf-8');\n return true;\n }\n\n // File exists - check if reference is already present\n const content = await readFile(claudeMdPath, 'utf-8');\n if (hasClaudeMdReference(content)) {\n return false; // Already has reference\n }\n\n // Append reference\n const newContent = content.trimEnd() + '\\n' + CLAUDE_MD_REFERENCE;\n await writeFile(claudeMdPath, newContent, 'utf-8');\n return true;\n}\n\n/**\n * Create plugin.json in .claude/ directory.\n * Idempotent: does not overwrite existing file.\n *\n * @returns true if plugin.json was created\n */\nexport async function createPluginManifest(repoRoot: string): Promise<boolean> {\n const pluginPath = join(repoRoot, '.claude', 'plugin.json');\n\n // Ensure .claude directory exists\n await mkdir(join(repoRoot, '.claude'), { recursive: true });\n\n if (existsSync(pluginPath)) {\n return false; // Already exists\n }\n\n await writeFile(pluginPath, JSON.stringify(PLUGIN_MANIFEST, null, 2) + '\\n', 'utf-8');\n return true;\n}\n\n\n/**\n * Install agent templates to .claude/agents/compound/.\n * Idempotent: does not overwrite existing files.\n *\n * @returns true if any agent templates were created\n */\nexport async function installAgentTemplates(repoRoot: string): Promise<boolean> {\n const agentsDir = join(repoRoot, '.claude', 'agents', 'compound');\n await mkdir(agentsDir, { recursive: true });\n\n let created = false;\n for (const [filename, content] of Object.entries(AGENT_TEMPLATES)) {\n const filePath = join(agentsDir, filename);\n if (!existsSync(filePath)) {\n await writeFile(filePath, content, 'utf-8');\n created = true;\n }\n }\n return created;\n}\n\n/**\n * Install workflow commands to .claude/commands/compound/.\n * Idempotent: does not overwrite existing files.\n *\n * @returns true if any workflow commands were created\n */\nexport async function installWorkflowCommands(repoRoot: string): Promise<boolean> {\n const commandsDir = join(repoRoot, '.claude', 'commands', 'compound');\n await mkdir(commandsDir, { recursive: true });\n\n let created = false;\n for (const [filename, content] of Object.entries(WORKFLOW_COMMANDS)) {\n const filePath = join(commandsDir, filename);\n if (!existsSync(filePath)) {\n await writeFile(filePath, content, 'utf-8');\n created = true;\n }\n }\n return created;\n}\n\n/**\n * Install phase skill templates to .claude/skills/compound/<phase>/SKILL.md.\n * Idempotent: does not overwrite existing files.\n *\n * @returns true if any skill templates were created\n */\nexport async function installPhaseSkills(repoRoot: string): Promise<boolean> {\n let created = false;\n for (const [phase, content] of 
Object.entries(PHASE_SKILLS)) {\n const skillDir = join(repoRoot, '.claude', 'skills', 'compound', phase);\n await mkdir(skillDir, { recursive: true });\n const filePath = join(skillDir, 'SKILL.md');\n if (!existsSync(filePath)) {\n await writeFile(filePath, content, 'utf-8');\n created = true;\n }\n }\n\n // Install reference files alongside skills\n for (const [relPath, content] of Object.entries(PHASE_SKILL_REFERENCES)) {\n const filePath = join(repoRoot, '.claude', 'skills', 'compound', relPath);\n await mkdir(dirname(filePath), { recursive: true });\n if (!existsSync(filePath)) {\n await writeFile(filePath, content, 'utf-8');\n created = true;\n }\n }\n\n return created;\n}\n\n/**\n * Install agent role skill templates to .claude/skills/compound/agents/<name>/SKILL.md.\n * Idempotent: does not overwrite existing files.\n *\n * @returns true if any agent role skills were created\n */\nexport async function installAgentRoleSkills(repoRoot: string): Promise<boolean> {\n let created = false;\n for (const [name, content] of Object.entries(AGENT_ROLE_SKILLS)) {\n const skillDir = join(repoRoot, '.claude', 'skills', 'compound', 'agents', name);\n await mkdir(skillDir, { recursive: true });\n const filePath = join(skillDir, 'SKILL.md');\n if (!existsSync(filePath)) {\n await writeFile(filePath, content, 'utf-8');\n created = true;\n }\n }\n return created;\n}\n\n/**\n * Install documentation templates to docs/compound/.\n * Idempotent: does not overwrite existing files.\n * Replaces {{VERSION}} placeholder with the actual package version.\n *\n * @returns true if any doc templates were created\n */\nexport async function installDocTemplates(repoRoot: string): Promise<boolean> {\n const docsDir = join(repoRoot, 'docs', 'compound');\n await mkdir(docsDir, { recursive: true });\n\n let created = false;\n for (const [filename, template] of Object.entries(DOC_TEMPLATES)) {\n const filePath = join(docsDir, filename);\n if (!existsSync(filePath)) {\n const content = template.replace('{{VERSION}}', VERSION).replace('{{DATE}}', new Date().toISOString().slice(0, 10));\n await writeFile(filePath, content, 'utf-8');\n created = true;\n }\n }\n return created;\n}\n\n/**\n * Install research docs from the package's docs/research/ to docs/compound/research/ in the user's project.\n *\n * @param force - When true, overwrites existing files if content differs (used by --update).\n * When false (default), skips existing files (idempotent fresh install).\n * @returns true if any research docs were created or updated\n */\nexport async function installResearchDocs(repoRoot: string, options?: { force?: boolean }): Promise<boolean> {\n const force = options?.force ?? 
false;\n // Resolve the package's docs/research/ directory via import.meta.url\n // In the built bundle, import.meta.url points to dist/cli.js.\n // Go up one level from dist/ to reach the package root.\n const pkgRoot = join(dirname(fileURLToPath(import.meta.url)), '..');\n const srcDir = join(pkgRoot, 'docs', 'research');\n\n if (!existsSync(srcDir)) {\n return false; // Package doesn't include research docs (dev-only)\n }\n\n const destDir = join(repoRoot, 'docs', 'compound', 'research');\n await mkdir(destDir, { recursive: true });\n\n let created = false;\n\n async function copyDir(src: string, dest: string): Promise<void> {\n const entries = await readdir(src, { withFileTypes: true });\n for (const entry of entries) {\n const srcPath = join(src, entry.name);\n const destPath = join(dest, entry.name);\n if (entry.isDirectory()) {\n await mkdir(destPath, { recursive: true });\n await copyDir(srcPath, destPath);\n continue;\n }\n if (!entry.name.endsWith('.md')) continue;\n const exists = existsSync(destPath);\n if (exists && !force) continue;\n\n let content = await readFile(srcPath, 'utf-8');\n // Rewrite index.md header to note provenance\n if (entry.name === 'index.md') {\n const patched = content.replace(\n /^# .*/m,\n '$&\\n\\n> Shipped by compound-agent. Source: `docs/research/` in the compound-agent package.',\n );\n content = patched !== content ? patched : `> Shipped by compound-agent.\\n\\n${content}`;\n }\n // In force mode, skip write if content is unchanged\n if (exists && (await readFile(destPath, 'utf-8')) === content) continue;\n await writeFile(destPath, content, 'utf-8');\n created = true;\n }\n }\n\n try {\n await copyDir(srcDir, destDir);\n } catch (err) {\n console.error(`Warning: Could not install research docs: ${(err as Error).message}`);\n return false;\n }\n return created;\n}\n\n// ============================================================================\n// pnpm native build configuration\n// ============================================================================\n\n/** Native addon packages that require pnpm onlyBuiltDependencies opt-in. */\nconst REQUIRED_BUILD_DEPS = ['better-sqlite3', 'node-llama-cpp'];\n\n/** Result of pnpm build config check/update. */\nexport interface PnpmConfigResult {\n /** Whether this is a pnpm project (pnpm-lock.yaml or packageManager field). */\n isPnpm: boolean;\n /** Whether the config was already correct (no changes needed). */\n alreadyConfigured: boolean;\n /** Package names that were added to onlyBuiltDependencies. */\n added: string[];\n}\n\n/**\n * Ensure pnpm projects have onlyBuiltDependencies configured for native addons.\n *\n * pnpm v9+ blocks native addon compilation by default. 
This function detects\n * pnpm projects (via pnpm-lock.yaml or packageManager field) and adds the\n * required packages to pnpm.onlyBuiltDependencies in the consumer's package.json.\n *\n * Idempotent: does not duplicate entries or overwrite existing config.\n */\nexport async function ensurePnpmBuildConfig(repoRoot: string): Promise<PnpmConfigResult> {\n const lockPath = join(repoRoot, 'pnpm-lock.yaml');\n const hasLockfile = existsSync(lockPath);\n const pkgPath = join(repoRoot, 'package.json');\n const hasPkgJson = existsSync(pkgPath);\n\n // Detect pnpm: lockfile OR packageManager field starting with \"pnpm\"\n if (!hasLockfile) {\n if (!hasPkgJson) {\n return { isPnpm: false, alreadyConfigured: false, added: [] };\n }\n const pkg = readPkgJsonSafe(pkgPath, await readFile(pkgPath, 'utf-8'));\n if (pkg === null) return { isPnpm: false, alreadyConfigured: false, added: [] };\n const pm = typeof pkg.packageManager === 'string' ? pkg.packageManager : '';\n if (!pm.startsWith('pnpm')) {\n return { isPnpm: false, alreadyConfigured: false, added: [] };\n }\n // pnpm detected via packageManager field — fall through to config merge\n return mergePnpmConfig(pkgPath, pkg);\n }\n\n if (!hasPkgJson) {\n return { isPnpm: true, alreadyConfigured: false, added: [] };\n }\n\n const pkg = readPkgJsonSafe(pkgPath, await readFile(pkgPath, 'utf-8'));\n if (pkg === null) return { isPnpm: true, alreadyConfigured: false, added: [] };\n return mergePnpmConfig(pkgPath, pkg);\n}\n\nfunction readPkgJsonSafe(pkgPath: string, raw: string): Record<string, unknown> | null {\n try {\n return JSON.parse(raw) as Record<string, unknown>;\n } catch {\n console.error(`Warning: Could not parse ${pkgPath} — skipping pnpm build config.\\nFix the JSON syntax and re-run setup.`);\n return null;\n }\n}\n\nasync function mergePnpmConfig(pkgPath: string, pkg: Record<string, unknown>): Promise<PnpmConfigResult> {\n // Get or create pnpm.onlyBuiltDependencies\n if (!pkg.pnpm || typeof pkg.pnpm !== 'object') {\n pkg.pnpm = {};\n }\n const pnpmConfig = pkg.pnpm as Record<string, unknown>;\n\n if (!Array.isArray(pnpmConfig.onlyBuiltDependencies)) {\n pnpmConfig.onlyBuiltDependencies = [];\n }\n const existing = pnpmConfig.onlyBuiltDependencies as string[];\n\n const added: string[] = [];\n for (const dep of REQUIRED_BUILD_DEPS) {\n if (!existing.includes(dep)) {\n existing.push(dep);\n added.push(dep);\n }\n }\n\n if (added.length === 0) {\n return { isPnpm: true, alreadyConfigured: true, added: [] };\n }\n\n await writeFile(pkgPath, JSON.stringify(pkg, null, 2) + '\\n', 'utf-8');\n return { isPnpm: true, alreadyConfigured: false, added };\n}\n\n// ============================================================================\n// SQLite verification + auto-rebuild\n// ============================================================================\n\nimport { ensureSqliteAvailable, resetSqliteAvailability } from '../memory/storage/index.js';\n\n/** Result of post-setup SQLite verification. */\nexport interface SqliteVerifyResult {\n /** Whether SQLite (better-sqlite3) is currently loadable. */\n available: boolean;\n /** What action was taken (or attempted). */\n action: 'already_ok' | 'rebuilt' | 'installed_and_rebuilt' | 'failed';\n /** Error message if action is 'failed'. 
*/\n error?: string;\n}\n\n/**\n * Try loading SQLite, returning true on success, false on failure.\n */\nfunction trySqliteLoad(): boolean {\n try {\n ensureSqliteAvailable();\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Verify SQLite availability and attempt auto-rebuild if needed.\n *\n * Strategy:\n * 1. Try ensureSqliteAvailable() — if works, fast path (zero overhead)\n * 2. If non-pnpm: report failure with npm rebuild suggestion\n * 3. If pnpm: try `pnpm rebuild better-sqlite3`, re-check\n * 4. If still fails: try `pnpm install` then rebuild, re-check\n * 5. All attempts fail: report with manual instructions\n *\n * Non-blocking: never throws. Reports result honestly.\n */\nexport function verifySqlite(repoRoot: string, pnpmConfig: PnpmConfigResult): SqliteVerifyResult {\n // Fast path: already works\n if (trySqliteLoad()) {\n return { available: true, action: 'already_ok' };\n }\n\n // Non-pnpm: can't auto-fix (npm/yarn build native modules by default)\n if (!pnpmConfig.isPnpm) {\n return {\n available: false,\n action: 'failed',\n error: 'better-sqlite3 failed to load. Run: npm rebuild better-sqlite3',\n };\n }\n\n // Attempt 1: pnpm rebuild better-sqlite3\n try {\n execFileSync('pnpm', ['rebuild', 'better-sqlite3'], {\n cwd: repoRoot,\n stdio: 'pipe',\n timeout: 60_000,\n });\n } catch {\n // rebuild failed, will try install next\n }\n\n resetSqliteAvailability();\n if (trySqliteLoad()) {\n return { available: true, action: 'rebuilt' };\n }\n\n // Attempt 2: pnpm install + rebuild (escalation)\n try {\n execFileSync('pnpm', ['install'], {\n cwd: repoRoot,\n stdio: 'pipe',\n timeout: 120_000,\n });\n execFileSync('pnpm', ['rebuild', 'better-sqlite3'], {\n cwd: repoRoot,\n stdio: 'pipe',\n timeout: 60_000,\n });\n } catch {\n // install or rebuild failed\n }\n\n resetSqliteAvailability();\n if (trySqliteLoad()) {\n return { available: true, action: 'installed_and_rebuilt' };\n }\n\n return {\n available: false,\n action: 'failed',\n error: 'Auto-rebuild failed. 
Run manually: pnpm install && pnpm rebuild better-sqlite3',\n };\n}\n","/**\n * Download-model command - Download the embedding model for semantic search.\n */\n\nimport { statSync } from 'node:fs';\nimport { homedir } from 'node:os';\nimport { join } from 'node:path';\nimport type { Command } from 'commander';\n\nimport { formatBytes } from '../cli-utils.js';\nimport { isModelAvailable, MODEL_FILENAME, resolveModel } from '../memory/embeddings/index.js';\n\n/**\n * Register the download-model command on the program.\n */\nexport function registerDownloadModelCommand(program: Command): void {\n program\n .command('download-model')\n .description('Download the embedding model for semantic search')\n .option('--json', 'Output as JSON')\n .action(async (options: { json?: boolean }) => {\n const alreadyExisted = isModelAvailable();\n\n if (alreadyExisted) {\n // Model already exists - get path and size\n const modelPath = join(homedir(), '.node-llama-cpp', 'models', MODEL_FILENAME);\n const size = statSync(modelPath).size;\n\n if (options.json) {\n console.log(JSON.stringify({ success: true, path: modelPath, size, alreadyExisted: true }));\n } else {\n console.log('Model already exists.');\n console.log(`Path: ${modelPath}`);\n console.log(`Size: ${formatBytes(size)}`);\n }\n return;\n }\n\n // Download the model\n if (!options.json) {\n console.log('Downloading embedding model...');\n }\n\n const modelPath = await resolveModel({ cli: !options.json });\n const size = statSync(modelPath).size;\n\n if (options.json) {\n console.log(JSON.stringify({ success: true, path: modelPath, size, alreadyExisted: false }));\n } else {\n console.log(`\\nModel downloaded successfully!`);\n console.log(`Path: ${modelPath}`);\n console.log(`Size: ${formatBytes(size)}`);\n }\n });\n}\n","/**\n * Init command - Initialize compound-agent in a repository.\n */\n\nimport { existsSync } from 'node:fs';\nimport { mkdir, writeFile } from 'node:fs/promises';\nimport { dirname, join } from 'node:path';\nimport type { Command } from 'commander';\n\nimport { getRepoRoot } from '../cli-utils.js';\nimport { LESSONS_PATH } from '../memory/storage/index.js';\nimport { getGlobalOpts, out } from '../commands/index.js';\nimport { playInstallBanner } from './banner.js';\nimport { runFullBeadsCheck, type BeadsFullCheck } from './beads-check.js';\nimport { printBeadsFullStatus, printGitignoreStatus, printScopeStatus } from './display-utils.js';\nimport { installClaudeHooksForInit } from './claude-helpers.js';\nimport { ensureGitignore, type GitignoreResult } from './gitignore.js';\nimport { installPreCommitHook, installPostCommitHook, type HookInstallResult } from './hooks.js';\nimport {\n createPluginManifest,\n ensureClaudeMdReference,\n ensurePnpmBuildConfig,\n installAgentRoleSkills,\n installAgentTemplates,\n installDocTemplates,\n installPhaseSkills,\n installResearchDocs,\n installWorkflowCommands,\n updateAgentsMd,\n type PnpmConfigResult,\n} from './primitives.js';\nimport { checkUserScope } from './scope-check.js';\nimport type { ClaudeHooksResult } from './types.js';\nimport { runUpgrade, detectExistingInstall, type UpgradeResult } from './upgrade.js';\n\n/**\n * Create the lessons directory structure.\n */\nasync function createLessonsDirectory(repoRoot: string): Promise<void> {\n const lessonsDir = dirname(join(repoRoot, LESSONS_PATH));\n await mkdir(lessonsDir, { recursive: true });\n}\n\n/**\n * Create empty index.jsonl if it doesn't exist.\n */\nasync function createIndexFile(repoRoot: string): Promise<void> {\n 
const indexPath = join(repoRoot, LESSONS_PATH);\n if (!existsSync(indexPath)) {\n await writeFile(indexPath, '', 'utf-8');\n }\n}\n\n// ============================================================================\n// Model & Background Embedding Helpers\n// ============================================================================\n\ntype ModelStatus = 'downloaded' | 'exists' | 'failed' | 'skipped';\n\n/** Download embedding model and optionally trigger background embedding. */\nasync function handleModelAndEmbed(\n repoRoot: string,\n opts: { skipModel?: boolean; quiet: boolean; json?: boolean },\n): Promise<ModelStatus> {\n if (opts.skipModel) return 'skipped';\n\n let status: ModelStatus = 'skipped';\n try {\n const { isModelAvailable, resolveModel } = await import('../memory/embeddings/index.js');\n if (isModelAvailable()) {\n status = 'exists';\n if (!opts.quiet && !opts.json) console.log(' Embedding model: already exists');\n } else {\n if (!opts.quiet && !opts.json) out.info('Downloading embedding model...');\n await resolveModel({ cli: !opts.json });\n status = 'downloaded';\n if (!opts.quiet && !opts.json) out.info('Embedding model downloaded.');\n }\n } catch (err) {\n status = 'failed';\n const msg = err instanceof Error ? err.message : 'Unknown error';\n console.error('[compound-agent] Embedding model download failed: ' + msg);\n console.error('[compound-agent] Run `npx ca download-model` manually.');\n }\n\n // Trigger background embedding if docs/ exists and model available\n if (status === 'exists' || status === 'downloaded') {\n try {\n const { indexAndSpawnEmbed } = await import('../memory/knowledge/embed-background.js');\n const spawnResult = await indexAndSpawnEmbed(repoRoot);\n if (spawnResult?.spawned && !opts.quiet && !opts.json) {\n out.info('Embedding in progress (background). 
You can start working.');\n }\n } catch {\n // Non-fatal: don't break init if background embedding fails to spawn\n }\n\n // Pre-warm lesson embeddings so the first search is fast\n try {\n const { withEmbedding } = await import('../memory/embeddings/index.js');\n const { preWarmLessonEmbeddings } = await import('../memory/search/prewarm.js');\n await withEmbedding(async () => preWarmLessonEmbeddings(repoRoot));\n } catch {\n // Non-fatal: don't break init if lesson pre-warming fails\n }\n }\n\n return status;\n}\n\n// ============================================================================\n// Action Handler\n// ============================================================================\n\nasync function initAction(\n cmd: Command,\n options: { skipAgents?: boolean; skipHooks?: boolean; skipClaude?: boolean; skipModel?: boolean; json?: boolean; update?: boolean }\n): Promise<void> {\n const repoRoot = getRepoRoot();\n const { quiet } = getGlobalOpts(cmd);\n\n // Pre-flight checks\n const scopeResult = checkUserScope(repoRoot);\n\n // Upgrade detection\n let upgradeResult: UpgradeResult | null = null;\n if (options.update || detectExistingInstall(repoRoot)) {\n upgradeResult = await runUpgrade(repoRoot);\n if (!quiet && !options.json && upgradeResult.isUpgrade) {\n console.log(` ${upgradeResult.message}`);\n if (!options.update) {\n console.log(' Tip: Run with --update to regenerate managed files with latest templates.');\n }\n }\n }\n\n if (!quiet && !options.json && process.stdout.isTTY) {\n await playInstallBanner();\n }\n\n // Ensure pnpm native build config before anything else\n const pnpmConfig = await ensurePnpmBuildConfig(repoRoot);\n\n await createLessonsDirectory(repoRoot);\n await createIndexFile(repoRoot);\n const lessonsDir = dirname(join(repoRoot, LESSONS_PATH));\n\n let agentsMdUpdated = false;\n if (!options.skipAgents) {\n agentsMdUpdated = await updateAgentsMd(repoRoot);\n await ensureClaudeMdReference(repoRoot);\n await createPluginManifest(repoRoot);\n await installAgentTemplates(repoRoot);\n await installWorkflowCommands(repoRoot);\n await installPhaseSkills(repoRoot);\n await installAgentRoleSkills(repoRoot);\n await installDocTemplates(repoRoot);\n await installResearchDocs(repoRoot);\n }\n\n let hookResult: HookInstallResult | null = null;\n if (!options.skipHooks) {\n hookResult = await installPreCommitHook(repoRoot);\n await installPostCommitHook(repoRoot);\n }\n\n let claudeHooksResult: ClaudeHooksResult = { installed: false, action: 'error', error: 'skipped' };\n if (!options.skipClaude) {\n claudeHooksResult = await installClaudeHooksForInit(repoRoot);\n }\n\n const gitignoreResult = await ensureGitignore(repoRoot);\n const modelStatus = await handleModelAndEmbed(repoRoot, { skipModel: options.skipModel, quiet, json: options.json });\n const fullBeads = runFullBeadsCheck(repoRoot);\n\n if (options.json) {\n printInitJson({ lessonsDir, agentsMdUpdated, hookResult, claudeHooksResult, pnpmConfig, fullBeads, scopeResult, upgradeResult, gitignoreResult, modelStatus });\n return;\n }\n\n if (quiet) return;\n\n out.success('Compound agent initialized');\n console.log(` Lessons directory: ${lessonsDir}`);\n printAgentsMdStatus(agentsMdUpdated, options.skipAgents);\n printHookStatus(hookResult, options.skipHooks);\n printClaudeHooksStatus(claudeHooksResult, options.skipClaude);\n printModelStatus(modelStatus, options.skipModel);\n printPnpmConfigStatus(pnpmConfig);\n printGitignoreStatus(gitignoreResult);\n printBeadsFullStatus(fullBeads);\n 
printScopeStatus(scopeResult);\n}\n\nfunction printInitJson(ctx: {\n lessonsDir: string; agentsMdUpdated: boolean; hookResult: HookInstallResult | null;\n claudeHooksResult: ClaudeHooksResult; pnpmConfig: PnpmConfigResult;\n fullBeads: BeadsFullCheck; scopeResult: { isUserScope: boolean };\n upgradeResult: UpgradeResult | null; gitignoreResult: GitignoreResult;\n modelStatus: string;\n}): void {\n const claudeHooksInstalled = ctx.claudeHooksResult.action === 'installed';\n const hooksChanged = ctx.hookResult?.status === 'installed' || ctx.hookResult?.status === 'appended';\n console.log(JSON.stringify({\n initialized: true, lessonsDir: ctx.lessonsDir, agentsMd: ctx.agentsMdUpdated,\n hooks: hooksChanged, hookStatus: ctx.hookResult?.status ?? 'skipped',\n claudeHooks: claudeHooksInstalled,\n model: ctx.modelStatus,\n pnpmConfig: ctx.pnpmConfig.isPnpm ? { added: ctx.pnpmConfig.added, alreadyConfigured: ctx.pnpmConfig.alreadyConfigured } : null,\n beadsAvailable: ctx.fullBeads.cliAvailable, beadsInitialized: ctx.fullBeads.initialized, beadsHealthy: ctx.fullBeads.healthy,\n userScope: ctx.scopeResult.isUserScope,\n upgrade: ctx.upgradeResult ? { isUpgrade: ctx.upgradeResult.isUpgrade, removedCommands: ctx.upgradeResult.removedCommands, strippedHeaders: ctx.upgradeResult.strippedHeaders } : null,\n gitignore: ctx.gitignoreResult.added,\n }));\n}\n\nfunction printAgentsMdStatus(updated: boolean, skipped?: boolean): void {\n if (updated) {\n console.log(' AGENTS.md: Updated with Compound Agent section');\n } else if (skipped) {\n console.log(' AGENTS.md: Skipped (--skip-agents)');\n } else {\n console.log(' AGENTS.md: Already has Compound Agent section');\n }\n}\n\nfunction printHookStatus(hookResult: HookInstallResult | null, skipped?: boolean): void {\n if (skipped) {\n console.log(' Git hooks: Skipped (--skip-hooks)');\n } else if (hookResult?.status === 'installed') {\n console.log(' Git hooks: Installed');\n } else if (hookResult?.status === 'appended') {\n console.log(' Git hooks: Appended to existing pre-commit hook');\n } else if (hookResult?.status === 'already_installed') {\n console.log(' Git hooks: Already installed');\n } else if (hookResult?.status === 'not_git_repo') {\n console.log(' Git hooks: Skipped (not a git repository)');\n }\n}\n\nfunction printClaudeHooksStatus(result: ClaudeHooksResult, skipped?: boolean): void {\n if (skipped) {\n console.log(' Claude hooks: Skipped (--skip-claude)');\n } else if (result.action === 'installed') {\n console.log(' Claude hooks: Installed to .claude/settings.json');\n } else if (result.action === 'already_installed') {\n console.log(' Claude hooks: Already installed');\n } else if (result.error) {\n console.log(` Claude hooks: Error - ${result.error}`);\n }\n}\n\nfunction printModelStatus(status: string, skipped?: boolean): void {\n if (skipped) {\n console.log(' Embedding model: Skipped (--skip-model)');\n } else if (status === 'exists') {\n // Already printed inline during download check\n } else if (status === 'downloaded') {\n // Already printed inline during download\n } else if (status === 'failed') {\n // Already printed inline via console.error\n }\n}\n\nfunction printPnpmConfigStatus(result: PnpmConfigResult): void {\n if (!result.isPnpm) return;\n if (result.alreadyConfigured) {\n console.log(' pnpm config: onlyBuiltDependencies already configured');\n } else if (result.added.length > 0) {\n console.log(` pnpm config: Added onlyBuiltDependencies [${result.added.join(', ')}]`);\n }\n}\n\n// 
============================================================================\n// Command Registration\n// ============================================================================\n\n/**\n * Register the init command on the program.\n */\nexport function registerInitCommand(program: Command): void {\n program\n .command('init')\n .description('Initialize compound-agent in this repository')\n .option('--skip-agents', 'Skip AGENTS.md modification')\n .option('--skip-hooks', 'Skip git hooks installation')\n .option('--skip-claude', 'Skip Claude Code hooks installation')\n .option('--skip-model', 'Skip embedding model download')\n .option('--json', 'Output result as JSON')\n .option('--update', 'Run upgrade logic on existing install')\n .action(async function (this: Command, options: { skipAgents?: boolean; skipHooks?: boolean; skipClaude?: boolean; skipModel?: boolean; json?: boolean; update?: boolean }) {\n await initAction(this, options);\n });\n}\n","/**\n * CRUD commands: show, update, delete\n *\n * Commands for reading, updating, and deleting lessons.\n */\n\nimport type { Command } from 'commander';\n\nimport { getRepoRoot } from '../cli-utils.js';\nimport { appendMemoryItem, readMemoryItems, syncIfNeeded } from '../memory/storage/index.js';\nimport { MemoryItemSchema, SeveritySchema } from '../memory/index.js';\nimport type { MemoryItem, Severity } from '../memory/index.js';\n\nimport { formatError } from '../cli-error-format.js';\n\nimport { out } from './shared.js';\nimport { formatLessonHuman } from './management-helpers.js';\n\n/** JSON indentation for show output */\nconst SHOW_JSON_INDENT = 2;\n\n// ============================================================================\n// Action Handlers\n// ============================================================================\n\nasync function showAction(id: string, options: { json?: boolean }): Promise<void> {\n const repoRoot = getRepoRoot();\n\n const { items, deletedIds } = await readMemoryItems(repoRoot);\n const item = items.find((i) => i.id === id);\n\n if (!item) {\n const wasDeleted = deletedIds.has(id);\n\n if (options.json) {\n console.log(JSON.stringify({ error: wasDeleted ? `Lesson ${id} not found (deleted)` : `Lesson ${id} not found` }));\n } else {\n const msg = wasDeleted ? 
`Lesson ${id} not found (deleted)` : `Lesson ${id} not found`;\n console.error(formatError('show', 'NOT_FOUND', msg, 'Use \"ca list\" to see available lessons'));\n }\n process.exitCode = 1;\n return;\n }\n\n if (options.json) {\n console.log(JSON.stringify(item, null, SHOW_JSON_INDENT));\n } else {\n console.log(formatLessonHuman(item));\n }\n}\n\ninterface UpdateOptions {\n insight?: string;\n trigger?: string;\n evidence?: string;\n severity?: string;\n tags?: string;\n confirmed?: string;\n json?: boolean;\n}\n\nfunction buildUpdatedItem(item: MemoryItem, options: UpdateOptions): MemoryItem {\n return {\n ...item,\n ...(options.insight !== undefined && { insight: options.insight }),\n ...(options.trigger !== undefined && { trigger: options.trigger }),\n ...(options.evidence !== undefined && { evidence: options.evidence }),\n ...(options.severity !== undefined && { severity: options.severity as Severity }),\n ...(options.tags !== undefined && {\n tags: [...new Set(\n options.tags\n .split(',')\n .map((t) => t.trim())\n .filter((t) => t.length > 0)\n )],\n }),\n ...(options.confirmed !== undefined && { confirmed: options.confirmed === 'true' }),\n };\n}\n\nasync function updateAction(id: string, options: UpdateOptions): Promise<void> {\n const repoRoot = getRepoRoot();\n\n const hasUpdates = options.insight !== undefined\n || options.trigger !== undefined\n || options.evidence !== undefined\n || options.severity !== undefined\n || options.tags !== undefined\n || options.confirmed !== undefined;\n\n if (!hasUpdates) {\n if (options.json) {\n console.log(JSON.stringify({ error: 'No fields to update (specify at least one: --insight, --tags, --severity, ...)' }));\n } else {\n console.error(formatError('update', 'NO_FIELDS', 'No fields to update', 'Specify at least one: --insight, --tags, --severity, ...'));\n }\n process.exitCode = 1;\n return;\n }\n\n const { items, deletedIds } = await readMemoryItems(repoRoot);\n const item = items.find((i) => i.id === id);\n\n if (!item) {\n const wasDeleted = deletedIds.has(id);\n if (options.json) {\n console.log(JSON.stringify({ error: wasDeleted ? `Lesson ${id} is deleted` : `Lesson ${id} not found` }));\n } else {\n const msg = wasDeleted ? 
`Lesson ${id} is deleted` : `Lesson ${id} not found`;\n console.error(formatError('update', 'NOT_FOUND', msg, 'Use \"ca list\" to see available lessons'));\n }\n process.exitCode = 1;\n return;\n }\n\n if (options.severity !== undefined) {\n const result = SeveritySchema.safeParse(options.severity);\n if (!result.success) {\n if (options.json) {\n console.log(JSON.stringify({ error: `Invalid severity '${options.severity}' (must be: high, medium, low)` }));\n } else {\n console.error(formatError('update', 'INVALID_SEVERITY', `Invalid severity: \"${options.severity}\"`, 'Use --severity high|medium|low'));\n }\n process.exitCode = 1;\n return;\n }\n }\n\n const updatedItem = buildUpdatedItem(item, options);\n\n const validationResult = MemoryItemSchema.safeParse(updatedItem);\n if (!validationResult.success) {\n if (options.json) {\n console.log(JSON.stringify({ error: `Schema validation failed: ${validationResult.error.message}` }));\n } else {\n console.error(formatError('update', 'VALIDATION_FAILED', `Schema validation failed: ${validationResult.error.message}`, 'Check field values and try again'));\n }\n process.exitCode = 1;\n return;\n }\n\n await appendMemoryItem(repoRoot, updatedItem);\n await syncIfNeeded(repoRoot);\n\n if (options.json) {\n console.log(JSON.stringify(updatedItem, null, SHOW_JSON_INDENT));\n } else {\n out.success(`Updated lesson ${id}`);\n }\n}\n\nasync function deleteAction(ids: string[], options: { json?: boolean }): Promise<void> {\n const repoRoot = getRepoRoot();\n\n const { items, deletedIds } = await readMemoryItems(repoRoot);\n const itemMap = new Map(items.map((i) => [i.id, i]));\n\n const deleted: string[] = [];\n const warnings: Array<{ id: string; message: string }> = [];\n\n for (const id of ids) {\n const item = itemMap.get(id);\n\n if (!item) {\n warnings.push({ id, message: deletedIds.has(id) ? 
'already deleted' : 'not found' });\n continue;\n }\n\n const deletedItem: MemoryItem = {\n ...item,\n deleted: true,\n deletedAt: new Date().toISOString(),\n };\n\n await appendMemoryItem(repoRoot, deletedItem);\n deleted.push(id);\n }\n\n if (deleted.length > 0) {\n await syncIfNeeded(repoRoot);\n }\n\n if (options.json) {\n console.log(JSON.stringify({ deleted, warnings }));\n } else {\n if (deleted.length > 0) {\n out.success(`Deleted ${deleted.length} lesson(s): ${deleted.join(', ')}`);\n }\n for (const warning of warnings) {\n out.warn(`${warning.id}: ${warning.message}`);\n }\n if (deleted.length === 0 && warnings.length > 0) {\n process.exitCode = 1;\n return;\n }\n }\n}\n\n// ============================================================================\n// Command Registration\n// ============================================================================\n\n/**\n * Register CRUD commands on the program.\n */\nexport function registerCrudCommands(program: Command): void {\n program\n .command('show <id>')\n .description('Show details of a specific lesson')\n .option('--json', 'Output as JSON')\n .action(async (id: string, options: { json?: boolean }) => {\n await showAction(id, options);\n });\n\n program\n .command('update <id>')\n .description('Update a lesson')\n .option('--insight <text>', 'Update insight')\n .option('--trigger <text>', 'Update trigger')\n .option('--evidence <text>', 'Update evidence')\n .option('--severity <level>', 'Update severity (low/medium/high)')\n .option('--tags <tags>', 'Update tags (comma-separated)')\n .option('--confirmed <bool>', 'Update confirmed status (true/false)')\n .option('--json', 'Output as JSON')\n .action(async (id: string, options: {\n insight?: string;\n trigger?: string;\n evidence?: string;\n severity?: string;\n tags?: string;\n confirmed?: string;\n json?: boolean;\n }) => {\n await updateAction(id, options);\n });\n\n program\n .command('delete <ids...>')\n .description('Soft delete lessons (creates tombstone)')\n .option('--json', 'Output as JSON')\n .action(async (ids: string[], options: { json?: boolean }) => {\n await deleteAction(ids, options);\n });\n}\n","/**\n * Memory module barrel export.\n *\n * Re-exports types and sub-module APIs for cross-module consumption.\n */\n\n// Types and schemas (from types.ts)\nexport {\n generateId,\n LessonItemSchema,\n LessonSchema,\n MemoryItemRecordSchema,\n MemoryItemSchema,\n MemoryItemTypeSchema,\n PatternItemSchema,\n PatternSchema,\n PreferenceItemSchema,\n SeveritySchema,\n SolutionItemSchema,\n} from './types.js';\nexport type {\n Context,\n Lesson,\n LessonRecord,\n LessonType,\n MemoryItem,\n MemoryItemRecord,\n MemoryItemType,\n PatternItem,\n Preference,\n Severity,\n Solution,\n Source,\n} from './types.js';\n\n// Storage API\nexport {\n appendLesson,\n appendMemoryItem,\n closeDb,\n DB_PATH,\n LESSONS_PATH,\n readLessons,\n readMemoryItems,\n rebuildIndex,\n searchKeyword,\n} from './storage/index.js';\n\n// Embeddings API\nexport {\n embedText,\n embedTexts,\n getEmbedding,\n isModelAvailable,\n isModelUsable,\n MODEL_FILENAME,\n MODEL_URI,\n resolveModel,\n unloadEmbedding,\n} from './embeddings/index.js';\n\n// Search API\nexport {\n CANDIDATE_MULTIPLIER,\n cosineSimilarity,\n mergeHybridResults,\n normalizeBm25Rank,\n rankLessons,\n searchVector,\n} from './search/index.js';\nexport type { RankedLesson, ScoredKeywordResult, ScoredLesson } from './search/index.js';\n\n// Capture API\nexport {\n detectSelfCorrection,\n detectTestFailure,\n detectUserCorrection,\n 
inferMemoryItemType,\n isActionable,\n isNovel,\n isSpecific,\n shouldPropose,\n} from './capture/index.js';\n\n// Retrieval API\nexport { loadSessionLessons, retrieveForPlan } from './retrieval/index.js';\n\n// Storage extras\nexport {\n compact,\n countTombstones,\n getRetrievalStats,\n incrementRetrievalCount,\n needsCompaction,\n syncIfNeeded,\n TOMBSTONE_THRESHOLD,\n} from './storage/index.js';\n","/**\n * Shared types, constants, and utilities for CLI commands.\n */\n\nimport chalk from 'chalk';\nimport type { Command } from 'commander';\n\n// Re-export centralized utilities (utils.ts remains in src/ root)\nexport { getLessonAgeDays, MS_PER_DAY } from '../utils.js';\n\n// ============================================================================\n// Output Formatting Helpers\n// ============================================================================\n\n/** Output helper functions for consistent formatting */\nexport const out = {\n success: (msg: string): void => console.log(chalk.green('[ok]'), msg),\n error: (msg: string): void => console.error(chalk.red('[error]'), msg),\n info: (msg: string): void => console.log(chalk.blue('[info]'), msg),\n warn: (msg: string): void => console.error(chalk.yellow('[warn]'), msg),\n};\n\n/** Global options interface */\nexport interface GlobalOpts {\n verbose: boolean;\n quiet: boolean;\n}\n\n/**\n * Get global options from command.\n */\nexport function getGlobalOpts(cmd: Command): GlobalOpts {\n const opts = cmd.optsWithGlobals() as { verbose?: boolean; quiet?: boolean };\n return {\n verbose: opts.verbose ?? false,\n quiet: opts.quiet ?? false,\n };\n}\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/** Default limit for search results */\nexport const DEFAULT_SEARCH_LIMIT = '10';\n\n/** Default limit for list results */\nexport const DEFAULT_LIST_LIMIT = '20';\n\n/** Default limit for search results */\nexport const DEFAULT_CHECK_PLAN_LIMIT = '5';\n\n/** Threshold for lesson count warning (context pollution prevention) */\nexport const LESSON_COUNT_WARNING_THRESHOLD = 20;\n\n/** Age threshold in days for flagging old lessons */\nexport const AGE_FLAG_THRESHOLD_DAYS = 90;\n\n/** Length of ISO date prefix (YYYY-MM-DD) */\nexport const ISO_DATE_PREFIX_LENGTH = 10;\n\n/** Decimal places for average calculations */\nexport const AVG_DECIMAL_PLACES = 1;\n\n/** Decimal places for relevance scores */\nexport const RELEVANCE_DECIMAL_PLACES = 2;\n\n/** Indentation for JSON pretty-printing */\nexport const JSON_INDENT_SPACES = 2;\n\n// ============================================================================\n// Loop / Improve Shared Constants\n// ============================================================================\n\n/** Default Claude model for loop and improve sessions (1M context for autonomous work) */\nexport const DEFAULT_LOOP_MODEL = 'claude-opus-4-6[1m]';\n\n/** Safe pattern for model names: alphanumeric, hyphens, underscores, dots, colons, slashes, brackets */\nexport const MODEL_PATTERN = /^[a-zA-Z0-9_.:[\\]/-]+$/;\n","/**\n * Doctor command — verify external dependencies and project health.\n *\n * Usage: ca doctor\n */\n\nimport { execFileSync } from 'node:child_process';\nimport { existsSync, readFileSync } from 'node:fs';\nimport { join } from 'node:path';\nimport type { Command } from 'commander';\n\nimport { getRepoRoot } from '../cli-utils.js';\nimport { isModelAvailable } from 
'../memory/embeddings/index.js';\nimport { ensureSqliteAvailable } from '../memory/storage/index.js';\nimport { LESSONS_PATH } from '../memory/storage/index.js';\nimport {\n checkBeadsAvailable,\n checkUserScope,\n getClaudeSettingsPath,\n hasAllCompoundAgentHooks,\n readClaudeSettings,\n} from '../setup/index.js';\n\nexport interface DoctorCheck {\n name: string;\n status: 'pass' | 'fail' | 'warn';\n fix?: string;\n}\n\nfunction checkGitignoreHealth(repoRoot: string): boolean {\n const gitignorePath = join(repoRoot, '.gitignore');\n if (!existsSync(gitignorePath)) return false;\n try {\n const content = readFileSync(gitignorePath, 'utf-8');\n const lines = new Set(content.split('\\n').map(l => l.trim()));\n return ['node_modules/', '.claude/.cache/', '.claude/.ca-*.json'].every(p => lines.has(p));\n } catch {\n return false;\n }\n}\n\n/**\n * Run all health checks and return results.\n */\nexport async function runDoctor(repoRoot: string): Promise<DoctorCheck[]> {\n const checks: DoctorCheck[] = [];\n\n // 1. .claude/ directory\n const claudeDir = join(repoRoot, '.claude');\n checks.push(existsSync(claudeDir)\n ? { name: '.claude directory', status: 'pass' }\n : { name: '.claude directory', status: 'fail', fix: 'Run: npx ca setup' });\n\n // 2. Lessons index\n const lessonsPath = join(repoRoot, LESSONS_PATH);\n checks.push(existsSync(lessonsPath)\n ? { name: 'Lessons index', status: 'pass' }\n : { name: 'Lessons index', status: 'warn', fix: 'Run: npx ca setup' });\n\n // 3. Agent templates\n const agentsDir = join(repoRoot, '.claude', 'agents', 'compound');\n checks.push(existsSync(agentsDir)\n ? { name: 'Agent templates', status: 'pass' }\n : { name: 'Agent templates', status: 'fail', fix: 'Run: npx ca setup' });\n\n // 4. Workflow commands\n const commandsDir = join(repoRoot, '.claude', 'commands', 'compound');\n checks.push(existsSync(commandsDir)\n ? { name: 'Workflow commands', status: 'pass' }\n : { name: 'Workflow commands', status: 'fail', fix: 'Run: npx ca setup' });\n\n // 5. Hooks\n const settingsPath = getClaudeSettingsPath(false);\n let hooksOk = false;\n try {\n const settings = await readClaudeSettings(settingsPath);\n hooksOk = hasAllCompoundAgentHooks(settings);\n } catch {\n // settings.json may not exist\n }\n checks.push(hooksOk\n ? { name: 'Claude hooks', status: 'pass' }\n : { name: 'Claude hooks', status: 'fail', fix: 'Run: npx ca setup' });\n\n // 6. Embedding model\n checks.push(checkEmbeddingModel());\n\n // 7. SQLite (better-sqlite3)\n checks.push(checkSqliteHealth());\n\n // 8. pnpm onlyBuiltDependencies config\n const pnpmCheck = checkPnpmBuildConfig(repoRoot);\n if (pnpmCheck !== null) {\n checks.push(pnpmCheck);\n }\n\n // 9. Beads CLI available\n const beadsResult = checkBeadsAvailable();\n checks.push(beadsResult.available\n ? { name: 'Beads CLI', status: 'pass' }\n : { name: 'Beads CLI', status: 'warn', fix: 'Run: ca install-beads' });\n\n // 10. .gitignore health\n checks.push(checkGitignoreHealth(repoRoot)\n ? { name: '.gitignore health', status: 'pass' }\n : { name: '.gitignore health', status: 'warn', fix: 'Run: npx ca setup --update' });\n\n // 11. Usage documentation\n const docPath = join(repoRoot, 'docs', 'compound', 'README.md');\n checks.push(existsSync(docPath)\n ? { name: 'Usage documentation', status: 'pass' }\n : { name: 'Usage documentation', status: 'warn', fix: 'Run: npx ca setup' });\n\n // 12. Beads initialized\n const beadsDir = join(repoRoot, '.beads');\n checks.push(existsSync(beadsDir)\n ? 
{ name: 'Beads initialized', status: 'pass' }\n : { name: 'Beads initialized', status: 'warn', fix: 'Run: bd init' });\n\n // 13. Beads healthy\n if (beadsResult.available && existsSync(beadsDir)) {\n try {\n execFileSync('bd', ['doctor'], { cwd: repoRoot, stdio: 'pipe' });\n checks.push({ name: 'Beads healthy', status: 'pass' });\n } catch {\n checks.push({ name: 'Beads healthy', status: 'warn', fix: 'Run: bd doctor' });\n }\n }\n\n // 14. Codebase scope\n const scope = checkUserScope(repoRoot);\n checks.push(!scope.isUserScope\n ? { name: 'Codebase scope', status: 'pass' }\n : { name: 'Codebase scope', status: 'warn', fix: 'Install in a specific repository, not home directory' });\n\n return checks;\n}\n\nfunction checkEmbeddingModel(): DoctorCheck {\n try {\n return isModelAvailable()\n ? { name: 'Embedding model', status: 'pass' }\n : { name: 'Embedding model', status: 'warn', fix: 'Run: npx ca download-model' };\n } catch {\n return { name: 'Embedding model', status: 'warn', fix: 'Run: npx ca download-model' };\n }\n}\n\nfunction checkSqliteHealth(): DoctorCheck {\n try {\n ensureSqliteAvailable();\n return { name: 'SQLite (better-sqlite3)', status: 'pass' };\n } catch {\n return { name: 'SQLite (better-sqlite3)', status: 'fail', fix: 'Run: pnpm rebuild better-sqlite3 (or npm rebuild better-sqlite3)' };\n }\n}\n\n/**\n * Check if a pnpm project has onlyBuiltDependencies configured for native addons.\n * Returns null for non-pnpm projects (check is irrelevant).\n *\n * NOTE: The required deps list must stay in sync with\n * src/setup/primitives.ts REQUIRED_BUILD_DEPS and scripts/postinstall.mjs.\n */\nexport function checkPnpmBuildConfig(repoRoot: string): DoctorCheck | null {\n const lockPath = join(repoRoot, 'pnpm-lock.yaml');\n const pkgPath = join(repoRoot, 'package.json');\n\n // Single read of package.json, reused for both pnpm detection and config check\n let pkg: Record<string, unknown>;\n try {\n pkg = JSON.parse(readFileSync(pkgPath, 'utf-8')) as Record<string, unknown>;\n } catch { return null; }\n\n const hasLockfile = existsSync(lockPath);\n const hasPmField = typeof pkg.packageManager === 'string' && pkg.packageManager.startsWith('pnpm');\n if (!hasLockfile && !hasPmField) return null;\n\n const pnpmConfig = pkg.pnpm as Record<string, unknown> | undefined;\n const deps = pnpmConfig?.onlyBuiltDependencies;\n if (!Array.isArray(deps)) {\n return { name: 'pnpm build config', status: 'fail', fix: 'Run: npx ca setup (or add \"pnpm.onlyBuiltDependencies\" to package.json)' };\n }\n // Wildcard \"*\" means all builds are allowed\n if (deps.includes('*')) return { name: 'pnpm build config', status: 'pass' };\n const required = ['better-sqlite3', 'node-llama-cpp'];\n const missing = required.filter(d => !deps.includes(d));\n if (missing.length > 0) {\n return { name: 'pnpm build config', status: 'fail', fix: `Missing from onlyBuiltDependencies: [${missing.join(', ')}]. 
Run: npx ca setup` };\n }\n return { name: 'pnpm build config', status: 'pass' };\n}\n\nconst STATUS_ICONS: Record<string, string> = {\n pass: 'OK',\n fail: 'FAIL',\n warn: 'WARN',\n};\n\nexport function registerDoctorCommand(program: Command): void {\n program\n .command('doctor')\n .description('Verify external dependencies and project health')\n .action(async () => {\n const repoRoot = getRepoRoot();\n const checks = await runDoctor(repoRoot);\n\n console.log('Compound Agent Health Check:\\n');\n for (const check of checks) {\n const icon = STATUS_ICONS[check.status];\n const line = ` [${icon}] ${check.name}`;\n console.log(line);\n if (check.fix) {\n console.log(` Fix: ${check.fix}`);\n }\n }\n\n const failures = checks.filter(c => c.status === 'fail');\n const warnings = checks.filter(c => c.status === 'warn');\n console.log('');\n if (failures.length === 0 && warnings.length === 0) {\n console.log('All checks passed.');\n } else {\n if (failures.length > 0) console.log(`${failures.length} check(s) failed.`);\n if (warnings.length > 0) console.log(`${warnings.length} warning(s).`);\n }\n });\n}\n","/**\n * Invalidation commands: wrong, validate\n *\n * Commands for managing lesson validity state.\n */\n\nimport type { Command } from 'commander';\n\nimport { getRepoRoot } from '../cli-utils.js';\nimport { appendMemoryItem, readMemoryItems } from '../memory/storage/index.js';\nimport type { MemoryItem } from '../memory/index.js';\n\nimport { formatError } from '../cli-error-format.js';\n\nimport { out } from './shared.js';\n\n/**\n * Register invalidation commands on the program.\n */\nexport function registerInvalidationCommands(program: Command): void {\n /**\n * Wrong command - Mark a lesson as invalid/wrong.\n *\n * Appends an invalidatedAt timestamp and optional reason to the lesson.\n * Invalidated lessons are excluded from retrieval but remain in storage.\n *\n * @example npx ca wrong L12345678\n * @example npx ca wrong L12345678 --reason \"This advice was incorrect\"\n */\n program\n .command('wrong <id>')\n .description('Mark a lesson as invalid/wrong')\n .option('-r, --reason <text>', 'Reason for invalidation')\n .action(async function (this: Command, id: string, options: { reason?: string }) {\n const repoRoot = getRepoRoot();\n\n // Read all lessons\n const { items } = await readMemoryItems(repoRoot);\n\n // Find the lesson\n const lesson = items.find((l) => l.id === id);\n if (!lesson) {\n console.error(formatError('wrong', 'NOT_FOUND', `Lesson not found: ${id}`, 'Use \"ca list\" to see available lessons'));\n process.exitCode = 1;\n return;\n }\n\n // Check if already invalidated\n if (lesson.invalidatedAt) {\n out.warn(`Lesson ${id} is already marked as invalid.`);\n return;\n }\n\n // Create updated lesson with invalidation\n const updatedItem: MemoryItem = {\n ...lesson,\n invalidatedAt: new Date().toISOString(),\n ...(options.reason !== undefined && { invalidationReason: options.reason }),\n };\n\n // Append the updated lesson (JSONL append-only pattern)\n await appendMemoryItem(repoRoot, updatedItem);\n out.success(`Lesson ${id} marked as invalid.`);\n if (options.reason) {\n console.log(` Reason: ${options.reason}`);\n }\n });\n\n /**\n * Validate command - Remove invalidation from a lesson.\n *\n * Re-enables a previously invalidated lesson for retrieval.\n *\n * @example npx ca validate L12345678\n */\n program\n .command('validate <id>')\n .description('Re-enable a previously invalidated lesson')\n .action(async function (this: Command, id: string) {\n const 
repoRoot = getRepoRoot();\n\n // Read all lessons\n const { items } = await readMemoryItems(repoRoot);\n\n // Find the lesson\n const lesson = items.find((l) => l.id === id);\n if (!lesson) {\n console.error(formatError('validate', 'NOT_FOUND', `Lesson not found: ${id}`, 'Use \"ca list\" to see available lessons'));\n process.exitCode = 1;\n return;\n }\n\n // Check if not invalidated\n if (!lesson.invalidatedAt) {\n out.info(`Lesson ${id} is not invalidated.`);\n return;\n }\n\n // Remove invalidation fields (keep everything else)\n const updatedItem: MemoryItem = {\n ...lesson,\n invalidatedAt: undefined,\n invalidationReason: undefined,\n };\n\n // Append the updated lesson (JSONL append-only pattern)\n await appendMemoryItem(repoRoot, updatedItem);\n out.success(`Lesson ${id} re-enabled (validated).`);\n });\n}\n","/**\n * I/O commands: export, import\n *\n * Commands for importing and exporting lessons.\n */\n\nimport { readFile } from 'node:fs/promises';\nimport type { Command } from 'commander';\n\nimport { getRepoRoot } from '../cli-utils.js';\nimport { appendMemoryItem, readMemoryItems } from '../memory/storage/index.js';\nimport { MemoryItemSchema } from '../memory/index.js';\nimport type { MemoryItem } from '../memory/index.js';\n\nimport { formatError } from '../cli-error-format.js';\n\n// ============================================================================\n// Action Handlers\n// ============================================================================\n\nasync function exportAction(options: { since?: string; tags?: string }): Promise<void> {\n const repoRoot = getRepoRoot();\n\n const { items } = await readMemoryItems(repoRoot);\n\n let filtered = items;\n\n if (options.since) {\n const sinceDate = new Date(options.since);\n if (Number.isNaN(sinceDate.getTime())) {\n console.error(formatError('export', 'INVALID_DATE', `Invalid date format: ${options.since}`, 'Use ISO8601 format (e.g., 2024-01-15)'));\n process.exitCode = 1;\n return;\n }\n filtered = filtered.filter((item) => new Date(item.created) >= sinceDate);\n }\n\n if (options.tags) {\n const filterTags = options.tags.split(',').map((t) => t.trim());\n filtered = filtered.filter((item) => item.tags.some((tag) => filterTags.includes(tag)));\n }\n\n // Output as JSONL (one JSON object per line) for round-trip compatibility with import\n for (const item of filtered) {\n console.log(JSON.stringify(item));\n }\n}\n\nasync function importAction(file: string): Promise<void> {\n const repoRoot = getRepoRoot();\n\n let content: string;\n try {\n content = await readFile(file, 'utf-8');\n } catch (err) {\n const code = (err as NodeJS.ErrnoException).code;\n if (code === 'ENOENT') {\n console.error(formatError('import', 'FILE_NOT_FOUND', `File not found: ${file}`, 'Check the path and try again'));\n } else {\n console.error(formatError('import', 'READ_ERROR', `Error reading file: ${(err as Error).message}`, 'Check file permissions'));\n }\n process.exitCode = 1;\n return;\n }\n\n const { items: existingItems } = await readMemoryItems(repoRoot);\n const existingIds = new Set(existingItems.map((item) => item.id));\n\n const lines = content.split('\\n');\n let imported = 0;\n let skipped = 0;\n let invalid = 0;\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed) continue;\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(trimmed);\n } catch {\n invalid++;\n continue;\n }\n\n const result = MemoryItemSchema.safeParse(parsed);\n if (!result.success) {\n invalid++;\n continue;\n }\n\n const 
item: MemoryItem = result.data;\n\n if (existingIds.has(item.id)) {\n skipped++;\n continue;\n }\n\n await appendMemoryItem(repoRoot, item);\n existingIds.add(item.id);\n imported++;\n }\n\n const lessonWord = imported === 1 ? 'lesson' : 'lessons';\n const parts: string[] = [];\n if (skipped > 0) parts.push(`${skipped} skipped`);\n if (invalid > 0) parts.push(`${invalid} invalid`);\n\n if (parts.length > 0) {\n console.log(`Imported ${imported} ${lessonWord} (${parts.join(', ')})`);\n } else {\n console.log(`Imported ${imported} ${lessonWord}`);\n }\n}\n\n// ============================================================================\n// Command Registration\n// ============================================================================\n\n/**\n * Register I/O commands on the program.\n */\nexport function registerIOCommands(program: Command): void {\n program\n .command('export')\n .description('Export lessons as JSON to stdout')\n .option('--since <date>', 'Only include lessons created after this date (ISO8601)')\n .option('--tags <tags>', 'Filter by tags (comma-separated, OR logic)')\n .action(async (options: { since?: string; tags?: string }) => {\n await exportAction(options);\n });\n\n program\n .command('import <file>')\n .description('Import lessons from a JSONL file')\n .action(async (file: string) => {\n await importAction(file);\n });\n}\n","/**\n * Maintenance commands: compact, rebuild, stats\n *\n * Commands for database health and maintenance.\n */\n\nimport { statSync } from 'node:fs';\nimport { join } from 'node:path';\nimport type { Command } from 'commander';\n\nimport { formatBytes, getRepoRoot } from '../cli-utils.js';\nimport {\n compact,\n countTombstones,\n DB_PATH,\n getRetrievalStats,\n LESSONS_PATH,\n needsCompaction,\n readMemoryItems,\n rebuildIndex,\n syncIfNeeded,\n TOMBSTONE_THRESHOLD,\n} from '../memory/storage/index.js';\n\nimport {\n AGE_FLAG_THRESHOLD_DAYS,\n AVG_DECIMAL_PLACES,\n getLessonAgeDays,\n LESSON_COUNT_WARNING_THRESHOLD,\n out,\n} from './shared.js';\n\n// ============================================================================\n// Action Handlers\n// ============================================================================\n\nasync function compactAction(options: { force?: boolean; dryRun?: boolean }): Promise<void> {\n const repoRoot = getRepoRoot();\n\n const tombstones = await countTombstones(repoRoot);\n const needs = await needsCompaction(repoRoot);\n\n if (options.dryRun) {\n console.log('Dry run - no changes will be made.\\n');\n console.log(`Tombstones found: ${tombstones}`);\n console.log(`Compaction needed: ${needs ? 
'yes' : 'no'}`);\n return;\n }\n\n if (!needs && !options.force) {\n console.log(`Compaction not needed (${tombstones} tombstones, threshold is ${TOMBSTONE_THRESHOLD}).`);\n console.log('Use --force to compact anyway.');\n return;\n }\n\n console.log('Running compaction...');\n const result = await compact(repoRoot);\n\n console.log('\\nCompaction complete:');\n console.log(` Tombstones removed: ${result.tombstonesRemoved}`);\n console.log(` Lessons remaining: ${result.lessonsRemaining}`);\n if (result.droppedInvalid > 0) {\n console.log(` Invalid records dropped: ${result.droppedInvalid}`);\n }\n\n await rebuildIndex(repoRoot);\n console.log(' Index rebuilt.');\n}\n\nasync function rebuildAction(options: { force?: boolean }): Promise<void> {\n const repoRoot = getRepoRoot();\n if (options.force) {\n console.log('Forcing index rebuild...');\n await rebuildIndex(repoRoot);\n console.log('Index rebuilt.');\n } else {\n const rebuilt = await syncIfNeeded(repoRoot);\n if (rebuilt) {\n console.log('Index rebuilt (JSONL changed).');\n } else {\n console.log('Index is up to date.');\n }\n }\n}\n\nasync function statsAction(): Promise<void> {\n const repoRoot = getRepoRoot();\n\n await syncIfNeeded(repoRoot);\n\n const { items } = await readMemoryItems(repoRoot);\n const deletedCount = await countTombstones(repoRoot);\n const totalLessons = items.length;\n\n const retrievalStats = getRetrievalStats(repoRoot);\n const totalRetrievals = retrievalStats.reduce((sum, s) => sum + s.count, 0);\n const avgRetrievals = totalLessons > 0 ? (totalRetrievals / totalLessons).toFixed(AVG_DECIMAL_PLACES) : '0.0';\n\n const jsonlPath = join(repoRoot, LESSONS_PATH);\n const dbPath = join(repoRoot, DB_PATH);\n\n let dataSize = 0;\n let indexSize = 0;\n\n try { dataSize = statSync(jsonlPath).size; } catch { /* File doesn't exist */ }\n try { indexSize = statSync(dbPath).size; } catch { /* File doesn't exist */ }\n\n const totalSize = dataSize + indexSize;\n\n let recentCount = 0;\n let mediumCount = 0;\n let oldCount = 0;\n for (const item of items) {\n const ageDays = getLessonAgeDays(item);\n if (ageDays < 30) {\n recentCount++;\n } else if (ageDays <= AGE_FLAG_THRESHOLD_DAYS) {\n mediumCount++;\n } else {\n oldCount++;\n }\n }\n\n const typeCounts: Record<string, number> = {};\n for (const item of items) {\n typeCounts[item.type] = (typeCounts[item.type] ?? 0) + 1;\n }\n\n const deletedInfo = deletedCount > 0 ? ` (${deletedCount} deleted)` : '';\n console.log(`Lessons: ${totalLessons} total${deletedInfo}`);\n\n if (Object.keys(typeCounts).length > 1 || (Object.keys(typeCounts).length === 1 && !typeCounts['lesson'])) {\n const breakdown = Object.entries(typeCounts)\n .sort(([a], [b]) => a.localeCompare(b))\n .map(([type, count]) => `${count} ${type}`)\n .join(', ');\n console.log(`Types: ${breakdown}`);\n }\n\n if (totalLessons > LESSON_COUNT_WARNING_THRESHOLD) {\n out.warn(`High lesson count may degrade retrieval quality. 
Consider running \\`ca compact\\`.`);\n }\n\n if (totalLessons > 0) {\n console.log(`Age: ${recentCount} <30d, ${mediumCount} 30-90d, ${oldCount} >90d`);\n }\n\n console.log(`Retrievals: ${totalRetrievals} total, ${avgRetrievals} avg per lesson`);\n console.log(`Storage: ${formatBytes(totalSize)} (index: ${formatBytes(indexSize)}, data: ${formatBytes(dataSize)})`);\n}\n\n// ============================================================================\n// Command Registration\n// ============================================================================\n\n/**\n * Register maintenance commands on the program.\n */\nexport function registerMaintenanceCommands(program: Command): void {\n program\n .command('compact')\n .description('Compact lessons: remove tombstones and invalid records')\n .option('-f, --force', 'Run compaction even if below threshold')\n .option('--dry-run', 'Show what would be done without making changes')\n .action(async (options: { force?: boolean; dryRun?: boolean }) => {\n await compactAction(options);\n });\n\n program\n .command('rebuild')\n .description('Rebuild SQLite index from JSONL')\n .option('-f, --force', 'Force rebuild even if unchanged')\n .action(async (options: { force?: boolean }) => {\n await rebuildAction(options);\n });\n\n program\n .command('stats')\n .description('Show database health and statistics')\n .action(async () => {\n await statsAction();\n });\n}\n","/**\n * Prime command - Context recovery for Claude Code with Beads-style trust language.\n *\n * Generates trust language guidelines combined with high-severity lessons\n * for context recovery after compaction or session restart.\n */\n\nimport type { Command } from 'commander';\nimport { join } from 'node:path';\n\nimport { getRepoRoot } from '../cli-utils.js';\nimport { loadSessionLessons } from '../memory/retrieval/index.js';\nimport { syncIfNeeded } from '../memory/storage/index.js';\nimport type { MemoryItem, Source } from '../memory/index.js';\nimport { checkForUpdate, formatUpdateNotificationMarkdown } from '../update-check.js';\nimport { getPhaseState } from './phase-check.js';\n/**\n * Beads-style trust language template.\n *\n * Uses explicit prohibitions, workflow sequencing, and NEVER/MUST language\n * following Beads conventions for maximum adherence.\n *\n * CLI-first: all lesson operations use `npx ca` commands.\n */\nconst TRUST_LANGUAGE_TEMPLATE = `# Compound Agent Active\n\n> **Context Recovery**: Run \\`npx ca prime\\` after compaction, clear, or new session\n\n## CLI Commands (ALWAYS USE THESE)\n\n**You MUST use CLI commands for lesson management:**\n\n| Command | Purpose |\n|---------|---------|\n| \\`npx ca search \"query\"\\` | Search lessons - MUST call before architectural decisions; use anytime you need context |\n| \\`npx ca knowledge \"query\"\\` | Semantic search over project docs - MUST call before architectural decisions; use keyword phrases, not questions |\n| \\`npx ca learn \"insight\"\\` | Capture lessons - call AFTER corrections or discoveries |\n\n## Core Constraints\n\n**Default**: Use CLI commands for lesson management\n**Prohibited**: NEVER edit .claude/lessons/ files directly\n\n**Default**: Propose lessons freely after corrections\n**Prohibited**: NEVER propose without quality gate (novel + specific; prefer actionable)\n\n## Retrieval Protocol\n\nYou MUST call \\`npx ca search\\` and \\`npx ca knowledge\\` BEFORE:\n- Architectural decisions or complex planning\n- Implementing patterns you've done before in this repo\n\n**NEVER skip search for 
complex decisions.** Past mistakes will repeat.\n\nBeyond mandatory triggers, use these commands freely — they are lightweight queries, not heavyweight operations. Uncertain about a pattern? \\`ca search\\`. Need a detail from the docs? \\`ca knowledge\\`. The cost of an unnecessary search is near-zero; the cost of a missed one can be hours.\n\n## Capture Protocol\n\nRun \\`npx ca learn\\` AFTER:\n- User corrects you (\"no\", \"wrong\", \"actually...\")\n- You self-correct after iteration failures\n- Test fails then you fix it\n\n**Quality gate** (must pass before capturing):\n- Novel (not already stored)\n- Specific (clear guidance)\n- Actionable (preferred, not mandatory)\n\n**Workflow**: Search BEFORE deciding, capture AFTER learning.\n`;\n\n/**\n * Format lesson source for human-readable display.\n */\nfunction formatSource(source: Source): string {\n switch (source) {\n case 'user_correction':\n return 'user correction';\n case 'self_correction':\n return 'self correction';\n case 'test_failure':\n return 'test failure';\n case 'manual':\n return 'manual';\n default:\n return source;\n }\n}\n\n/**\n * Format a single lesson for the Emergency Recall section.\n *\n * Format: - **{insight}** ({tags})\n * Learned: {date} via {source}\n */\nfunction formatLessonForPrime(lesson: MemoryItem): string {\n const date = lesson.created.slice(0, 10); // YYYY-MM-DD\n const tags = lesson.tags.length > 0 ? ` (${lesson.tags.join(', ')})` : '';\n const source = formatSource(lesson.source);\n return `- **${lesson.insight}**${tags}\\n Learned: ${date} via ${source}`;\n}\n\nfunction formatActiveCookitSection(repoRoot: string): string | null {\n const state = getPhaseState(repoRoot);\n if (state === null || !state.cookit_active) return null;\n\n const skillsRead = state.skills_read.length === 0 ? '(none)' : state.skills_read.join(', ');\n const gatesPassed = state.gates_passed.length === 0 ? '(none)' : state.gates_passed.join(', ');\n\n return `\n---\n\n# ACTIVE COOK-IT SESSION\n\nEpic: ${state.epic_id}\nPhase: ${state.current_phase} (${state.phase_index}/5)\nSkills read: ${skillsRead}\nGates passed: ${gatesPassed}\nStarted: ${state.started_at}\n\nResume from phase ${state.current_phase}. Run: \\`npx ca phase-check start ${state.current_phase}\\`\nRead the skill file first: \\`.claude/skills/compound/${state.current_phase}/SKILL.md\\`\n`;\n}\n\n/**\n * Generate prime context output for Claude Code.\n *\n * Combines Beads-style trust language guidelines with high-severity lessons\n * for context recovery after compaction or session restart.\n *\n * @param repoRoot - Repository root directory (defaults to getRepoRoot())\n * @returns Formatted markdown string (< 2K tokens)\n */\nexport async function getPrimeContext(repoRoot?: string): Promise<string> {\n const root = repoRoot ?? 
getRepoRoot();\n\n // Sync SQLite index before loading — ensures searches have fresh data\n // after git pull or external JSONL changes.\n try {\n await syncIfNeeded(root);\n } catch {\n // Non-fatal: prime still works from JSONL even if SQLite sync fails\n }\n\n // Load high-severity lessons (top 5, sorted by recency)\n const lessons = await loadSessionLessons(root, 5);\n\n // Build output: trust language first\n let output = TRUST_LANGUAGE_TEMPLATE;\n\n // Add Emergency Recall section if we have high-severity lessons\n if (lessons.length > 0) {\n const formattedLessons = lessons.map(formatLessonForPrime).join('\\n\\n');\n output += `\n---\n\n# [CRITICAL] Mandatory Recall\n\nCritical lessons from past corrections:\n\n${formattedLessons}\n`;\n }\n\n const cookitSection = formatActiveCookitSection(root);\n if (cookitSection !== null) {\n output += cookitSection;\n }\n\n // Append update notification when NOT in a TTY.\n // This is intentionally the complement of shouldCheckForUpdate() in cli-app.ts:\n // TTY sessions get notifications from runProgram(); non-TTY contexts (Claude Code\n // session context) get them here. CI and explicit opt-outs are respected in both paths.\n if (!process.stdout.isTTY && !process.env['CI'] && !process.env['NO_UPDATE_NOTIFIER']) {\n try {\n const updateResult = await checkForUpdate(join(root, '.claude', '.cache'));\n if (updateResult?.updateAvailable) {\n output += formatUpdateNotificationMarkdown(updateResult.current, updateResult.latest);\n }\n } catch {\n // Non-fatal: update check failure should never block prime\n }\n }\n\n return output;\n}\n\n/**\n * Register prime command on the program.\n */\nexport function registerPrimeCommand(program: Command): void {\n /**\n * Prime command - Output context recovery for Claude Code.\n *\n * Combines Beads-style trust language guidelines with high-severity lessons.\n * Used after compaction or context loss to remind Claude of the\n * compound-agent workflow, rules, and commands.\n *\n * @example npx ca prime\n */\n program\n .command('prime')\n .description('Output context for Claude Code (guidelines + top lessons)')\n .action(async () => {\n const output = await getPrimeContext();\n console.log(output);\n });\n}\n","/**\n * Update-check module -- fetches latest version from npm, caches results,\n * and formats upgrade notifications.\n */\n\nimport { readFileSync, writeFileSync, mkdirSync, statSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { VERSION } from './version.js';\n\nexport interface UpdateCheckResult {\n current: string;\n latest: string;\n updateAvailable: boolean;\n}\n\ninterface CacheData {\n latest: string;\n}\n\nconst CACHE_TTL_MS = 24 * 60 * 60 * 1000; // 24 hours\nconst FETCH_TIMEOUT_MS = 3000;\nconst CACHE_FILENAME = 'update-check.json';\n\n/**\n * Fetch the latest published version of a package from the npm registry.\n * Uses the dist-tags endpoint (~100 bytes) instead of the full manifest.\n * Returns null on any error.\n */\nexport async function fetchLatestVersion(\n packageName: string = 'compound-agent',\n): Promise<string | null> {\n try {\n const res = await fetch(\n `https://registry.npmjs.org/-/package/${packageName}/dist-tags`,\n { signal: AbortSignal.timeout(FETCH_TIMEOUT_MS) },\n );\n if (!res.ok) return null;\n const data = (await res.json()) as Record<string, unknown>;\n const latest = data['latest'];\n return typeof latest === 'string' ? 
latest : null;\n } catch {\n return null;\n }\n}\n\n/**\n * Check whether an update is available, using a file-based cache to avoid\n * hitting the registry on every invocation.\n * Returns null on any failure.\n */\nexport async function checkForUpdate(\n cacheDir: string,\n): Promise<UpdateCheckResult | null> {\n try {\n const cachePath = join(cacheDir, CACHE_FILENAME);\n\n // Try reading a fresh cache\n const cached = readCache(cachePath);\n if (cached) {\n return {\n current: VERSION,\n latest: cached.latest,\n updateAvailable: semverGt(cached.latest, VERSION),\n };\n }\n\n // Cache miss / expired / corrupt -- fetch from registry\n const latest = await fetchLatestVersion();\n if (latest === null) return null;\n\n // Write cache\n try {\n mkdirSync(cacheDir, { recursive: true });\n const cacheData: CacheData = { latest };\n writeFileSync(cachePath, JSON.stringify(cacheData));\n } catch {\n // Cache write failure is non-fatal\n }\n\n return {\n current: VERSION,\n latest,\n updateAvailable: semverGt(latest, VERSION),\n };\n } catch {\n return null;\n }\n}\n\n/**\n * Returns true when the major version of `latest` exceeds that of `current`.\n */\nexport function isMajorUpdate(current: string, latest: string): boolean {\n return parseInt(latest.split('.')[0]!, 10) > parseInt(current.split('.')[0]!, 10);\n}\n\n/**\n * Format a human-readable update notification string (plain text, for TTY).\n * Major updates get an urgency label; shows both global and dev-dep commands.\n */\nexport function formatUpdateNotification(\n current: string,\n latest: string,\n): string {\n const label = isMajorUpdate(current, latest) ? 'Major update' : 'Update available';\n const warning = isMajorUpdate(current, latest)\n ? '\\n May contain breaking changes -- check the changelog.'\n : '';\n return [\n `${label}: ${current} -> ${latest}${warning}`,\n `Run: npm update -g compound-agent (global)`,\n ` pnpm add -D compound-agent@latest (dev dependency)`,\n ].join('\\n');\n}\n\n/**\n * Format an update notification in markdown (for non-TTY / prime output).\n */\nexport function formatUpdateNotificationMarkdown(\n current: string,\n latest: string,\n): string {\n const urgency = isMajorUpdate(current, latest)\n ? ' (MAJOR - may contain breaking changes)'\n : '';\n return `\\n---\\n# Update Available\\ncompound-agent v${latest} is available (current: v${current})${urgency}.\\nRun: \\`npm update -g compound-agent\\` (global) or \\`pnpm add -D compound-agent@latest\\` (dev dependency)\\n`;\n}\n\n/**\n * Determine whether an update check should run.\n * Skips non-TTY, CI environments, and explicit opt-outs.\n */\nexport function shouldCheckForUpdate(): boolean {\n if (!process.stdout.isTTY) return false;\n if (process.env['CI']) return false;\n if (process.env['NO_UPDATE_NOTIFIER']) return false;\n if (process.env['NODE_ENV'] === 'test') return false;\n return true;\n}\n\n// ---------------------------------------------------------------------------\n// Internal helpers\n// ---------------------------------------------------------------------------\n\n/**\n * Returns true if version a is strictly greater than version b.\n * Handles standard MAJOR.MINOR.PATCH semver format.\n * Pre-release suffixes (e.g. 
\"2.0.0-beta.1\") are stripped before comparison\n * so that pre-releases are never promoted over stable releases.\n */\nfunction semverGt(a: string, b: string): boolean {\n const parse = (v: string): [number, number, number] => {\n // Strip pre-release suffix: \"2.0.0-beta.1\" -> \"2.0.0\"\n const clean = v.split('-')[0]!;\n const parts = clean.split('.').map(n => {\n const num = parseInt(n, 10);\n return isNaN(num) ? 0 : num;\n });\n return [parts[0] ?? 0, parts[1] ?? 0, parts[2] ?? 0];\n };\n const [aMaj, aMin, aPat] = parse(a);\n const [bMaj, bMin, bPat] = parse(b);\n if (aMaj !== bMaj) return aMaj > bMaj;\n if (aMin !== bMin) return aMin > bMin;\n return aPat > bPat;\n}\n\nfunction readCache(cachePath: string): CacheData | null {\n try {\n const stat = statSync(cachePath);\n if (Date.now() - stat.mtimeMs > CACHE_TTL_MS) return null;\n\n const raw = readFileSync(cachePath, 'utf-8');\n const data = JSON.parse(raw) as CacheData;\n if (typeof data.latest !== 'string' || !data.latest) return null;\n return data;\n } catch {\n return null;\n }\n}\n","/**\n * Lessons audit check.\n *\n * Surfaces high-severity lessons as info-level findings.\n */\n\nimport { LESSONS_PATH, readMemoryItems } from '../../memory/storage/index.js';\nimport type { AuditCheckResult } from '../types.js';\n\n/**\n * Check for high-severity lessons and return as info findings.\n *\n * @param repoRoot - Repository root directory\n * @returns Audit check result with findings and filesChecked\n */\nexport async function checkLessons(repoRoot: string): Promise<AuditCheckResult> {\n const { items } = await readMemoryItems(repoRoot);\n const findings: AuditCheckResult['findings'] = [];\n\n for (const item of items) {\n if (item.severity === 'high') {\n findings.push({\n file: '',\n issue: `High-severity lesson: ${item.insight}`,\n severity: 'info',\n relatedLessonId: item.id,\n source: 'lesson',\n });\n }\n }\n\n const filesChecked = items.length > 0 ? [LESSONS_PATH] : [];\n return { findings, filesChecked };\n}\n","/**\n * Patterns audit check.\n *\n * Searches source files for known bad patterns from memory items.\n */\n\nimport { readFileSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport { readMemoryItems } from '../../memory/storage/index.js';\nimport { findFiles } from '../../rules/index.js';\nimport type { AuditCheckResult } from '../types.js';\n\n/**\n * Check for bad patterns in source files.\n *\n * @param repoRoot - Repository root directory\n * @returns Audit check result with findings and filesChecked\n */\nexport async function checkPatterns(repoRoot: string): Promise<AuditCheckResult> {\n const { items } = await readMemoryItems(repoRoot);\n\n // Filter items that have pattern.bad defined\n const patterned = items.filter((item) => item.pattern?.bad);\n if (patterned.length === 0) {\n return { findings: [], filesChecked: [] };\n }\n\n // Find source files to scan\n const sourceFiles = findFiles(repoRoot, '**/*.ts');\n const findings: AuditCheckResult['findings'] = [];\n\n for (const item of patterned) {\n const bad = item.pattern!.bad;\n for (const relPath of sourceFiles) {\n const content = readFileSync(join(repoRoot, relPath), 'utf-8');\n if (content.includes(bad)) {\n findings.push({\n file: relPath,\n issue: `Bad pattern found: \"${bad}\" (${item.insight})`,\n severity: 'warning',\n relatedLessonId: item.id,\n suggestedFix: item.pattern!.good ? 
`Use: ${item.pattern!.good}` : undefined,\n source: 'pattern',\n });\n }\n }\n }\n\n return { findings, filesChecked: sourceFiles };\n}\n","/**\n * Zod schemas for rule configuration.\n *\n * Rules are defined in .claude/rules.json and describe mechanical checks\n * that can be run against a codebase.\n */\n\nimport { z } from 'zod';\n\n/** Rule severity levels. */\nexport const SeveritySchema = z.enum(['error', 'warning', 'info']);\n\n/** File-pattern check: regex match on files matching a glob. */\nexport const FilePatternCheckSchema = z.object({\n type: z.literal('file-pattern'),\n glob: z.string(),\n pattern: z.string(),\n mustMatch: z.boolean().optional(),\n});\n\n/** File-size check: line count limit on files matching a glob. */\nexport const FileSizeCheckSchema = z.object({\n type: z.literal('file-size'),\n glob: z.string(),\n maxLines: z.number().int().positive(),\n});\n\n/** Script check: run a shell command and check exit code. */\nexport const ScriptCheckSchema = z.object({\n type: z.literal('script'),\n command: z.string(),\n expectExitCode: z.number().int().optional(),\n timeout: z.number().int().positive().optional(),\n});\n\n/** Discriminated union of all check types. */\nexport const RuleCheckSchema = z.discriminatedUnion('type', [\n FilePatternCheckSchema,\n FileSizeCheckSchema,\n ScriptCheckSchema,\n]);\n\n/** A single rule definition. */\nexport const RuleSchema = z.object({\n id: z.string().min(1),\n description: z.string(),\n severity: SeveritySchema,\n check: RuleCheckSchema,\n remediation: z.string(),\n});\n\n/** Top-level rule configuration file schema. */\nexport const RuleConfigSchema = z.object({\n rules: z.array(RuleSchema),\n});\n\n// Type exports\nexport type Severity = z.infer<typeof SeveritySchema>;\nexport type FilePatternCheck = z.infer<typeof FilePatternCheckSchema>;\nexport type FileSizeCheck = z.infer<typeof FileSizeCheckSchema>;\nexport type ScriptCheck = z.infer<typeof ScriptCheckSchema>;\nexport type RuleCheck = z.infer<typeof RuleCheckSchema>;\nexport type Rule = z.infer<typeof RuleSchema>;\nexport type RuleConfig = z.infer<typeof RuleConfigSchema>;\n","/**\n * Simple glob-like file finder using Node.js built-in fs.\n *\n * Supports basic glob patterns: **, *, and extension matching.\n * No external dependencies required.\n */\n\nimport { readdirSync, statSync } from 'node:fs';\nimport { join, relative } from 'node:path';\n\n/**\n * Convert a simple glob pattern to a regex.\n * Supports: ** (any path), * (any segment), .ext matching.\n *\n * @param glob - Glob pattern (e.g., \"**\\/*.ts\", \"src/*.js\")\n * @returns RegExp that matches the pattern\n */\nexport function globToRegex(glob: string): RegExp {\n const pattern = glob\n .replace(/\\./g, '\\\\.') // escape dots\n .replace(/\\*\\*\\//g, '(.+/)?') // ** matches any directory depth\n .replace(/\\*/g, '[^/]*'); // * matches within a single segment\n return new RegExp(`^${pattern}$`);\n}\n\n/**\n * Find files in baseDir matching a glob pattern.\n *\n * @param baseDir - Root directory to search from\n * @param glob - Glob pattern to match\n * @returns Array of relative file paths matching the pattern\n */\nexport function findFiles(baseDir: string, glob: string): string[] {\n const regex = globToRegex(glob);\n const results: string[] = [];\n\n function walk(dir: string): void {\n const entries = readdirSync(dir);\n for (const entry of entries) {\n // Skip hidden directories and node_modules\n if (entry.startsWith('.') || entry === 'node_modules') continue;\n\n const fullPath = join(dir, entry);\n 
const stat = statSync(fullPath);\n if (stat.isDirectory()) {\n walk(fullPath);\n } else {\n const relPath = relative(baseDir, fullPath);\n if (regex.test(relPath)) {\n results.push(relPath);\n }\n }\n }\n }\n\n walk(baseDir);\n return results.sort();\n}\n","/**\n * File-pattern rule check implementation.\n *\n * Scans files matching a glob for a regex pattern.\n * By default, matches are violations. With mustMatch=true,\n * files missing the pattern are violations.\n */\n\nimport { readFileSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport type { FilePatternCheck } from '../types.js';\nimport type { Violation } from '../engine.js';\n\nimport { findFiles } from './glob-utils.js';\n\n/**\n * Run a file-pattern check against files in baseDir.\n *\n * @param baseDir - Root directory to search from\n * @param check - The file-pattern check configuration\n * @returns Array of violations found\n */\nexport function runFilePatternCheck(\n baseDir: string,\n check: FilePatternCheck,\n): Violation[] {\n const files = findFiles(baseDir, check.glob);\n const regex = new RegExp(check.pattern);\n const violations: Violation[] = [];\n\n for (const file of files) {\n const fullPath = join(baseDir, file);\n const content = readFileSync(fullPath, 'utf-8');\n const lines = content.split('\\n');\n\n if (check.mustMatch) {\n const found = lines.some((line) => regex.test(line));\n if (!found) {\n violations.push({\n file,\n message: `Pattern ${check.pattern} missing from file`,\n });\n }\n } else {\n for (let i = 0; i < lines.length; i++) {\n if (regex.test(lines[i]!)) {\n violations.push({\n file,\n line: i + 1,\n message: `Pattern ${check.pattern} matched`,\n });\n }\n }\n }\n }\n\n return violations;\n}\n","/**\n * File-size rule check implementation.\n *\n * Checks that files matching a glob do not exceed a line count limit.\n */\n\nimport { readFileSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport type { FileSizeCheck } from '../types.js';\nimport type { Violation } from '../engine.js';\n\nimport { findFiles } from './glob-utils.js';\n\n/**\n * Run a file-size check against files in baseDir.\n *\n * @param baseDir - Root directory to search from\n * @param check - The file-size check configuration\n * @returns Array of violations found\n */\nexport function runFileSizeCheck(\n baseDir: string,\n check: FileSizeCheck,\n): Violation[] {\n const files = findFiles(baseDir, check.glob);\n const violations: Violation[] = [];\n\n for (const file of files) {\n const content = readFileSync(join(baseDir, file), 'utf-8');\n // Count non-empty trailing: split and filter trailing empty from final newline\n const lineCount = content === '' ? 0 : content.split('\\n').filter((_, i, arr) => i < arr.length - 1 || arr[i] !== '').length;\n\n if (lineCount > check.maxLines) {\n violations.push({\n file,\n message: `File has ${lineCount} lines, exceeds limit of ${check.maxLines}`,\n });\n }\n }\n\n return violations;\n}\n","/**\n * Script rule check implementation.\n *\n * Runs a shell command and checks the exit code.\n */\n\nimport { execSync } from 'node:child_process';\n\nimport type { ScriptCheck } from '../types.js';\nimport type { Violation } from '../engine.js';\n\n/**\n * Run a script check by executing a shell command.\n *\n * @param check - The script check configuration\n * @returns Array of violations (empty if command exits with expected code)\n */\n/** Default timeout for script checks (30 seconds). 
*/\nconst DEFAULT_SCRIPT_TIMEOUT = 30_000;\n\nexport function runScriptCheck(check: ScriptCheck, baseDir?: string): Violation[] {\n const expectedCode = check.expectExitCode ?? 0;\n const timeout = check.timeout ?? DEFAULT_SCRIPT_TIMEOUT;\n\n try {\n execSync(check.command, { stdio: ['pipe', 'pipe', 'pipe'], cwd: baseDir, timeout });\n // Exit code 0\n if (expectedCode !== 0) {\n return [{ message: `Script exited with exit code 0, expected ${expectedCode}` }];\n }\n return [];\n } catch (err: unknown) {\n const exitCode = (err as { status?: number }).status ?? 1;\n if (exitCode === expectedCode) {\n return [];\n }\n const stderr = ((err as { stderr?: Buffer }).stderr ?? Buffer.alloc(0))\n .toString('utf-8')\n .trim();\n const msg = stderr\n ? `Script exited with exit code ${exitCode} (expected ${expectedCode}): ${stderr}`\n : `Script exited with exit code ${exitCode} (expected ${expectedCode})`;\n return [{ message: msg }];\n }\n}\n","/**\n * Rule engine: loads config, runs checks, formats output.\n */\n\nimport { existsSync, readFileSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport { RuleConfigSchema } from './types.js';\nimport type { Rule, RuleConfig } from './types.js';\n\nimport { runFilePatternCheck } from './checks/file-pattern.js';\nimport { runFileSizeCheck } from './checks/file-size.js';\nimport { runScriptCheck } from './checks/script.js';\n\n/** A single violation found by a rule check. */\nexport interface Violation {\n file?: string;\n line?: number;\n message: string;\n}\n\n/** Result of running a single rule. */\nexport interface RuleResult {\n rule: Rule;\n violations: Violation[];\n passed: boolean;\n}\n\n/** Severity label mapping for output formatting. */\nconst SEVERITY_LABELS: Record<string, string> = {\n error: 'ERROR',\n warning: 'WARN',\n info: 'INFO',\n};\n\n/**\n * Load rule configuration from .claude/rules.json.\n *\n * @param baseDir - Repository root directory\n * @returns Parsed rule configuration (empty rules if no config file)\n * @throws On invalid JSON or schema validation failure\n */\nexport function loadRuleConfig(baseDir: string): RuleConfig {\n const configPath = join(baseDir, '.claude', 'rules.json');\n if (!existsSync(configPath)) {\n return { rules: [] };\n }\n\n const raw = readFileSync(configPath, 'utf-8');\n const json: unknown = JSON.parse(raw);\n return RuleConfigSchema.parse(json);\n}\n\n/**\n * Run all rules against the codebase.\n *\n * @param baseDir - Repository root directory\n * @param rules - Array of rules to check\n * @returns Array of results, one per rule\n */\nexport function runRules(baseDir: string, rules: Rule[]): RuleResult[] {\n return rules.map((rule) => {\n try {\n const violations = runCheck(baseDir, rule);\n return { rule, violations, passed: violations.length === 0 };\n } catch (err) {\n const message = err instanceof Error ? err.message : 'Rule check failed';\n return { rule, violations: [{ message: `Rule check error: ${message}` }], passed: false };\n }\n });\n}\n\n/**\n * Format a single violation as an agent-legible line.\n *\n * Format: SEVERITY [rules] rule-id: file:line -- remediation\n *\n * @param rule - The rule that was violated\n * @param violation - The specific violation\n * @returns Formatted single-line string\n */\nexport function formatViolation(rule: Rule, violation: Violation): string {\n const label = SEVERITY_LABELS[rule.severity] ?? 'INFO';\n const location = violation.file\n ? violation.line\n ? 
`${violation.file}:${violation.line}`\n : violation.file\n : '';\n const locationPart = location ? ` ${location} --` : '';\n const messagePart = violation.message ? ` ${violation.message} --` : '';\n return `${label} [rules] ${rule.id}:${locationPart}${messagePart} ${rule.remediation}`;\n}\n\n/** Dispatch a rule check to the appropriate handler. */\nfunction runCheck(baseDir: string, rule: Rule): Violation[] {\n switch (rule.check.type) {\n case 'file-pattern':\n return runFilePatternCheck(baseDir, rule.check);\n case 'file-size':\n return runFileSizeCheck(baseDir, rule.check);\n case 'script':\n return runScriptCheck(rule.check, baseDir);\n }\n}\n","/**\n * Rules audit check.\n *\n * Wraps loadRuleConfig + runRules and converts violations to AuditFinding format.\n */\n\nimport { loadRuleConfig, runRules } from '../../rules/index.js';\nimport type { AuditCheckResult } from '../types.js';\n\n/**\n * Check rules and return findings with files checked.\n *\n * @param repoRoot - Repository root directory\n * @returns Audit check result with findings and filesChecked\n */\nexport function checkRules(repoRoot: string): AuditCheckResult {\n let config;\n try {\n config = loadRuleConfig(repoRoot);\n } catch (err) {\n const message = err instanceof Error ? err.message : 'Failed to load rules config';\n return {\n findings: [{\n file: '.claude/rules.json',\n issue: `Invalid rules configuration: ${message}`,\n severity: 'error',\n source: 'rule',\n }],\n filesChecked: [],\n };\n }\n\n if (config.rules.length === 0) {\n return { findings: [], filesChecked: [] };\n }\n\n const results = runRules(repoRoot, config.rules);\n const findings: AuditCheckResult['findings'] = [];\n const filesCheckedSet = new Set<string>();\n\n for (const result of results) {\n for (const violation of result.violations) {\n if (violation.file) {\n filesCheckedSet.add(violation.file);\n }\n findings.push({\n file: violation.file ?? 
'',\n issue: violation.message,\n severity: result.rule.severity,\n suggestedFix: result.rule.remediation,\n source: 'rule',\n });\n }\n }\n\n return { findings, filesChecked: [...filesCheckedSet] };\n}\n","/**\n * Audit engine: orchestrates checks and builds report.\n */\n\nimport { checkLessons } from './checks/lessons.js';\nimport { checkPatterns } from './checks/patterns.js';\nimport { checkRules } from './checks/rules.js';\nimport type { AuditCheckResult, AuditFinding, AuditOptions, AuditReport } from './types.js';\n\n/**\n * Run audit checks and build a report.\n *\n * @param repoRoot - Repository root directory\n * @param options - Toggle individual checks (all enabled by default)\n * @returns Complete audit report with findings and summary\n */\nexport async function runAudit(\n repoRoot: string,\n options: AuditOptions = {}\n): Promise<AuditReport> {\n const { includeRules = true, includePatterns = true, includeLessons = true } = options;\n\n const findings: AuditFinding[] = [];\n const allCheckedFiles = new Set<string>();\n\n function collect(result: AuditCheckResult): void {\n findings.push(...result.findings);\n for (const f of result.filesChecked) {\n allCheckedFiles.add(f);\n }\n }\n\n if (includeRules) {\n collect(checkRules(repoRoot));\n }\n\n if (includePatterns) {\n collect(await checkPatterns(repoRoot));\n }\n\n if (includeLessons) {\n collect(await checkLessons(repoRoot));\n }\n\n const errors = findings.filter((f) => f.severity === 'error').length;\n const warnings = findings.filter((f) => f.severity === 'warning').length;\n const infos = findings.filter((f) => f.severity === 'info').length;\n\n return {\n findings,\n summary: { errors, warnings, infos, filesChecked: allCheckedFiles.size },\n timestamp: new Date().toISOString(),\n };\n}\n","/**\n * Audit module types and Zod schemas.\n */\n\nimport { z } from 'zod';\n\n/** Schema for a single audit finding. */\nexport const AuditFindingSchema = z.object({\n file: z.string(),\n issue: z.string(),\n severity: z.enum(['error', 'warning', 'info']),\n relatedLessonId: z.string().optional(),\n suggestedFix: z.string().optional(),\n source: z.enum(['rule', 'pattern', 'lesson']),\n});\n\n/** Schema for the audit summary. */\nexport const AuditSummarySchema = z.object({\n errors: z.number(),\n warnings: z.number(),\n infos: z.number(),\n filesChecked: z.number(),\n});\n\n/** Schema for a complete audit report. */\nexport const AuditReportSchema = z.object({\n findings: z.array(AuditFindingSchema),\n summary: AuditSummarySchema,\n timestamp: z.string(),\n});\n\nexport type AuditFinding = z.infer<typeof AuditFindingSchema>;\nexport type AuditSummary = z.infer<typeof AuditSummarySchema>;\nexport type AuditReport = z.infer<typeof AuditReportSchema>;\n\n/** Return type for individual audit check functions. */\nexport interface AuditCheckResult {\n findings: AuditFinding[];\n filesChecked: string[];\n}\n\n/** Options to toggle individual audit checks. 
*/\nexport interface AuditOptions {\n includeRules?: boolean;\n includePatterns?: boolean;\n includeLessons?: boolean;\n}\n","/**\n * CLI command: ca knowledge <query>\n *\n * Search the docs knowledge base using hybrid search.\n */\n\nimport type { Command } from 'commander';\n\nimport { getRepoRoot, parseLimit } from '../cli-utils.js';\nimport { formatError } from '../cli-error-format.js';\nimport { withEmbedding } from '../memory/embeddings/index.js';\nimport { searchKnowledge } from '../memory/knowledge/index.js';\nimport { openKnowledgeDb, closeKnowledgeDb, getChunkCount } from '../memory/storage/sqlite-knowledge/index.js';\nimport { getGlobalOpts, out } from './shared.js';\n\nconst MAX_DISPLAY_TEXT = 200;\n\nexport function registerKnowledgeCommand(program: Command): void {\n program\n .command('knowledge <query>')\n .description('Search docs knowledge base')\n .option('-n, --limit <number>', 'Maximum results', '6')\n .action(async function (this: Command, query: string, opts: { limit: string }) {\n const globalOpts = getGlobalOpts(this);\n let limit: number;\n try {\n limit = parseLimit(opts.limit, 'limit');\n } catch (err) {\n const message = err instanceof Error ? err.message : 'Invalid limit';\n console.error(formatError('knowledge', 'INVALID_LIMIT', message, 'Use -n with a positive integer'));\n process.exitCode = 1;\n return;\n }\n\n const repoRoot = getRepoRoot();\n\n try {\n // Check if DB has chunks; auto-index if empty\n openKnowledgeDb(repoRoot);\n if (getChunkCount(repoRoot) === 0) {\n try {\n const { indexDocs } = await import('../memory/knowledge/indexing.js');\n out.info('Knowledge base empty. Indexing docs...');\n const result = await indexDocs(repoRoot);\n if (result.filesIndexed === 0) {\n out.info('No docs found to index. Add docs/ directory or run: npx ca index-docs --help');\n return;\n }\n } catch (indexErr) {\n const msg = indexErr instanceof Error ? indexErr.message : 'Unknown error';\n out.info(`Auto-index failed (${msg}). Run manually: npx ca index-docs`);\n }\n }\n\n const results = await withEmbedding(async () => searchKnowledge(repoRoot, query, { limit }));\n\n if (results.length === 0) {\n out.info('No matching results found.');\n return;\n }\n\n for (const r of results) {\n const { filePath, startLine, endLine, text } = r.item;\n const truncated = text.length > MAX_DISPLAY_TEXT ? text.slice(0, MAX_DISPLAY_TEXT) + '...' : text;\n const displayText = truncated.replace(/\\n/g, ' ');\n\n if (globalOpts.verbose) {\n console.log(`[${filePath}:L${startLine}-L${endLine}] (score: ${r.score.toFixed(2)}) ${displayText}`);\n } else {\n console.log(`[${filePath}:L${startLine}-L${endLine}] ${displayText}`);\n }\n }\n } catch (err) {\n const message = err instanceof Error ? 
err.message : 'Unknown error';\n console.error(formatError('knowledge', 'SEARCH_FAILED', message, 'Check that docs are indexed'));\n process.exitCode = 1;\n } finally {\n closeKnowledgeDb();\n }\n });\n}\n","/**\n * CLI command: index-docs\n *\n * Index docs/ directory into the knowledge base for retrieval.\n *\n * Usage: ca index-docs [--force] [--embed]\n */\n\nimport type { Command } from 'commander';\n\nimport { getRepoRoot } from '../cli-utils.js';\nimport { indexDocs } from '../memory/knowledge/index.js';\nimport { withEmbedding } from '../memory/embeddings/index.js';\nimport { closeKnowledgeDb } from '../memory/storage/sqlite-knowledge/index.js';\nimport { out } from './shared.js';\n\nexport function registerKnowledgeIndexCommand(program: Command): void {\n program\n .command('index-docs')\n .description('Index docs/ directory into knowledge base')\n .option('--force', 'Re-index all files (ignore cache)')\n .option('--embed', 'Embed chunks for semantic search')\n .action(async function (this: Command, options: { force?: boolean; embed?: boolean }) {\n const repoRoot = getRepoRoot();\n\n out.info('Indexing docs/ directory...');\n\n try {\n const result = await withEmbedding(async () => indexDocs(repoRoot, {\n force: options.force,\n embed: options.embed,\n }));\n\n const skippedPart = result.filesSkipped > 0\n ? ` (${result.filesSkipped} skipped)`\n : '';\n const deletedPart = result.chunksDeleted > 0\n ? `, ${result.chunksDeleted} deleted`\n : '';\n const duration = (result.durationMs / 1000).toFixed(1);\n\n out.info(\n `Indexed ${result.filesIndexed} file${result.filesIndexed !== 1 ? 's' : ''}${skippedPart}, ` +\n `${result.chunksCreated} chunk${result.chunksCreated !== 1 ? 's' : ''} created${deletedPart}`\n );\n if (result.chunksEmbedded > 0) {\n out.info(`${result.chunksEmbedded} chunk${result.chunksEmbedded !== 1 ? 
's' : ''} embedded`);\n }\n if (result.filesErrored > 0) {\n out.warn(`${result.filesErrored} file(s) had errors during indexing`);\n }\n out.info(`Duration: ${duration}s`);\n } finally {\n closeKnowledgeDb();\n }\n });\n\n // Internal worker command for background embedding (spawned by init/setup)\n program\n .command('embed-worker <repoRoot>', { hidden: true })\n .description('Internal: background embedding worker')\n .action(async (repoRoot: string) => {\n const { existsSync, statSync } = await import('node:fs');\n if (!existsSync(repoRoot) || !statSync(repoRoot).isDirectory()) {\n out.error(`Invalid repoRoot: \"${repoRoot}\" is not a directory`);\n process.exitCode = 1;\n return;\n }\n const { runBackgroundEmbed } = await import('../memory/knowledge/embed-background.js');\n await runBackgroundEmbed(repoRoot);\n });\n}\n","/**\n * Clean-lessons command: Analyze lessons for semantic duplicates.\n *\n * Uses embedding model to find similar lesson pairs and outputs\n * structured diagnostic for the lessons-reviewer subagent.\n */\n\nimport type { Command } from 'commander';\n\nimport { getRepoRoot } from '../cli-utils.js';\nimport { formatError } from '../cli-error-format.js';\nimport { embedText, isModelAvailable, withEmbedding } from '../memory/embeddings/index.js';\nimport { findSimilarLessons } from '../memory/search/index.js';\nimport { readMemoryItems, syncIfNeeded } from '../memory/storage/index.js';\nimport type { MemoryItem } from '../memory/index.js';\n\ninterface LessonPair {\n aId: string;\n aInsight: string;\n bId: string;\n bInsight: string;\n score: number;\n}\n\n/**\n * Find deduplicated similar lesson pairs using embedding similarity.\n */\nasync function findDuplicatePairs(repoRoot: string, activeItems: MemoryItem[]): Promise<LessonPair[]> {\n const pairs: LessonPair[] = [];\n const seen = new Set<string>();\n\n for (const item of activeItems) {\n const similar = await findSimilarLessons(repoRoot, item.insight, {\n excludeId: item.id,\n items: activeItems,\n });\n\n for (const match of similar) {\n const key = [item.id, match.item.id].sort().join(':');\n if (!seen.has(key)) {\n seen.add(key);\n pairs.push({\n aId: item.id,\n aInsight: item.insight,\n bId: match.item.id,\n bInsight: match.item.insight,\n score: match.score,\n });\n }\n }\n }\n\n return pairs;\n}\n\n/**\n * Print structured diagnostic output for flagged pairs.\n */\nfunction printReport(pairs: LessonPair[]): void {\n console.log('# Lessons Review Required');\n console.log('');\n console.log(`Found ${pairs.length} similar lesson pair(s) that may need attention.`);\n console.log('');\n console.log('## Flagged Pairs');\n console.log('');\n\n for (let i = 0; i < pairs.length; i++) {\n const pair = pairs[i]!;\n console.log(\n `### Pair ${i + 1}: ${pair.aId} <-> ${pair.bId} (similarity: ${(pair.score * 100).toFixed(0)}%)`,\n );\n console.log(`- **${pair.aId}**: ${pair.aInsight}`);\n console.log(`- **${pair.bId}**: ${pair.bInsight}`);\n console.log('');\n }\n\n console.log('## Instructions');\n console.log('');\n console.log('Spawn the lessons-reviewer subagent to analyze these pairs:');\n console.log('');\n console.log(' /lessons-reviewer');\n console.log('');\n console.log('The reviewer will classify each pair and propose cleanup actions.');\n}\n\nasync function cleanLessonsAction(): Promise<void> {\n const repoRoot = getRepoRoot();\n\n if (!isModelAvailable()) {\n console.error(\n formatError('clean-lessons', 'MODEL_UNAVAILABLE', 'Embedding model not available', 'Run: npx ca download-model'),\n );\n 
process.exitCode = 1;\n return;\n }\n\n await withEmbedding(async () => {\n // Early probe to catch runtime failures\n try {\n await embedText('test');\n } catch (e) {\n console.error(\n formatError(\n 'clean-lessons',\n 'MODEL_UNUSABLE',\n `Embedding model failed to initialize: ${e instanceof Error ? e.message : String(e)}`,\n 'Check model compatibility',\n ),\n );\n process.exitCode = 1;\n return;\n }\n\n await syncIfNeeded(repoRoot);\n const { items } = await readMemoryItems(repoRoot);\n const activeItems = items.filter((item) => !item.invalidatedAt && item.type === 'lesson');\n if (items.length > activeItems.length) {\n console.log(`Analyzing ${activeItems.length} lesson-type items only (${items.length - activeItems.length} non-lesson items excluded).`);\n }\n const pairs = await findDuplicatePairs(repoRoot, activeItems);\n\n if (pairs.length === 0) {\n console.log('No similar lessons found. Your lesson database is clean.');\n return;\n }\n\n printReport(pairs);\n });\n}\n\nexport function registerCleanLessonsCommand(program: Command): void {\n program\n .command('clean-lessons')\n .description('Analyze lessons for semantic duplicates and contradictions')\n .action(cleanLessonsAction);\n}\n","/**\n * Retrieval commands: search, list, check-plan, load-session\n *\n * Commands for searching and retrieving lessons.\n */\n\nimport chalk from 'chalk';\nimport type { Command } from 'commander';\n\nimport { getRepoRoot, parseLimit } from '../cli-utils.js';\nimport { isModelAvailable, loadSessionLessons, retrieveForPlan } from '../index.js';\nimport { withEmbedding } from '../memory/embeddings/index.js';\nimport { incrementRetrievalCount, readLessons, readMemoryItems, searchKeyword, searchKeywordScored, syncIfNeeded } from '../memory/storage/index.js';\nimport type { MemoryItem } from '../memory/index.js';\nimport { CANDIDATE_MULTIPLIER, MIN_HYBRID_SCORE, mergeHybridResults, rankLessons, searchVector } from '../memory/search/index.js';\n\nimport { formatError } from '../cli-error-format.js';\n\nimport {\n AGE_FLAG_THRESHOLD_DAYS,\n DEFAULT_CHECK_PLAN_LIMIT,\n DEFAULT_LIST_LIMIT,\n DEFAULT_SEARCH_LIMIT,\n getGlobalOpts,\n getLessonAgeDays,\n ISO_DATE_PREFIX_LENGTH,\n LESSON_COUNT_WARNING_THRESHOLD,\n out,\n} from './shared.js';\n\nimport type { RankedLesson } from '../memory/search/index.js';\n\n/**\n * Parse numeric limit with user-friendly error output on invalid input.\n * Returns null on failure so callers can set exitCode and return.\n */\nfunction parseLimitOrNull(rawLimit: string, optionName: string, commandName: string): number | null {\n try {\n return parseLimit(rawLimit, optionName);\n } catch (err) {\n const message = err instanceof Error ? err.message : `Invalid ${optionName}`;\n console.error(formatError(commandName, 'INVALID_LIMIT', message, `Use --${optionName} with a positive integer`));\n return null;\n }\n}\n\n// ============================================================================\n// Check-Plan Command Helpers\n// ============================================================================\n\n/** Max stdin size for check-plan (1MB). */\nconst MAX_STDIN_BYTES = 1_048_576;\n/** Stdin read timeout (30 seconds). 
*/\nconst STDIN_TIMEOUT_MS = 30_000;\n\n/**\n * Read plan text from stdin (non-TTY mode).\n * Enforces a size limit and timeout to prevent hangs in CI/CD.\n */\nasync function readPlanFromStdin(): Promise<string | undefined> {\n const { stdin } = await import('node:process');\n if (!stdin.isTTY) {\n const chunks: Buffer[] = [];\n let totalBytes = 0;\n\n let timerId: ReturnType<typeof setTimeout> | undefined;\n const timeout = new Promise<never>((_, reject) => {\n timerId = setTimeout(() => reject(new Error('stdin read timed out after 30s')), STDIN_TIMEOUT_MS);\n });\n\n const read = (async (): Promise<string> => {\n for await (const chunk of stdin) {\n const buf = chunk as Buffer;\n totalBytes += buf.length;\n if (totalBytes > MAX_STDIN_BYTES) {\n throw new Error(`stdin exceeds ${MAX_STDIN_BYTES} byte limit`);\n }\n chunks.push(buf);\n }\n return Buffer.concat(chunks).toString('utf-8').trim();\n })();\n\n try {\n return await Promise.race([read, timeout]);\n } catch (err) {\n console.error(`Warning: ${err instanceof Error ? err.message : String(err)}`);\n return undefined;\n } finally {\n clearTimeout(timerId);\n }\n }\n return undefined;\n}\n\n/**\n * Output check-plan results in JSON format.\n *\n * Uses rankScore (final boosted score) instead of raw similarity.\n */\nfunction outputCheckPlanJson(lessons: RankedLesson[]): void {\n const jsonOutput = {\n lessons: lessons.map((l) => ({\n id: l.lesson.id,\n insight: l.lesson.insight,\n rankScore: l.finalScore ?? l.score, // Use finalScore if available, fallback to raw score\n source: l.lesson.source,\n })),\n count: lessons.length,\n };\n console.log(JSON.stringify(jsonOutput));\n}\n\n/**\n * Output check-plan results in human-readable format.\n *\n * Omits numeric scores - ordering is sufficient for human consumption.\n */\nfunction outputCheckPlanHuman(lessons: RankedLesson[], quiet: boolean): void {\n console.log('## Lessons Check\\n');\n console.log('Relevant to your plan:\\n');\n\n lessons.forEach((item, i) => {\n const num = i + 1;\n console.log(`${num}. ${chalk.bold(`[${item.lesson.id}]`)} ${item.lesson.insight}`);\n console.log(` - Source: ${item.lesson.source}`);\n console.log();\n });\n\n if (!quiet) {\n console.log('---');\n console.log('Consider these lessons while implementing.');\n }\n}\n\n// ============================================================================\n// Load-Session Command Helpers\n// ============================================================================\n\n/**\n * Format source string for human-readable display.\n * Converts snake_case to space-separated words.\n */\nfunction formatSource(source: string): string {\n return source.replace(/_/g, ' ');\n}\n\n/**\n * Output load-session results in human-readable format.\n * Optimized for Claude's context window - no IDs, clear structure.\n */\nfunction outputSessionLessonsHuman(lessons: MemoryItem[], quiet: boolean): void {\n console.log('## Lessons from Past Sessions\\n');\n console.log('These lessons were captured from previous corrections and should inform your work:\\n');\n\n lessons.forEach((lesson, i) => {\n const num = i + 1;\n const date = lesson.created.slice(0, ISO_DATE_PREFIX_LENGTH);\n const tagsDisplay = lesson.tags.length > 0 ? ` (${lesson.tags.join(', ')})` : '';\n\n console.log(`${num}. 
**${lesson.insight}**${tagsDisplay}`);\n console.log(` Learned: ${date} via ${formatSource(lesson.source)}`);\n console.log();\n });\n\n if (!quiet) {\n console.log('Consider these lessons when planning and implementing tasks.');\n }\n}\n\n// ============================================================================\n// Action Handlers\n// ============================================================================\n\nasync function searchAction(cmd: Command, query: string, options: { limit: string }): Promise<void> {\n const repoRoot = getRepoRoot();\n const limit = parseLimitOrNull(options.limit, 'limit', 'search');\n if (limit === null) {\n process.exitCode = 1;\n return;\n }\n const { verbose, quiet } = getGlobalOpts(cmd);\n\n await syncIfNeeded(repoRoot);\n\n const results = await withEmbedding(async () => {\n if (isModelAvailable()) {\n try {\n // Hybrid search: blend vector + keyword\n const candidateLimit = limit * CANDIDATE_MULTIPLIER;\n const [vectorResults, keywordResults] = await Promise.all([\n searchVector(repoRoot, query, { limit: candidateLimit }),\n searchKeywordScored(repoRoot, query, candidateLimit),\n ]);\n const merged = mergeHybridResults(vectorResults, keywordResults, { minScore: MIN_HYBRID_SCORE });\n const ranked = rankLessons(merged);\n return ranked.slice(0, limit).map((r) => r.lesson);\n } catch {\n // Model failed at runtime — fall back to keyword-only search\n return await searchKeyword(repoRoot, query, limit);\n }\n }\n // FTS-only fallback when embedding model unavailable\n return await searchKeyword(repoRoot, query, limit);\n });\n\n if (results.length > 0) {\n incrementRetrievalCount(repoRoot, results.map((lesson) => lesson.id));\n }\n\n if (results.length === 0) {\n console.log('No lessons match your search. Try a different query or use \"list\" to see all lessons.');\n return;\n }\n\n if (!quiet) {\n out.info(`Found ${results.length} lesson(s):\\n`);\n }\n for (const lesson of results) {\n console.log(`[${chalk.cyan(lesson.id)}] ${lesson.insight}`);\n console.log(` Trigger: ${lesson.trigger}`);\n if (verbose && lesson.context) {\n console.log(` Context: ${lesson.context.tool} - ${lesson.context.intent}`);\n console.log(` Created: ${lesson.created}`);\n }\n if (lesson.tags.length > 0) {\n console.log(` Tags: ${lesson.tags.join(', ')}`);\n }\n console.log();\n }\n}\n\nasync function listAction(cmd: Command, options: { limit: string; invalidated?: boolean }): Promise<void> {\n const repoRoot = getRepoRoot();\n const limit = parseLimitOrNull(options.limit, 'limit', 'list');\n if (limit === null) {\n process.exitCode = 1;\n return;\n }\n const { verbose, quiet } = getGlobalOpts(cmd);\n\n const { items, skippedCount } = await readMemoryItems(repoRoot);\n\n const filteredItems = options.invalidated\n ? items.filter((i) => i.invalidatedAt)\n : items;\n\n if (filteredItems.length === 0) {\n if (options.invalidated) {\n console.log('No invalidated lessons found.');\n } else {\n console.log('No lessons found. Get started with: learn \"Your first lesson\"');\n }\n if (skippedCount > 0) {\n out.warn(`${skippedCount} corrupted lesson(s) skipped.`);\n }\n return;\n }\n\n const toShow = filteredItems.slice(0, limit);\n\n if (!quiet) {\n const label = options.invalidated ? 'invalidated lesson(s)' : 'item(s)';\n out.info(`Showing ${toShow.length} of ${filteredItems.length} ${label}:\\n`);\n }\n\n for (const item of toShow) {\n const invalidMarker = item.invalidatedAt ? 
chalk.red('[INVALID] ') : '';\n console.log(`[${chalk.cyan(item.id)}] ${invalidMarker}${item.insight}`);\n if (verbose) {\n console.log(` Type: ${item.type} | Source: ${item.source}`);\n console.log(` Created: ${item.created}`);\n if (item.context) {\n console.log(` Context: ${item.context.tool} - ${item.context.intent}`);\n }\n if (item.invalidatedAt) {\n console.log(` Invalidated: ${item.invalidatedAt}`);\n if (item.invalidationReason) {\n console.log(` Reason: ${item.invalidationReason}`);\n }\n }\n } else {\n console.log(` Type: ${item.type} | Source: ${item.source}`);\n }\n if (item.tags.length > 0) {\n console.log(` Tags: ${item.tags.join(', ')}`);\n }\n console.log();\n }\n\n if (skippedCount > 0) {\n out.warn(`${skippedCount} corrupted lesson(s) skipped.`);\n }\n}\n\nasync function loadSessionAction(cmd: Command, options: { json?: boolean }): Promise<void> {\n const repoRoot = getRepoRoot();\n const { quiet } = getGlobalOpts(cmd);\n const lessons = await loadSessionLessons(repoRoot);\n\n const { lessons: allLessons } = await readLessons(repoRoot);\n const totalCount = allLessons.length;\n\n if (options.json) {\n console.log(JSON.stringify({ lessons, count: lessons.length, totalCount }));\n return;\n }\n\n if (lessons.length === 0) {\n console.log('No high-severity lessons found.');\n return;\n }\n\n outputSessionLessonsHuman(lessons, quiet);\n\n if (totalCount > LESSON_COUNT_WARNING_THRESHOLD) {\n console.log('');\n out.info(`${totalCount} lessons in index. Consider \\`ca compact\\` to reduce context pollution.`);\n }\n\n const oldLessons = lessons.filter((l) => getLessonAgeDays(l) > AGE_FLAG_THRESHOLD_DAYS);\n if (oldLessons.length > 0) {\n console.log('');\n out.warn(`${oldLessons.length} lesson(s) are over ${AGE_FLAG_THRESHOLD_DAYS} days old. Review for continued validity.`);\n }\n}\n\nasync function checkPlanAction(cmd: Command, options: { plan?: string; json?: boolean; limit: string }): Promise<void> {\n const repoRoot = getRepoRoot();\n const limit = parseLimitOrNull(options.limit, 'limit', 'check-plan');\n if (limit === null) {\n process.exitCode = 1;\n return;\n }\n const { quiet } = getGlobalOpts(cmd);\n\n const planText = options.plan ?? (await readPlanFromStdin());\n\n if (!planText) {\n console.error(formatError('check-plan', 'NO_PLAN', 'No plan provided', 'Use --plan <text> or pipe text to stdin'));\n process.exitCode = 1;\n return;\n }\n\n await syncIfNeeded(repoRoot);\n\n if (!isModelAvailable()) {\n if (options.json) {\n console.log(JSON.stringify({\n lessons: [],\n count: 0,\n error: 'Embedding model not found',\n action: 'Run: npx ca download-model',\n }));\n } else {\n console.error(formatError('check-plan', 'MODEL_UNAVAILABLE', 'Embedding model not found', 'Run: npx ca download-model'));\n }\n process.exitCode = 1;\n return;\n }\n\n try {\n const result = await withEmbedding(async () => retrieveForPlan(repoRoot, planText, limit));\n\n if (options.json) {\n outputCheckPlanJson(result.lessons);\n return;\n }\n\n if (result.lessons.length === 0) {\n console.log('No relevant lessons found for this plan.');\n return;\n }\n\n outputCheckPlanHuman(result.lessons, quiet);\n } catch (err) {\n const message = err instanceof Error ? 
err.message : 'Unknown error';\n if (options.json) {\n console.log(JSON.stringify({\n lessons: [],\n count: 0,\n error: message,\n }));\n } else {\n console.error(formatError('check-plan', 'PLAN_CHECK_FAILED', message, 'Check model installation and try again'));\n }\n process.exitCode = 1;\n }\n}\n\n// ============================================================================\n// Command Registration\n// ============================================================================\n\n/**\n * Register retrieval commands (search, list, check-plan, load-session) on the program.\n */\nexport function registerRetrievalCommands(program: Command): void {\n program\n .command('search <query>')\n .description('Search lessons')\n .option('-n, --limit <number>', 'Maximum results', DEFAULT_SEARCH_LIMIT)\n .action(async function (this: Command, query: string, options: { limit: string }) {\n await searchAction(this, query, options);\n });\n\n program\n .command('list')\n .description('List all lessons')\n .option('-n, --limit <number>', 'Maximum results', DEFAULT_LIST_LIMIT)\n .option('--invalidated', 'Show only invalidated lessons')\n .action(async function (this: Command, options: { limit: string; invalidated?: boolean }) {\n await listAction(this, options);\n });\n\n program\n .command('load-session')\n .description('Load high-severity lessons for session context')\n .option('--json', 'Output as JSON')\n .action(async function (this: Command, options: { json?: boolean }) {\n await loadSessionAction(this, options);\n });\n\n program\n .command('check-plan')\n .description('Check plan against relevant lessons')\n .option('--plan <text>', 'Plan text to check')\n .option('--json', 'Output as JSON')\n .option('-n, --limit <number>', 'Maximum results', DEFAULT_CHECK_PLAN_LIMIT)\n .action(async function (this: Command, options: { plan?: string; json?: boolean; limit: string }) {\n await checkPlanAction(this, options);\n });\n}\n","/**\n * Linter detection utility.\n *\n * Scans a directory for linter config files and returns\n * info about the first detected linter.\n */\n\nimport { readFileSync, statSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport { z } from 'zod';\n\n/** Supported linter identifiers. */\nexport const LinterNameSchema = z.enum([\n 'eslint',\n 'ruff',\n 'clippy',\n 'golangci-lint',\n 'ast-grep',\n 'semgrep',\n 'unknown',\n]);\n\n/** Result of linter detection. */\nexport const LinterInfoSchema = z.object({\n linter: LinterNameSchema,\n configPath: z.string().nullable(),\n});\n\nexport type LinterInfo = z.infer<typeof LinterInfoSchema>;\nexport type LinterName = z.infer<typeof LinterNameSchema>;\n\n/** Detection rules in priority order. Each entry maps a linter to its config filenames. 
*/\nconst DETECTION_RULES: Array<{\n linter: z.infer<typeof LinterNameSchema>;\n configs: string[];\n}> = [\n {\n linter: 'eslint',\n configs: [\n // Flat config (ESLint v9+)\n 'eslint.config.js',\n 'eslint.config.mjs',\n 'eslint.config.cjs',\n 'eslint.config.ts',\n 'eslint.config.mts',\n 'eslint.config.cts',\n // Legacy config\n '.eslintrc.js',\n '.eslintrc.cjs',\n '.eslintrc.json',\n '.eslintrc.yml',\n '.eslintrc.yaml',\n ],\n },\n {\n linter: 'ruff',\n configs: ['ruff.toml', '.ruff.toml'],\n },\n {\n linter: 'clippy',\n configs: ['clippy.toml', '.clippy.toml'],\n },\n {\n linter: 'golangci-lint',\n configs: ['.golangci.yml', '.golangci.yaml', '.golangci.toml', '.golangci.json'],\n },\n {\n linter: 'ast-grep',\n configs: ['sgconfig.yml'],\n },\n {\n linter: 'semgrep',\n configs: ['.semgrep.yml', '.semgrep.yaml'],\n },\n];\n\n/** Return a fresh unknown result (avoids shared mutable reference). */\nfunction unknown(): LinterInfo {\n return { linter: 'unknown', configPath: null };\n}\n\n/** Returns true only if the path exists AND is a regular file (not a directory). */\nfunction isFile(filePath: string): boolean {\n try {\n return statSync(filePath).isFile();\n } catch {\n return false;\n }\n}\n\n/**\n * Check if pyproject.toml contains a [tool.ruff] or [tool.ruff.*] section.\n * Returns true if found, false otherwise (including on read errors).\n */\nfunction pyprojectHasRuff(repoRoot: string): boolean {\n const filePath = join(repoRoot, 'pyproject.toml');\n try {\n const content = readFileSync(filePath, 'utf-8');\n // Match [tool.ruff] or any subsection like [tool.ruff.lint], [tool.ruff.format], etc.\n return /^\\s*\\[tool\\.ruff\\b/m.test(content);\n } catch {\n return false;\n }\n}\n\n/**\n * Detect the linter used in a repository by scanning for config files.\n *\n * Checks linters in priority order; first match wins.\n * Returns `{ linter: 'unknown', configPath: null }` if nothing found.\n */\nexport function detectLinter(repoRoot: string): LinterInfo {\n try {\n for (const rule of DETECTION_RULES) {\n for (const config of rule.configs) {\n if (isFile(join(repoRoot, config))) {\n return { linter: rule.linter, configPath: config };\n }\n }\n\n // Special case: Ruff can also live inside pyproject.toml\n if (rule.linter === 'ruff' && pyprojectHasRuff(repoRoot)) {\n return { linter: 'ruff', configPath: 'pyproject.toml' };\n }\n }\n } catch {\n // Graceful degradation on any unexpected error\n return unknown();\n }\n\n return unknown();\n}\n"]}