docusaurus-plugin-mcp-server 0.5.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../src/search/flexsearch-indexer.ts","../../src/mcp/tools/docs-search.ts","../../src/mcp/tools/docs-get-page.ts","../../src/processing/heading-extractor.ts","../../src/mcp/tools/docs-get-section.ts","../../src/mcp/server.ts","../../src/cli/verify.ts"],"names":["results","lines","fs"],"mappings":";;;;;;;;;AAUA,IAAM,aAAA,GAAgB;AAAA,EACpB,KAAA,EAAO,CAAA;AAAA,EACP,QAAA,EAAU,CAAA;AAAA,EACV,WAAA,EAAa,GAAA;AAAA,EACb,OAAA,EAAS;AACX,CAAA;AAMA,SAAS,eAAe,IAAA,EAAsB;AAE5C,EAAA,IAAI,IAAA,CAAK,MAAA,IAAU,CAAA,EAAG,OAAO,IAAA;AAE7B,EAAA,OACE,KAEG,OAAA,CAAQ,MAAA,EAAQ,EAAE,CAAA,CAElB,QAAQ,OAAA,EAAS,GAAG,CAAA,CACpB,OAAA,CAAQ,SAAS,GAAG,CAAA,CAEpB,QAAQ,eAAA,EAAiB,IAAI,EAE7B,OAAA,CAAQ,eAAA,EAAiB,IAAI,CAAA,CAE7B,QAAQ,KAAA,EAAO,EAAE,EAEjB,OAAA,CAAQ,OAAA,EAAS,EAAE,CAAA,CAEnB,OAAA,CAAQ,OAAA,EAAS,EAAE,EAEnB,OAAA,CAAQ,MAAA,EAAQ,GAAG,CAAA,CAEnB,OAAA,CAAQ,YAAY,IAAI,CAAA;AAE/B;AAWO,SAAS,iBAAA,GAAwC;AACtD,EAAA,OAAO,IAAI,WAAW,QAAA,CAAsC;AAAA;AAAA;AAAA,IAG1D,QAAA,EAAU,MAAA;AAAA;AAAA,IAGV,KAAA,EAAO,GAAA;AAAA;AAAA,IAGP,UAAA,EAAY,CAAA;AAAA;AAAA,IAGZ,OAAA,EAAS;AAAA,MACP,UAAA,EAAY,CAAA;AAAA,MACZ,KAAA,EAAO,CAAA;AAAA,MACP,aAAA,EAAe;AAAA,KACjB;AAAA;AAAA,IAGA,MAAA,EAAQ,CAAC,GAAA,KAAgB;AAEvB,MAAA,MAAM,KAAA,GAAQ,GAAA,CAAI,WAAA,EAAY,CAAE,MAAM,yBAAyB,CAAA;AAE/D,MAAA,OAAO,KAAA,CAAM,MAAA,CAAO,OAAO,CAAA,CAAE,IAAI,cAAc,CAAA;AAAA,IACjD,CAAA;AAAA;AAAA,IAGA,QAAA,EAAU;AAAA,MACR,EAAA,EAAI,IAAA;AAAA;AAAA,MAEJ,KAAA,EAAO,CAAC,OAAA,EAAS,SAAA,EAAW,YAAY,aAAa,CAAA;AAAA;AAAA,MAErD,KAAA,EAAO,CAAC,OAAA,EAAS,aAAa;AAAA;AAChC,GACD,CAAA;AACH;AAqCO,SAAS,YACd,KAAA,EACA,IAAA,EACA,KAAA,EACA,OAAA,GAA8B,EAAC,EACf;AAChB,EAAA,MAAM,EAAE,KAAA,GAAQ,CAAA,EAAE,GAAI,OAAA;AAGtB,EAAA,MAAM,UAAA,GAAa,KAAA,CAAM,MAAA,CAAO,KAAA,EAAO;AAAA,IACrC,OAAO,KAAA,GAAQ,CAAA;AAAA;AAAA,IACf,MAAA,EAAQ;AAAA,GACT,CAAA;AAGD,EAAA,MAAM,SAAA,uBAAgB,GAAA,EAAoB;AAE1C,EAAA,KAAA,MAAW,eAAe,UAAA,EAAY;AAEpC,IAAA,MAAM,QAAQ,WAAA,CAAY,KAAA;AAC1B,IAAA,MAAM,WAAA,GAAc,aAAA,CAAc,KAAK,CAAA,IAAK,CAAA;AAG5C,IAAA,MAAMA,WAAU,WAAA,CAAY,MAAA;AAE5B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAIA,QAAAA,CAAQ,QAAQ,CAAA,EAAA,EAAK;AACvC,MAAA,MAAM,IAAA,GAAOA,SAAQ,CAAC,CAAA;AACtB,MAAA,IAAI,CAAC,IAAA,EAAM;AAEX,MAAA,MAAM,KAAA,GAAQ,OAAO,IAAA,KAAS,QAAA,GAAW,OAAO,IAAA,CAAK,EAAA;AAGrD,MAAA,MAAM,aAAA,GAAA,CAAiBA,QAAAA,CAAQ,MAAA,GAAS,CAAA,IAAKA,QAAAA,CAAQ,MAAA;AAGrD,MAAA,MAAM,gBAAgB,aAAA,GAAgB,WAAA;AAGtC,MAAA,MAAM,aAAA,GAAgB,SAAA,CAAU,GAAA,CAAI,KAAK,CAAA,IAAK,CAAA;AAC9C,MAAA,SAAA,CAAU,GAAA,CAAI,KAAA,EAAO,aAAA,GAAgB,aAAa,CAAA;AAAA,IACpD;AAAA,EACF;AAGA,EAAA,MAAM,UAA0B,EAAC;AAEjC,EAAA,KAAA,MAAW,CAAC,KAAA,EAAO,KAAK,CAAA,IAAK,SAAA,EAAW;AACtC,IAAA,MAAM,GAAA,GAAM,KAAK,KAAK,CAAA;AACtB,IAAA,IAAI,CAAC,GAAA,EAAK;AAEV,IAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,MACX,OAAO,GAAA,CAAI,KAAA;AAAA,MACX,OAAO,GAAA,CAAI,KAAA;AAAA,MACX,KAAA;AAAA,MACA,OAAA,EAAS,eAAA,CAAgB,GAAA,CAAI,QAAA,EAAU,KAAK,CAAA;AAAA,MAC5C,gBAAA,EAAkB,oBAAA,CAAqB,GAAA,EAAK,KAAK;AAAA,KAClD,CAAA;AAAA,EACH;AAGA,EAAA,OAAA,CAAQ,KAAK,CAAC,CAAA,EAAG,MAAM,CAAA,CAAE,KAAA,GAAQ,EAAE,KAAK,CAAA;AACxC,EAAA,OAAO,OAAA,CAAQ,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AAC/B;AAKO,SAAS,eAAA,CAAgB,UAAkB,KAAA,EAAuB;AACvE,EAAA,MAAM,SAAA,GAAY,GAAA;AAClB,EAAA,MAAM,UAAA,GAAa,MAAM,WAAA,EAAY,CAAE,MAAM,KAAK,CAAA,CAAE,OAAO,OAAO,CAAA;AAElE,EAAA,IAAI,UAAA,CAAW,WAAW,CAAA,EAAG;AAC3B,IAAA,OAAO,QAAA,CAAS,MAAM,CAAA,EAAG,SAAS,KAAK,QAAA,CAAS,MAAA,GAAS,YAAY,KAAA,GAAQ,EAAA,CAAA;AAAA,EAC/E;AAEA,EAAA,MAAM,aAAA,GAAgB,SAAS,WAAA,EAAY;AAC3C,EAAA,IAAI,SAAA,GAAY,EAAA;AAChB,EAAA,IAAI,QAAA,GAAW,EAAA;AAGf,EAAA,MAAM,QAAA,GAAW,CAAC,GAAG,UAAA,EAAY,GAAG,UAAA,CAAW,GAAA,CAAI,cAAc,CAAC,CAAA;AAElE,EAAA,KAAA,MAAW,QAAQ,QAAA,EAAU;AAC3B,IAAA,MAAM,KAA
A,GAAQ,aAAA,CAAc,OAAA,CAAQ,IAAI,CAAA;AACxC,IAAA,IAAI,KAAA,KAAU,EAAA,KAAO,SAAA,KAAc,EAAA,IAAM,QAAQ,SAAA,CAAA,EAAY;AAC3D,MAAA,SAAA,GAAY,KAAA;AACZ,MAAA,QAAA,GAAW,IAAA;AAAA,IACb;AAAA,EACF;AAEA,EAAA,IAAI,cAAc,EAAA,EAAI;AAEpB,IAAA,OAAO,QAAA,CAAS,MAAM,CAAA,EAAG,SAAS,KAAK,QAAA,CAAS,MAAA,GAAS,YAAY,KAAA,GAAQ,EAAA,CAAA;AAAA,EAC/E;AAEA,EAAA,MAAM,YAAA,GAAe,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,YAAY,EAAE,CAAA;AAC/C,EAAA,MAAM,UAAA,GAAa,KAAK,GAAA,CAAI,QAAA,CAAS,QAAQ,SAAA,GAAY,QAAA,CAAS,SAAS,GAAG,CAAA;AAE9E,EAAA,IAAI,OAAA,GAAU,QAAA,CAAS,KAAA,CAAM,YAAA,EAAc,UAAU,CAAA;AAErD,EAAA,OAAA,GAAU,OAAA,CAEP,OAAA,CAAQ,cAAA,EAAgB,EAAE,CAAA,CAE1B,QAAQ,wBAAA,EAA0B,IAAI,CAAA,CAEtC,OAAA,CAAQ,yBAAA,EAA2B,EAAE,EAErC,OAAA,CAAQ,eAAA,EAAiB,EAAE,CAAA,CAE3B,OAAA,CAAQ,YAAA,EAAc,IAAI,CAAA,CAE1B,OAAA,CAAQ,MAAA,EAAQ,GAAG,CAAA,CACnB,IAAA,EAAK;AAER,EAAA,MAAM,MAAA,GAAS,YAAA,GAAe,CAAA,GAAI,KAAA,GAAQ,EAAA;AAC1C,EAAA,MAAM,MAAA,GAAS,UAAA,GAAa,QAAA,CAAS,MAAA,GAAS,KAAA,GAAQ,EAAA;AAEtD,EAAA,OAAO,SAAS,OAAA,GAAU,MAAA;AAC5B;AAKA,SAAS,oBAAA,CAAqB,KAAmB,KAAA,EAAyB;AACxE,EAAA,MAAM,UAAA,GAAa,MAAM,WAAA,EAAY,CAAE,MAAM,KAAK,CAAA,CAAE,OAAO,OAAO,CAAA;AAElE,EAAA,MAAM,QAAA,GAAW,CAAC,GAAG,UAAA,EAAY,GAAG,UAAA,CAAW,GAAA,CAAI,cAAc,CAAC,CAAA;AAClE,EAAA,MAAM,WAAqB,EAAC;AAE5B,EAAA,KAAA,MAAW,OAAA,IAAW,IAAI,QAAA,EAAU;AAClC,IAAA,MAAM,YAAA,GAAe,OAAA,CAAQ,IAAA,CAAK,WAAA,EAAY;AAC9C,IAAA,MAAM,cAAA,GAAiB,aAAa,KAAA,CAAM,KAAK,EAAE,GAAA,CAAI,cAAc,CAAA,CAAE,IAAA,CAAK,GAAG,CAAA;AAG7E,IAAA,IACE,QAAA,CAAS,IAAA;AAAA,MACP,CAAC,IAAA,KAAS,YAAA,CAAa,QAAA,CAAS,IAAI,KAAK,cAAA,CAAe,QAAA,CAAS,cAAA,CAAe,IAAI,CAAC;AAAA,KACvF,EACA;AACA,MAAA,QAAA,CAAS,IAAA,CAAK,QAAQ,IAAI,CAAA;AAAA,IAC5B;AAAA,EACF;AAEA,EAAA,OAAO,QAAA,CAAS,KAAA,CAAM,CAAA,EAAG,CAAC,CAAA;AAC5B;AAkBA,eAAsB,kBACpB,IAAA,EAC6B;AAC7B,EAAA,MAAM,QAAQ,iBAAA,EAAkB;AAEhC,EAAA,KAAA,MAAW,CAAC,GAAA,EAAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,IAAI,CAAA,EAAG;AAE/C,IAAA,MAAO,KAAA,CAA+E,MAAA;AAAA,MACpF,GAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AAEA,EAAA,OAAO,KAAA;AACT;;;ACtRO,SAAS,iBAAA,CACd,MAAA,EACA,KAAA,EACA,IAAA,EACgB;AAChB,EAAA,MAAM,EAAE,KAAA,EAAO,KAAA,GAAQ,CAAA,EAAE,GAAI,MAAA;AAG7B,EAAA,IAAI,CAAC,SAAS,OAAO,KAAA,KAAU,YAAY,KAAA,CAAM,IAAA,EAAK,CAAE,MAAA,KAAW,CAAA,EAAG;AACpE,IAAA,MAAM,IAAI,MAAM,4DAA4D,CAAA;AAAA,EAC9E;AAEA,EAAA,MAAM,cAAA,GAAiB,KAAK,GAAA,CAAI,IAAA,CAAK,IAAI,CAAA,EAAG,KAAK,GAAG,EAAE,CAAA;AAGtD,EAAA,MAAM,UAAU,WAAA,CAAY,KAAA,EAAO,IAAA,EAAM,KAAA,CAAM,MAAK,EAAG;AAAA,IACrD,KAAA,EAAO;AAAA,GACR,CAAA;AAED,EAAA,OAAO,OAAA;AACT;AAKO,SAAS,mBAAA,CAAoB,SAAyB,OAAA,EAA0B;AACrF,EAAA,IAAI,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxB,IAAA,OAAO,8BAAA;AAAA,EACT;AAEA,EAAA,MAAM,KAAA,GAAkB,CAAC,CAAA,MAAA,EAAS,OAAA,CAAQ,MAAM,CAAA;AAAA,CAAe,CAAA;AAE/D,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,OAAA,CAAQ,QAAQ,CAAA,EAAA,EAAK;AACvC,IAAA,MAAM,MAAA,GAAS,QAAQ,CAAC,CAAA;AACxB,IAAA,IAAI,CAAC,MAAA,EAAQ;AAEb,IAAA,KAAA,CAAM,KAAK,CAAA,EAAG,CAAA,GAAI,CAAC,CAAA,IAAA,EAAO,MAAA,CAAO,KAAK,CAAA,EAAA,CAAI,CAAA;AAG1C,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,MAAM,OAAA,GAAU,GAAG,OAAA,CAAQ,OAAA,CAAQ,OAAO,EAAE,CAAC,CAAA,EAAG,MAAA,CAAO,KAAK,CAAA,CAAA;AAC5D,MAAA,KAAA,CAAM,IAAA,CAAK,CAAA,QAAA,EAAW,OAAO,CAAA,CAAE,CAAA;AAAA,IACjC;AAEA,IAAA,KAAA,CAAM,IAAA,CAAK,CAAA,UAAA,EAAa,MAAA,CAAO,KAAK,CAAA,CAAE,CAAA;AAEtC,IAAA,IAAI,MAAA,CAAO,gBAAA,IAAoB,MAAA,CAAO,gBAAA,CAAiB,SAAS,CAAA,EAAG;AACjE,MAAA,KAAA,CAAM,KAAK,CAAA,sBAAA,EAAyB,MAAA,CAAO,iBAAiB,IAAA,CAAK,IAAI,CAAC,CAAA,CAAE,CAAA;AAAA,IAC1E;AAEA,IAAA,KAAA,CAAM,IAAA,CAAK,CAAA,GAAA,EAAM,MAAA,CAAO,OAAO,CAAA,CAAE,CAAA;AACjC,IAAA,KAAA,CAAM,KAAK,EAAE,CAAA;AAAA,EACf;AAEA,EAAA,OAAO,KAAA,CAAM,KAAK,IAAI,CAAA;AACxB;;;AC7DO,SAAS,kBAAA,CACd,QACA,IAAA,EACqB;AACrB,EAAA,MAAM,EAAE,OAAM,GAAI,MAAA;AAGlB,EAAA,
IAAI,CAAC,KAAA,IAAS,OAAO,KAAA,KAAU,QAAA,EAAU;AACvC,IAAA,MAAM,IAAI,MAAM,kDAAkD,CAAA;AAAA,EACpE;AAGA,EAAA,IAAI,eAAA,GAAkB,MAAM,IAAA,EAAK;AACjC,EAAA,IAAI,CAAC,eAAA,CAAgB,UAAA,CAAW,GAAG,CAAA,EAAG;AACpC,IAAA,eAAA,GAAkB,GAAA,GAAM,eAAA;AAAA,EAC1B;AACA,EAAA,IAAI,gBAAgB,MAAA,GAAS,CAAA,IAAK,eAAA,CAAgB,QAAA,CAAS,GAAG,CAAA,EAAG;AAC/D,IAAA,eAAA,GAAkB,eAAA,CAAgB,KAAA,CAAM,CAAA,EAAG,EAAE,CAAA;AAAA,EAC/C;AAGA,EAAA,MAAM,GAAA,GAAM,KAAK,eAAe,CAAA;AAEhC,EAAA,IAAI,CAAC,GAAA,EAAK;AAER,IAAA,MAAM,QAAA,GAAW,eAAA,CAAgB,KAAA,CAAM,CAAC,CAAA;AACxC,IAAA,IAAI,IAAA,CAAK,QAAQ,CAAA,EAAG;AAClB,MAAA,OAAO,IAAA,CAAK,QAAQ,CAAA,IAAK,IAAA;AAAA,IAC3B;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,GAAA;AACT;AAKO,SAAS,iBAAA,CAAkB,KAA0B,OAAA,EAA0B;AACpF,EAAA,IAAI,CAAC,GAAA,EAAK;AACR,IAAA,OAAO,4DAAA;AAAA,EACT;AAEA,EAAA,MAAM,QAAkB,EAAC;AAGzB,EAAA,KAAA,CAAM,IAAA,CAAK,CAAA,EAAA,EAAK,GAAA,CAAI,KAAK,CAAA,CAAE,CAAA;AAC3B,EAAA,KAAA,CAAM,KAAK,EAAE,CAAA;AAGb,EAAA,IAAI,IAAI,WAAA,EAAa;AACnB,IAAA,KAAA,CAAM,IAAA,CAAK,CAAA,EAAA,EAAK,GAAA,CAAI,WAAW,CAAA,CAAE,CAAA;AACjC,IAAA,KAAA,CAAM,KAAK,EAAE,CAAA;AAAA,EACf;AAGA,EAAA,IAAI,OAAA,EAAS;AACX,IAAA,MAAM,OAAA,GAAU,GAAG,OAAA,CAAQ,OAAA,CAAQ,OAAO,EAAE,CAAC,CAAA,EAAG,GAAA,CAAI,KAAK,CAAA,CAAA;AACzD,IAAA,KAAA,CAAM,IAAA,CAAK,CAAA,SAAA,EAAY,OAAO,CAAA,CAAE,CAAA;AAAA,EAClC;AAEA,EAAA,KAAA,CAAM,IAAA,CAAK,CAAA,WAAA,EAAc,GAAA,CAAI,KAAK,CAAA,CAAE,CAAA;AACpC,EAAA,KAAA,CAAM,KAAK,EAAE,CAAA;AAGb,EAAA,IAAI,GAAA,CAAI,QAAA,CAAS,MAAA,GAAS,CAAA,EAAG;AAC3B,IAAA,KAAA,CAAM,KAAK,aAAa,CAAA;AACxB,IAAA,KAAA,CAAM,KAAK,EAAE,CAAA;AACb,IAAA,KAAA,MAAW,OAAA,IAAW,IAAI,QAAA,EAAU;AAClC,MAAA,IAAI,OAAA,CAAQ,SAAS,CAAA,EAAG;AACtB,QAAA,MAAM,MAAA,GAAS,IAAA,CAAK,MAAA,CAAO,OAAA,CAAQ,QAAQ,CAAC,CAAA;AAC5C,QAAA,KAAA,CAAM,IAAA,CAAK,GAAG,MAAM,CAAA,GAAA,EAAM,QAAQ,IAAI,CAAA,GAAA,EAAM,OAAA,CAAQ,EAAE,CAAA,CAAA,CAAG,CAAA;AAAA,MAC3D;AAAA,IACF;AACA,IAAA,KAAA,CAAM,KAAK,EAAE,CAAA;AACb,IAAA,KAAA,CAAM,KAAK,KAAK,CAAA;AAChB,IAAA,KAAA,CAAM,KAAK,EAAE,CAAA;AAAA,EACf;AAGA,EAAA,KAAA,CAAM,IAAA,CAAK,IAAI,QAAQ,CAAA;AAEvB,EAAA,OAAO,KAAA,CAAM,KAAK,IAAI,CAAA;AACxB;;;ACtBO,SAAS,cAAA,CACd,QAAA,EACA,SAAA,EACA,QAAA,EACe;AACf,EAAA,MAAM,UAAU,QAAA,CAAS,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,OAAO,SAAS,CAAA;AAEvD,EAAA,IAAI,CAAC,OAAA,EAAS;AACZ,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,SAAS,KAAA,CAAM,OAAA,CAAQ,aAAa,OAAA,CAAQ,SAAS,EAAE,IAAA,EAAK;AACrE;;;ACtDO,SAAS,qBAAA,CACd,QACA,IAAA,EACe;AACf,EAAA,MAAM,EAAE,KAAA,EAAO,SAAA,EAAU,GAAI,MAAA;AAG7B,EAAA,IAAI,CAAC,KAAA,IAAS,OAAO,KAAA,KAAU,QAAA,EAAU;AACvC,IAAA,MAAM,IAAI,MAAM,kDAAkD,CAAA;AAAA,EACpE;AACA,EAAA,IAAI,CAAC,SAAA,IAAa,OAAO,SAAA,KAAc,QAAA,EAAU;AAC/C,IAAA,MAAM,IAAI,MAAM,sDAAsD,CAAA;AAAA,EACxE;AAGA,EAAA,IAAI,eAAA,GAAkB,MAAM,IAAA,EAAK;AACjC,EAAA,IAAI,CAAC,eAAA,CAAgB,UAAA,CAAW,GAAG,CAAA,EAAG;AACpC,IAAA,eAAA,GAAkB,GAAA,GAAM,eAAA;AAAA,EAC1B;AACA,EAAA,IAAI,gBAAgB,MAAA,GAAS,CAAA,IAAK,eAAA,CAAgB,QAAA,CAAS,GAAG,CAAA,EAAG;AAC/D,IAAA,eAAA,GAAkB,eAAA,CAAgB,KAAA,CAAM,CAAA,EAAG,EAAE,CAAA;AAAA,EAC/C;AAGA,EAAA,MAAM,GAAA,GAAM,KAAK,eAAe,CAAA;AAEhC,EAAA,IAAI,CAAC,GAAA,EAAK;AACR,IAAA,OAAO;AAAA,MACL,OAAA,EAAS,IAAA;AAAA,MACT,GAAA,EAAK,IAAA;AAAA,MACL,WAAA,EAAa,IAAA;AAAA,MACb,mBAAmB;AAAC,KACtB;AAAA,EACF;AAGA,EAAA,MAAM,iBAAA,GAAoB,GAAA,CAAI,QAAA,CAAS,GAAA,CAAI,CAAC,CAAA,MAAO;AAAA,IACjD,IAAI,CAAA,CAAE,EAAA;AAAA,IACN,MAAM,CAAA,CAAE,IAAA;AAAA,IACR,OAAO,CAAA,CAAE;AAAA,GACX,CAAE,CAAA;AAGF,EAAA,MAAM,OAAA,GAAU,GAAA,CAAI,QAAA,CAAS,IAAA,CAAK,CAAC,MAAM,CAAA,CAAE,EAAA,KAAO,SAAA,CAAU,IAAA,EAAM,CAAA;AAElE,EAAA,IAAI,CAAC,OAAA,EAAS;AACZ,IAAA,OAAO;AAAA,MACL,OAAA,EAAS,IAAA;AAAA,MACT,GAAA;AAAA,MACA,WAAA,EAAa,IAAA;AAAA,MACb;AAAA,KACF;AAAA,EACF;AAGA,EAAA,MAAM,OAAA,GAAU,eAAe,GAAA,CAAI,QAAA,EAAU,U
AAU,IAAA,EAAK,EAAG,IAAI,QAAQ,CAAA;AAE3E,EAAA,OAAO;AAAA,IACL,OAAA;AAAA,IACA,GAAA;AAAA,IACA,aAAa,OAAA,CAAQ,IAAA;AAAA,IACrB;AAAA,GACF;AACF;AAKO,SAAS,oBAAA,CACd,MAAA,EACA,SAAA,EACA,OAAA,EACQ;AACR,EAAA,IAAI,CAAC,OAAO,GAAA,EAAK;AACf,IAAA,OAAO,4DAAA;AAAA,EACT;AAEA,EAAA,IAAI,CAAC,OAAO,OAAA,EAAS;AACnB,IAAA,MAAMC,SAAQ,CAAC,CAAA,SAAA,EAAY,SAAS,CAAA,6BAAA,CAAA,EAAiC,IAAI,qBAAqB,CAAA;AAE9F,IAAA,KAAA,MAAW,OAAA,IAAW,OAAO,iBAAA,EAAmB;AAC9C,MAAA,MAAM,MAAA,GAAS,IAAA,CAAK,MAAA,CAAO,OAAA,CAAQ,QAAQ,CAAC,CAAA;AAC5C,MAAAA,MAAAA,CAAM,IAAA,CAAK,CAAA,EAAG,MAAM,CAAA,EAAA,EAAK,QAAQ,IAAI,CAAA,MAAA,EAAS,OAAA,CAAQ,EAAE,CAAA,CAAA,CAAG,CAAA;AAAA,IAC7D;AAEA,IAAA,OAAOA,MAAAA,CAAM,KAAK,IAAI,CAAA;AAAA,EACxB;AAEA,EAAA,MAAM,QAAkB,EAAC;AAGzB,EAAA,MAAM,OAAA,GAAU,OAAA,GAAU,CAAA,EAAG,OAAA,CAAQ,QAAQ,KAAA,EAAO,EAAE,CAAC,CAAA,EAAG,MAAA,CAAO,GAAA,CAAI,KAAK,CAAA,CAAA,EAAI,SAAS,CAAA,CAAA,GAAK,IAAA;AAG5F,EAAA,KAAA,CAAM,IAAA,CAAK,CAAA,EAAA,EAAK,MAAA,CAAO,WAAW,CAAA,CAAE,CAAA;AACpC,EAAA,IAAI,OAAA,EAAS;AACX,IAAA,KAAA,CAAM,KAAK,CAAA,QAAA,EAAW,MAAA,CAAO,IAAI,KAAK,CAAA,GAAA,EAAM,OAAO,CAAA,CAAE,CAAA;AAAA,EACvD,CAAA,MAAO;AACL,IAAA,KAAA,CAAM,IAAA,CAAK,WAAW,MAAA,CAAO,GAAA,CAAI,KAAK,CAAA,EAAA,EAAK,MAAA,CAAO,GAAA,CAAI,KAAK,CAAA,CAAA,CAAG,CAAA;AAAA,EAChE;AACA,EAAA,KAAA,CAAM,KAAK,EAAE,CAAA;AACb,EAAA,KAAA,CAAM,KAAK,KAAK,CAAA;AAChB,EAAA,KAAA,CAAM,KAAK,EAAE,CAAA;AAGb,EAAA,KAAA,CAAM,IAAA,CAAK,OAAO,OAAO,CAAA;AAEzB,EAAA,OAAO,KAAA,CAAM,KAAK,IAAI,CAAA;AACxB;;;ACnIA,SAAS,aAAa,MAAA,EAAwD;AAC5E,EAAA,OAAO,UAAA,IAAc,UAAU,WAAA,IAAe,MAAA;AAChD;AAKA,SAAS,aAAa,MAAA,EAAwD;AAC5E,EAAA,OAAO,MAAA,IAAU,UAAU,iBAAA,IAAqB,MAAA;AAClD;AAcO,IAAM,gBAAN,MAAoB;AAAA,EACjB,MAAA;AAAA,EACA,IAAA,GAA4C,IAAA;AAAA,EAC5C,WAAA,GAAyC,IAAA;AAAA,EACzC,SAAA;AAAA,EACA,WAAA,GAAc,KAAA;AAAA,EAEtB,YAAY,MAAA,EAAyB;AACnC,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AAGd,IAAA,IAAA,CAAK,YAAY,IAAI,SAAA;AAAA,MACnB;AAAA,QACE,MAAM,MAAA,CAAO,IAAA;AAAA,QACb,OAAA,EAAS,OAAO,OAAA,IAAW;AAAA,OAC7B;AAAA,MACA;AAAA,QACE,YAAA,EAAc;AAAA,UACZ,OAAO;AAAC;AACV;AACF,KACF;AAEA,IAAA,IAAA,CAAK,aAAA,EAAc;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAA,GAAsB;AAE5B,IAAA,IAAA,CAAK,SAAA,CAAU,YAAA;AAAA,MACb,aAAA;AAAA,MACA;AAAA,QACE,WAAA,EACE,oKAAA;AAAA,QACF,WAAA,EAAa;AAAA,UACX,KAAA,EAAO,EAAE,MAAA,EAAO,CAAE,IAAI,CAAC,CAAA,CAAE,SAAS,yBAAyB,CAAA;AAAA,UAC3D,OAAO,CAAA,CACJ,MAAA,GACA,GAAA,EAAI,CACJ,IAAI,CAAC,CAAA,CACL,GAAA,CAAI,EAAE,EACN,QAAA,EAAS,CACT,QAAQ,CAAC,CAAA,CACT,SAAS,wDAAwD;AAAA;AACtE,OACF;AAAA,MACA,OAAO,EAAE,KAAA,EAAO,KAAA,EAAM,KAAM;AAC1B,QAAA,MAAM,KAAK,UAAA,EAAW;AAEtB,QAAA,IAAI,CAAC,IAAA,CAAK,IAAA,IAAQ,CAAC,KAAK,WAAA,EAAa;AACnC,UAAA,OAAO;AAAA,YACL,SAAS,CAAC,EAAE,MAAM,MAAA,EAAiB,IAAA,EAAM,6CAA6C,CAAA;AAAA,YACtF,OAAA,EAAS;AAAA,WACX;AAAA,QACF;AAEA,QAAA,MAAM,OAAA,GAAU,kBAAkB,EAAE,KAAA,EAAO,OAAM,EAAG,IAAA,CAAK,WAAA,EAAa,IAAA,CAAK,IAAI,CAAA;AAC/E,QAAA,OAAO;AAAA,UACL,OAAA,EAAS;AAAA,YACP,EAAE,MAAM,MAAA,EAAiB,IAAA,EAAM,oBAAoB,OAAA,EAAS,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA;AAAE;AACnF,SACF;AAAA,MACF;AAAA,KACF;AAGA,IAAA,IAAA,CAAK,SAAA,CAAU,YAAA;AAAA,MACb,eAAA;AAAA,MACA;AAAA,QACE,WAAA,EACE,gIAAA;AAAA,QACF,WAAA,EAAa;AAAA,UACX,KAAA,EAAO,EACJ,MAAA,EAAO,CACP,IAAI,CAAC,CAAA,CACL,SAAS,yEAAyE;AAAA;AACvF,OACF;AAAA,MACA,OAAO,EAAE,KAAA,EAAM,KAAM;AACnB,QAAA,MAAM,KAAK,UAAA,EAAW;AAEtB,QAAA,IAAI,CAAC,KAAK,IAAA,EAAM;AACd,UAAA,OAAO;AAAA,YACL,SAAS,CAAC,EAAE,MAAM,MAAA,EAAiB,IAAA,EAAM,6CAA6C,CAAA;AAAA,YACtF,OAAA,EAAS;AAAA,WACX;AAAA,QACF;AAEA,QAAA,MAAM,MAAM,kBAAA,CAAmB,EAAE,KAAA,EAAM,EAAG,KAAK,IAAI,CAAA;AACnD,QAAA,OAAO;AAAA,UACL,OAAA,EAAS,CAAC,EAAE,IAAA,EAAM,MAAA,EAAiB,IAAA,EAAM,iBAAA,CAAkB,GAAA,EAAK,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA,EAAG;AAAA,SACxF;AAAA,MACF;AAAA,KACF;AAGA,IAAA,IAAA,CAAK,SAAA,CA
AU,YAAA;AAAA,MACb,kBAAA;AAAA,MACA;AAAA,QACE,WAAA,EACE,0JAAA;AAAA,QACF,WAAA,EAAa;AAAA,UACX,KAAA,EAAO,EAAE,MAAA,EAAO,CAAE,IAAI,CAAC,CAAA,CAAE,SAAS,qBAAqB,CAAA;AAAA,UACvD,WAAW,CAAA,CACR,MAAA,EAAO,CACP,GAAA,CAAI,CAAC,CAAA,CACL,QAAA;AAAA,YACC;AAAA;AACF;AACJ,OACF;AAAA,MACA,OAAO,EAAE,KAAA,EAAO,SAAA,EAAU,KAAM;AAC9B,QAAA,MAAM,KAAK,UAAA,EAAW;AAEtB,QAAA,IAAI,CAAC,KAAK,IAAA,EAAM;AACd,UAAA,OAAO;AAAA,YACL,SAAS,CAAC,EAAE,MAAM,MAAA,EAAiB,IAAA,EAAM,6CAA6C,CAAA;AAAA,YACtF,OAAA,EAAS;AAAA,WACX;AAAA,QACF;AAEA,QAAA,MAAM,SAAS,qBAAA,CAAsB,EAAE,OAAO,SAAA,EAAU,EAAG,KAAK,IAAI,CAAA;AACpE,QAAA,OAAO;AAAA,UACL,OAAA,EAAS;AAAA,YACP;AAAA,cACE,IAAA,EAAM,MAAA;AAAA,cACN,MAAM,oBAAA,CAAqB,MAAA,EAAQ,SAAA,EAAW,IAAA,CAAK,OAAO,OAAO;AAAA;AACnE;AACF,SACF;AAAA,MACF;AAAA,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,UAAA,GAA4B;AAChC,IAAA,IAAI,KAAK,WAAA,EAAa;AACpB,MAAA;AAAA,IACF;AAEA,IAAA,IAAI;AACF,MAAA,IAAI,YAAA,CAAa,IAAA,CAAK,MAAM,CAAA,EAAG;AAE7B,QAAA,IAAA,CAAK,IAAA,GAAO,KAAK,MAAA,CAAO,IAAA;AACxB,QAAA,IAAA,CAAK,WAAA,GAAc,MAAM,iBAAA,CAAkB,IAAA,CAAK,OAAO,eAAe,CAAA;AAAA,MACxE,CAAA,MAAA,IAAW,YAAA,CAAa,IAAA,CAAK,MAAM,CAAA,EAAG;AAEpC,QAAA,IAAI,MAAMC,GAAA,CAAG,UAAA,CAAW,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA,EAAG;AAC7C,UAAA,IAAA,CAAK,OAAO,MAAMA,GAAA,CAAG,QAAA,CAAS,IAAA,CAAK,OAAO,QAAQ,CAAA;AAAA,QACpD,CAAA,MAAO;AACL,UAAA,MAAM,IAAI,KAAA,CAAM,CAAA,qBAAA,EAAwB,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA,CAAE,CAAA;AAAA,QAChE;AAEA,QAAA,IAAI,MAAMA,GAAA,CAAG,UAAA,CAAW,IAAA,CAAK,MAAA,CAAO,SAAS,CAAA,EAAG;AAC9C,UAAA,MAAM,YAAY,MAAMA,GAAA,CAAG,QAAA,CAAS,IAAA,CAAK,OAAO,SAAS,CAAA;AACzD,UAAA,IAAA,CAAK,WAAA,GAAc,MAAM,iBAAA,CAAkB,SAAS,CAAA;AAAA,QACtD,CAAA,MAAO;AACL,UAAA,MAAM,IAAI,KAAA,CAAM,CAAA,wBAAA,EAA2B,IAAA,CAAK,MAAA,CAAO,SAAS,CAAA,CAAE,CAAA;AAAA,QACpE;AAAA,MACF,CAAA,MAAO;AACL,QAAA,MAAM,IAAI,MAAM,0EAA0E,CAAA;AAAA,MAC5F;AAEA,MAAA,IAAA,CAAK,WAAA,GAAc,IAAA;AAAA,IACrB,SAAS,KAAA,EAAO;AACd,MAAA,OAAA,CAAQ,KAAA,CAAM,+BAA+B,KAAK,CAAA;AAClD,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,iBAAA,CACJ,GAAA,EACA,GAAA,EACA,UAAA,EACe;AACf,IAAA,MAAM,KAAK,UAAA,EAAW;AAItB,IAAA,MAAM,SAAA,GAAY,IAAI,6BAAA,CAA8B;AAAA,MAClD,kBAAA,EAAoB,MAAA;AAAA;AAAA,MACpB,kBAAA,EAAoB;AAAA;AAAA,KACrB,CAAA;AAGD,IAAA,MAAM,IAAA,CAAK,SAAA,CAAU,OAAA,CAAQ,SAAS,CAAA;AAEtC,IAAA,IAAI;AAEF,MAAA,MAAM,SAAA,CAAU,aAAA,CAAc,GAAA,EAAK,GAAA,EAAK,UAAU,CAAA;AAAA,IACpD,CAAA,SAAE;AAEA,MAAA,MAAM,UAAU,KAAA,EAAM;AAAA,IACxB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,iBAAiB,OAAA,EAAqC;AAC1D,IAAA,MAAM,KAAK,UAAA,EAAW;AAGtB,IAAA,MAAM,SAAA,GAAY,IAAI,wCAAA,CAAyC;AAAA,MAC7D,kBAAA,EAAoB,MAAA;AAAA;AAAA,MACpB,kBAAA,EAAoB;AAAA,KACrB,CAAA;AAGD,IAAA,MAAM,IAAA,CAAK,SAAA,CAAU,OAAA,CAAQ,SAAS,CAAA;AAEtC,IAAA,IAAI;AAEF,MAAA,OAAO,MAAM,SAAA,CAAU,aAAA,CAAc,OAAO,CAAA;AAAA,IAC9C,CAAA,SAAE;AAEA,MAAA,MAAM,UAAU,KAAA,EAAM;AAAA,IACxB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,SAAA,GAMH;AACD,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,KAAK,MAAA,CAAO,IAAA;AAAA,MAClB,OAAA,EAAS,IAAA,CAAK,MAAA,CAAO,OAAA,IAAW,OAAA;AAAA,MAChC,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,QAAA,EAAU,KAAK,IAAA,GAAO,MAAA,CAAO,KAAK,IAAA,CAAK,IAAI,EAAE,MAAA,GAAS,CAAA;AAAA,MACtD,OAAA,EAAS,KAAK,MAAA,CAAO;AAAA,KACvB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,YAAA,GAA0B;AACxB,IAAA,OAAO,IAAA,CAAK,SAAA;AAAA,EACd;AACF,CAAA;;;AChSA,eAAe,YAAY,QAAA,EAAyC;AAClE,EAAA,MAAM,MAAA,GAAuB;AAAA,IAC3B,OAAA,EAAS,IAAA;AAAA,IACT,SAAA,EAAW,CAAA;AAAA,IACX,QAAQ,EAAC;AAAA,IACT,UAAU;AAAC,GACb;AAEA,EAAA,MAAM,MAAA,GAAS,IAAA,CAAK,IAAA,CAAK,QAAA,EAAU,KAAK,CAAA;AAGxC,EAAA,IAAI,CAAE,MAAMA,GAAAA,CAAG,UAAA,CAAW,MAAM,CAAA,EAAI;AAClC,IAAA,MAAA,CAAO,MAAA,CAAO,IAAA,CAA
K,CAAA,yBAAA,EAA4B,MAAM,CAAA,CAAE,CAAA;AACvD,IAAA,MAAA,CAAO,MAAA,CAAO,KAAK,6DAA6D,CAAA;AAChF,IAAA,MAAA,CAAO,OAAA,GAAU,KAAA;AACjB,IAAA,OAAO,MAAA;AAAA,EACT;AAGA,EAAA,MAAM,aAAA,GAAgB,CAAC,WAAA,EAAa,mBAAA,EAAqB,eAAe,CAAA;AAExE,EAAA,KAAA,MAAW,QAAQ,aAAA,EAAe;AAChC,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,IAAA,CAAK,MAAA,EAAQ,IAAI,CAAA;AACvC,IAAA,IAAI,CAAE,MAAMA,GAAAA,CAAG,UAAA,CAAW,QAAQ,CAAA,EAAI;AACpC,MAAA,MAAA,CAAO,MAAA,CAAO,IAAA,CAAK,CAAA,uBAAA,EAA0B,QAAQ,CAAA,CAAE,CAAA;AACvD,MAAA,MAAA,CAAO,OAAA,GAAU,KAAA;AAAA,IACnB;AAAA,EACF;AAEA,EAAA,IAAI,CAAC,OAAO,OAAA,EAAS;AACnB,IAAA,OAAO,MAAA;AAAA,EACT;AAGA,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,IAAA,CAAK,MAAA,EAAQ,WAAW,CAAA;AAC9C,IAAA,MAAM,IAAA,GAAO,MAAMA,GAAAA,CAAG,QAAA,CAAS,QAAQ,CAAA;AAEvC,IAAA,IAAI,OAAO,IAAA,KAAS,QAAA,IAAY,IAAA,KAAS,IAAA,EAAM;AAC7C,MAAA,MAAA,CAAO,MAAA,CAAO,KAAK,iCAAiC,CAAA;AACpD,MAAA,MAAA,CAAO,OAAA,GAAU,KAAA;AAAA,IACnB,CAAA,MAAO;AACL,MAAA,MAAA,CAAO,SAAA,GAAY,MAAA,CAAO,IAAA,CAAK,IAAI,CAAA,CAAE,MAAA;AAErC,MAAA,IAAI,MAAA,CAAO,cAAc,CAAA,EAAG;AAC1B,QAAA,MAAA,CAAO,QAAA,CAAS,KAAK,iCAAiC,CAAA;AAAA,MACxD;AAGA,MAAA,KAAA,MAAW,CAAC,KAAA,EAAO,GAAG,KAAK,MAAA,CAAO,OAAA,CAAQ,IAAI,CAAA,EAAG;AAC/C,QAAA,MAAM,CAAA,GAAI,GAAA;AACV,QAAA,IAAI,CAAC,CAAA,CAAE,KAAA,IAAS,OAAO,CAAA,CAAE,UAAU,QAAA,EAAU;AAC3C,UAAA,MAAA,CAAO,QAAA,CAAS,IAAA,CAAK,CAAA,SAAA,EAAY,KAAK,CAAA,mBAAA,CAAqB,CAAA;AAAA,QAC7D;AACA,QAAA,IAAI,CAAC,CAAA,CAAE,QAAA,IAAY,OAAO,CAAA,CAAE,aAAa,QAAA,EAAU;AACjD,UAAA,MAAA,CAAO,QAAA,CAAS,IAAA,CAAK,CAAA,SAAA,EAAY,KAAK,CAAA,4BAAA,CAA8B,CAAA;AAAA,QACtE;AAAA,MACF;AAAA,IACF;AAAA,EACF,SAAS,KAAA,EAAO;AACd,IAAA,MAAA,CAAO,MAAA,CAAO,IAAA,CAAK,CAAA,2BAAA,EAA+B,KAAA,CAAgB,OAAO,CAAA,CAAE,CAAA;AAC3E,IAAA,MAAA,CAAO,OAAA,GAAU,KAAA;AAAA,EACnB;AAGA,EAAA,IAAI;AACF,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,IAAA,CAAK,MAAA,EAAQ,mBAAmB,CAAA;AACvD,IAAA,MAAM,SAAA,GAAY,MAAMA,GAAAA,CAAG,QAAA,CAAS,SAAS,CAAA;AAE7C,IAAA,IAAI,OAAO,SAAA,KAAc,QAAA,IAAY,SAAA,KAAc,IAAA,EAAM;AACvD,MAAA,MAAA,CAAO,MAAA,CAAO,KAAK,yCAAyC,CAAA;AAC5D,MAAA,MAAA,CAAO,OAAA,GAAU,KAAA;AAAA,IACnB;AAAA,EACF,SAAS,KAAA,EAAO;AACd,IAAA,MAAA,CAAO,MAAA,CAAO,IAAA,CAAK,CAAA,mCAAA,EAAuC,KAAA,CAAgB,OAAO,CAAA,CAAE,CAAA;AACnF,IAAA,MAAA,CAAO,OAAA,GAAU,KAAA;AAAA,EACnB;AAGA,EAAA,IAAI;AACF,IAAA,MAAM,YAAA,GAAe,IAAA,CAAK,IAAA,CAAK,MAAA,EAAQ,eAAe,CAAA;AACtD,IAAA,MAAM,QAAA,GAAW,MAAMA,GAAAA,CAAG,QAAA,CAAS,YAAY,CAAA;AAE/C,IAAA,IAAI,CAAC,QAAA,CAAS,IAAA,IAAQ,OAAO,QAAA,CAAS,SAAS,QAAA,EAAU;AACvD,MAAA,MAAA,CAAO,QAAA,CAAS,KAAK,sCAAsC,CAAA;AAAA,IAC7D;AACA,IAAA,IAAI,CAAC,QAAA,CAAS,OAAA,IAAW,OAAO,QAAA,CAAS,YAAY,QAAA,EAAU;AAC7D,MAAA,MAAA,CAAO,QAAA,CAAS,KAAK,yCAAyC,CAAA;AAAA,IAChE;AAAA,EACF,SAAS,KAAA,EAAO;AACd,IAAA,MAAA,CAAO,MAAA,CAAO,IAAA,CAAK,CAAA,+BAAA,EAAmC,KAAA,CAAgB,OAAO,CAAA,CAAE,CAAA;AAC/E,IAAA,MAAA,CAAO,OAAA,GAAU,KAAA;AAAA,EACnB;AAEA,EAAA,OAAO,MAAA;AACT;AAKA,eAAe,WAAW,QAAA,EAAkE;AAC1F,EAAA,MAAM,MAAA,GAAS,IAAA,CAAK,IAAA,CAAK,QAAA,EAAU,KAAK,CAAA;AACxC,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,IAAA,CAAK,MAAA,EAAQ,WAAW,CAAA;AAC9C,EAAA,MAAM,SAAA,GAAY,IAAA,CAAK,IAAA,CAAK,MAAA,EAAQ,mBAAmB,CAAA;AACvD,EAAA,MAAM,YAAA,GAAe,IAAA,CAAK,IAAA,CAAK,MAAA,EAAQ,eAAe,CAAA;AAEtD,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,MAAMA,GAAAA,CAAG,QAAA,CAAS,YAAY,CAAA;AAC/C,IAAA,MAAM,IAAA,GAAO,MAAMA,GAAAA,CAAG,QAAA,CAAS,QAAQ,CAAA;AACvC,IAAA,MAAM,eAAA,GAAkB,MAAMA,GAAAA,CAAG,QAAA,CAAS,SAAS,CAAA;AAEnD,IAAA,MAAM,MAAA,GAAS,IAAI,aAAA,CAAc;AAAA,MAC/B,IAAA,EAAM,SAAS,IAAA,IAAQ,WAAA;AAAA,MACvB,OAAA,EAAS,SAAS,OAAA,IAAW,OAAA;AAAA,MAC7B,IAAA;AAAA,MACA;AAAA,KACD,CAAA;AAED,IAAA,MAAM,OAAO,UAAA,EAAW;AACxB,IAAA,MAAM,MAAA,GAAS,MAAM,MAAA,CAAO,SAAA,EAAU;AAEtC,IAAA,IAAI,CAAC,OAAO,WAAA,EAAa;AACvB,MAAA,OAAO,EAAE,OAAA,EAAS,KAAA,EAAO,OAAA,EA
AS,6BAAA,EAA8B;AAAA,IAClE;AAEA,IAAA,IAAI,MAAA,CAAO,aAAa,CAAA,EAAG;AACzB,MAAA,OAAO,EAAE,OAAA,EAAS,KAAA,EAAO,OAAA,EAAS,gCAAA,EAAiC;AAAA,IACrE;AAEA,IAAA,OAAO;AAAA,MACL,OAAA,EAAS,IAAA;AAAA,MACT,OAAA,EAAS,CAAA,wBAAA,EAA2B,MAAA,CAAO,QAAQ,CAAA,UAAA;AAAA,KACrD;AAAA,EACF,SAAS,KAAA,EAAO;AACd,IAAA,OAAO;AAAA,MACL,OAAA,EAAS,KAAA;AAAA,MACT,OAAA,EAAS,CAAA,oBAAA,EAAwB,KAAA,CAAgB,OAAO,CAAA;AAAA,KAC1D;AAAA,EACF;AACF;AAKA,eAAe,IAAA,GAAsB;AACnC,EAAA,MAAM,IAAA,GAAO,OAAA,CAAQ,IAAA,CAAK,KAAA,CAAM,CAAC,CAAA;AACjC,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,CAAC,CAAA,IAAK,SAAA;AAE5B,EAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,EAAA,OAAA,CAAQ,IAAI,kCAA2B,CAAA;AACvC,EAAA,OAAA,CAAQ,GAAA,CAAI,GAAA,CAAI,MAAA,CAAO,EAAE,CAAC,CAAA;AAC1B,EAAA,OAAA,CAAQ,IAAI,CAAA,iBAAA,EAAoB,IAAA,CAAK,OAAA,CAAQ,QAAQ,CAAC,CAAA,CAAE,CAAA;AACxD,EAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AAGd,EAAA,OAAA,CAAQ,IAAI,oCAA6B,CAAA;AACzC,EAAA,MAAM,YAAA,GAAe,MAAM,WAAA,CAAY,QAAQ,CAAA;AAE/C,EAAA,IAAI,YAAA,CAAa,MAAA,CAAO,MAAA,GAAS,CAAA,EAAG;AAClC,IAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,IAAA,OAAA,CAAQ,IAAI,gBAAW,CAAA;AACvB,IAAA,KAAA,MAAW,KAAA,IAAS,aAAa,MAAA,EAAQ;AACvC,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAQ,KAAK,CAAA,CAAE,CAAA;AAAA,IAC7B;AAAA,EACF;AAEA,EAAA,IAAI,YAAA,CAAa,QAAA,CAAS,MAAA,GAAS,CAAA,EAAG;AACpC,IAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,IAAA,OAAA,CAAQ,IAAI,yBAAe,CAAA;AAC3B,IAAA,KAAA,MAAW,OAAA,IAAW,aAAa,QAAA,EAAU;AAC3C,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAQ,OAAO,CAAA,CAAE,CAAA;AAAA,IAC/B;AAAA,EACF;AAEA,EAAA,IAAI,CAAC,aAAa,OAAA,EAAS;AACzB,IAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,IAAA,OAAA,CAAQ,IAAI,kCAA6B,CAAA;AACzC,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,gBAAA,EAAc,YAAA,CAAa,SAAS,CAAA,UAAA,CAAY,CAAA;AAC5D,EAAA,OAAA,CAAQ,IAAI,sCAAiC,CAAA;AAC7C,EAAA,OAAA,CAAQ,IAAI,gCAA2B,CAAA;AAGvC,EAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,EAAA,OAAA,CAAQ,IAAI,iCAA0B,CAAA;AACtC,EAAA,MAAM,YAAA,GAAe,MAAM,UAAA,CAAW,QAAQ,CAAA;AAE9C,EAAA,IAAI,CAAC,aAAa,OAAA,EAAS;AACzB,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAQ,YAAA,CAAa,OAAO,CAAA,CAAE,CAAA;AAC1C,IAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,IAAA,OAAA,CAAQ,IAAI,2BAAsB,CAAA;AAClC,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAQ,YAAA,CAAa,OAAO,CAAA,CAAE,CAAA;AAE1C,EAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,EAAA,OAAA,CAAQ,IAAI,2BAAsB,CAAA;AAClC,EAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AACd,EAAA,OAAA,CAAQ,IAAI,aAAa,CAAA;AACzB,EAAA,OAAA,CAAQ,IAAI,6CAA6C,CAAA;AACzD,EAAA,OAAA,CAAQ,IAAI,8DAA8D,CAAA;AAC1E,EAAA,OAAA,CAAQ,IAAI,8CAA8C,CAAA;AAC1D,EAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AAEd,EAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAChB;AAEA,IAAA,EAAK,CAAE,KAAA,CAAM,CAAC,KAAA,KAAU;AACtB,EAAA,OAAA,CAAQ,KAAA,CAAM,qBAAqB,KAAK,CAAA;AACxC,EAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAChB,CAAC,CAAA","file":"verify.mjs","sourcesContent":["import FlexSearch from 'flexsearch';\nimport type { ProcessedDoc, SearchResult } from '../types/index.js';\nimport type { IndexableDocument } from './types.js';\n\nexport type FlexSearchDocument = FlexSearch.Document<IndexableDocument, string[]>;\n\n/**\n * Field weights for search ranking\n * Higher values = more importance\n */\nconst FIELD_WEIGHTS = {\n title: 3.0,\n headings: 2.0,\n description: 1.5,\n content: 1.0,\n} as const;\n\n/**\n * Simple English stemmer\n * Handles common suffixes for better matching\n */\nfunction englishStemmer(word: string): string {\n // Only process words longer than 3 characters\n if (word.length <= 3) return word;\n\n return (\n word\n // -ing endings\n .replace(/ing$/, '')\n // -tion, -sion endings -> t, s\n .replace(/tion$/, 't')\n .replace(/sion$/, 's')\n // -ed endings (careful with short words)\n 
.replace(/([^aeiou])ed$/, '$1')\n // -es endings\n .replace(/([^aeiou])es$/, '$1')\n // -ly endings\n .replace(/ly$/, '')\n // -ment endings\n .replace(/ment$/, '')\n // -ness endings\n .replace(/ness$/, '')\n // -ies -> y\n .replace(/ies$/, 'y')\n // -s endings (simple plural)\n .replace(/([^s])s$/, '$1')\n );\n}\n\n/**\n * Create a FlexSearch document index with enhanced configuration\n *\n * Features:\n * - Full substring matching (finds \"auth\" in \"authentication\")\n * - English stemming (finds \"authenticate\" when searching \"authentication\")\n * - Context-aware scoring for phrase matching\n * - Optimized resolution for relevance ranking\n */\nexport function createSearchIndex(): FlexSearchDocument {\n return new FlexSearch.Document<IndexableDocument, string[]>({\n // Use 'full' tokenization for substring matching\n // This allows \"auth\" to match \"authentication\"\n tokenize: 'full',\n\n // Enable caching for faster repeated queries\n cache: 100,\n\n // Higher resolution = more granular ranking (1-9)\n resolution: 9,\n\n // Enable context for phrase/proximity matching\n context: {\n resolution: 2,\n depth: 2,\n bidirectional: true,\n },\n\n // Apply stemming to normalize word forms\n encode: (str: string) => {\n // Normalize to lowercase and split into words\n const words = str.toLowerCase().split(/[\\s\\-_.,;:!?'\"()[\\]{}]+/);\n // Apply stemmer to each word\n return words.filter(Boolean).map(englishStemmer);\n },\n\n // Document schema\n document: {\n id: 'id',\n // Index these fields for searching\n index: ['title', 'content', 'headings', 'description'],\n // Store these fields in results (for enriched queries)\n store: ['title', 'description'],\n },\n });\n}\n\n/**\n * Add a document to the search index\n */\nexport function addDocumentToIndex(index: FlexSearchDocument, doc: ProcessedDoc): void {\n const indexable: IndexableDocument = {\n id: doc.route,\n title: doc.title,\n content: doc.markdown,\n headings: doc.headings.map((h) => h.text).join(' '),\n description: doc.description,\n };\n\n index.add(indexable);\n}\n\n/**\n * Build the search index from processed documents\n */\nexport function buildSearchIndex(docs: ProcessedDoc[]): FlexSearchDocument {\n const index = createSearchIndex();\n\n for (const doc of docs) {\n addDocumentToIndex(index, doc);\n }\n\n return index;\n}\n\n/**\n * Search the index and return results with weighted ranking\n *\n * Ranking combines:\n * - Field importance (title > headings > description > content)\n * - Position in results (earlier = more relevant)\n */\nexport function searchIndex(\n index: FlexSearchDocument,\n docs: Record<string, ProcessedDoc>,\n query: string,\n options: { limit?: number } = {}\n): SearchResult[] {\n const { limit = 5 } = options;\n\n // Search across all fields\n const rawResults = index.search(query, {\n limit: limit * 3, // Get extra results for better ranking after weighting\n enrich: true,\n });\n\n // Aggregate scores across fields with weighting\n const docScores = new Map<string, number>();\n\n for (const fieldResult of rawResults) {\n // Determine which field this result is from\n const field = fieldResult.field as keyof typeof FIELD_WEIGHTS;\n const fieldWeight = FIELD_WEIGHTS[field] ?? 1.0;\n\n // With enrich: true, results are objects with id property\n const results = fieldResult.result as unknown as Array<{ id: string } | string>;\n\n for (let i = 0; i < results.length; i++) {\n const item = results[i];\n if (!item) continue;\n\n const docId = typeof item === 'string' ? 
item : item.id;\n\n // Position-based score (earlier = higher)\n const positionScore = (results.length - i) / results.length;\n\n // Apply field weight to position score\n const weightedScore = positionScore * fieldWeight;\n\n // Combine with existing score (additive for multi-field matches)\n const existingScore = docScores.get(docId) ?? 0;\n docScores.set(docId, existingScore + weightedScore);\n }\n }\n\n // Build results array\n const results: SearchResult[] = [];\n\n for (const [docId, score] of docScores) {\n const doc = docs[docId];\n if (!doc) continue;\n\n results.push({\n route: doc.route,\n title: doc.title,\n score,\n snippet: generateSnippet(doc.markdown, query),\n matchingHeadings: findMatchingHeadings(doc, query),\n });\n }\n\n // Sort by score (highest first) and limit\n results.sort((a, b) => b.score - a.score);\n return results.slice(0, limit);\n}\n\n/**\n * Generate a snippet from the markdown content around the query terms\n */\nexport function generateSnippet(markdown: string, query: string): string {\n const maxLength = 200;\n const queryTerms = query.toLowerCase().split(/\\s+/).filter(Boolean);\n\n if (queryTerms.length === 0) {\n return markdown.slice(0, maxLength) + (markdown.length > maxLength ? '...' : '');\n }\n\n const lowerMarkdown = markdown.toLowerCase();\n let bestIndex = -1;\n let bestTerm = '';\n\n // Also try stemmed versions of query terms\n const allTerms = [...queryTerms, ...queryTerms.map(englishStemmer)];\n\n for (const term of allTerms) {\n const index = lowerMarkdown.indexOf(term);\n if (index !== -1 && (bestIndex === -1 || index < bestIndex)) {\n bestIndex = index;\n bestTerm = term;\n }\n }\n\n if (bestIndex === -1) {\n // No term found, return beginning of document\n return markdown.slice(0, maxLength) + (markdown.length > maxLength ? '...' : '');\n }\n\n const snippetStart = Math.max(0, bestIndex - 50);\n const snippetEnd = Math.min(markdown.length, bestIndex + bestTerm.length + 150);\n\n let snippet = markdown.slice(snippetStart, snippetEnd);\n\n snippet = snippet\n // Remove markdown headings\n .replace(/^#{1,6}\\s+/gm, '')\n // Remove markdown links but keep text\n .replace(/\\[([^\\]]+)\\]\\([^)]+\\)/g, '$1')\n // Remove markdown images\n .replace(/!\\[([^\\]]*)\\]\\([^)]+\\)/g, '')\n // Remove code block markers\n .replace(/```[a-z]*\\n?/g, '')\n // Remove inline code backticks\n .replace(/`([^`]+)`/g, '$1')\n // Clean up whitespace\n .replace(/\\s+/g, ' ')\n .trim();\n\n const prefix = snippetStart > 0 ? '...' : '';\n const suffix = snippetEnd < markdown.length ? '...' 
: '';\n\n return prefix + snippet + suffix;\n}\n\n/**\n * Find headings that match the query (including stemmed forms)\n */\nfunction findMatchingHeadings(doc: ProcessedDoc, query: string): string[] {\n const queryTerms = query.toLowerCase().split(/\\s+/).filter(Boolean);\n // Include stemmed versions for better matching\n const allTerms = [...queryTerms, ...queryTerms.map(englishStemmer)];\n const matching: string[] = [];\n\n for (const heading of doc.headings) {\n const headingLower = heading.text.toLowerCase();\n const headingStemmed = headingLower.split(/\\s+/).map(englishStemmer).join(' ');\n\n // Check if any query term matches the heading or its stemmed form\n if (\n allTerms.some(\n (term) => headingLower.includes(term) || headingStemmed.includes(englishStemmer(term))\n )\n ) {\n matching.push(heading.text);\n }\n }\n\n return matching.slice(0, 3); // Limit to 3 matching headings\n}\n\n/**\n * Export the search index to a serializable format\n */\nexport async function exportSearchIndex(index: FlexSearchDocument): Promise<unknown> {\n const exportData: Record<string, unknown> = {};\n\n await index.export((key, data) => {\n exportData[key as string] = data;\n });\n\n return exportData;\n}\n\n/**\n * Import a search index from serialized data\n */\nexport async function importSearchIndex(\n data: Record<string, unknown>\n): Promise<FlexSearchDocument> {\n const index = createSearchIndex();\n\n for (const [key, value] of Object.entries(data)) {\n // FlexSearch's import expects the data in a specific format\n await (index as unknown as { import: (key: string, data: unknown) => Promise<void> }).import(\n key,\n value\n );\n }\n\n return index;\n}\n","import type { ProcessedDoc, SearchResult, DocsSearchParams } from '../../types/index.js';\nimport { searchIndex, type FlexSearchDocument } from '../../search/flexsearch-indexer.js';\n\n/**\n * Tool definition for docs_search\n */\nexport const docsSearchTool = {\n name: 'docs_search',\n description:\n 'Search across developer documentation. Returns ranked results with snippets and matching headings.',\n inputSchema: {\n type: 'object' as const,\n properties: {\n query: {\n type: 'string',\n description: 'Search query string',\n },\n limit: {\n type: 'number',\n description: 'Maximum number of results to return (default: 5, max: 20)',\n default: 5,\n },\n },\n required: ['query'],\n },\n};\n\n/**\n * Execute the docs_search tool\n */\nexport function executeDocsSearch(\n params: DocsSearchParams,\n index: FlexSearchDocument,\n docs: Record<string, ProcessedDoc>\n): SearchResult[] {\n const { query, limit = 5 } = params;\n\n // Validate parameters\n if (!query || typeof query !== 'string' || query.trim().length === 0) {\n throw new Error('Query parameter is required and must be a non-empty string');\n }\n\n const effectiveLimit = Math.min(Math.max(1, limit), 20);\n\n // Search the index\n const results = searchIndex(index, docs, query.trim(), {\n limit: effectiveLimit,\n });\n\n return results;\n}\n\n/**\n * Format search results for MCP response\n */\nexport function formatSearchResults(results: SearchResult[], baseUrl?: string): string {\n if (results.length === 0) {\n return 'No matching documents found.';\n }\n\n const lines: string[] = [`Found ${results.length} result(s):\\n`];\n\n for (let i = 0; i < results.length; i++) {\n const result = results[i];\n if (!result) continue;\n\n lines.push(`${i + 1}. 
**${result.title}**`);\n\n // Include full URL if baseUrl is provided\n if (baseUrl) {\n const fullUrl = `${baseUrl.replace(/\\/$/, '')}${result.route}`;\n lines.push(` URL: ${fullUrl}`);\n }\n\n lines.push(` Route: ${result.route}`);\n\n if (result.matchingHeadings && result.matchingHeadings.length > 0) {\n lines.push(` Matching sections: ${result.matchingHeadings.join(', ')}`);\n }\n\n lines.push(` ${result.snippet}`);\n lines.push('');\n }\n\n return lines.join('\\n');\n}\n","import type { ProcessedDoc, DocsGetPageParams } from '../../types/index.js';\n\n/**\n * Tool definition for docs_get_page\n */\nexport const docsGetPageTool = {\n name: 'docs_get_page',\n description:\n 'Retrieve the full content of a documentation page as markdown. Use this after searching to get complete page content.',\n inputSchema: {\n type: 'object' as const,\n properties: {\n route: {\n type: 'string',\n description: 'The route path of the page (e.g., /docs/getting-started)',\n },\n },\n required: ['route'],\n },\n};\n\n/**\n * Execute the docs_get_page tool\n */\nexport function executeDocsGetPage(\n params: DocsGetPageParams,\n docs: Record<string, ProcessedDoc>\n): ProcessedDoc | null {\n const { route } = params;\n\n // Validate parameters\n if (!route || typeof route !== 'string') {\n throw new Error('Route parameter is required and must be a string');\n }\n\n // Normalize route (ensure leading slash, remove trailing slash)\n let normalizedRoute = route.trim();\n if (!normalizedRoute.startsWith('/')) {\n normalizedRoute = '/' + normalizedRoute;\n }\n if (normalizedRoute.length > 1 && normalizedRoute.endsWith('/')) {\n normalizedRoute = normalizedRoute.slice(0, -1);\n }\n\n // Look up the document\n const doc = docs[normalizedRoute];\n\n if (!doc) {\n // Try without leading slash\n const altRoute = normalizedRoute.slice(1);\n if (docs[altRoute]) {\n return docs[altRoute] ?? null;\n }\n return null;\n }\n\n return doc;\n}\n\n/**\n * Format page content for MCP response\n */\nexport function formatPageContent(doc: ProcessedDoc | null, baseUrl?: string): string {\n if (!doc) {\n return 'Page not found. Please check the route path and try again.';\n }\n\n const lines: string[] = [];\n\n // Header\n lines.push(`# ${doc.title}`);\n lines.push('');\n\n // Metadata\n if (doc.description) {\n lines.push(`> ${doc.description}`);\n lines.push('');\n }\n\n // Include full URL if baseUrl is provided\n if (baseUrl) {\n const fullUrl = `${baseUrl.replace(/\\/$/, '')}${doc.route}`;\n lines.push(`**URL:** ${fullUrl}`);\n }\n\n lines.push(`**Route:** ${doc.route}`);\n lines.push('');\n\n // Table of contents (if there are headings)\n if (doc.headings.length > 0) {\n lines.push('## Contents');\n lines.push('');\n for (const heading of doc.headings) {\n if (heading.level <= 3) {\n const indent = ' '.repeat(heading.level - 1);\n lines.push(`${indent}- [${heading.text}](#${heading.id})`);\n }\n }\n lines.push('');\n lines.push('---');\n lines.push('');\n }\n\n // Main content\n lines.push(doc.markdown);\n\n return lines.join('\\n');\n}\n","import type { DocHeading } from '../types/index.js';\n\n/**\n * Extract headings from markdown content with their positions\n */\nexport function extractHeadingsFromMarkdown(markdown: string): DocHeading[] {\n const headings: DocHeading[] = [];\n const lines = markdown.split('\\n');\n let currentOffset = 0;\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i] ?? 
'';\n const headingMatch = line.match(/^(#{1,6})\\s+(.+?)(?:\\s+\\{#([^}]+)\\})?$/);\n\n if (headingMatch) {\n const hashes = headingMatch[1] ?? '';\n const level = hashes.length;\n let text = headingMatch[2] ?? '';\n let id = headingMatch[3] ?? '';\n\n // If no explicit ID, generate one from text (Docusaurus style)\n if (!id) {\n id = generateHeadingId(text);\n }\n\n // Clean up text (remove any remaining markdown formatting)\n text = text.replace(/\\*\\*([^*]+)\\*\\*/g, '$1'); // Remove bold\n text = text.replace(/_([^_]+)_/g, '$1'); // Remove italic\n text = text.replace(/`([^`]+)`/g, '$1'); // Remove code\n\n headings.push({\n level,\n text: text.trim(),\n id,\n startOffset: currentOffset,\n endOffset: -1, // Will be calculated below\n });\n }\n\n currentOffset += line.length + 1; // +1 for newline\n }\n\n // Calculate end offsets (each heading ends where the next same-or-higher level heading starts)\n for (let i = 0; i < headings.length; i++) {\n const current = headings[i];\n if (!current) continue;\n\n let endOffset = markdown.length;\n\n // Find the next heading at the same or higher level\n for (let j = i + 1; j < headings.length; j++) {\n const next = headings[j];\n if (next && next.level <= current.level) {\n endOffset = next.startOffset;\n break;\n }\n }\n\n current.endOffset = endOffset;\n }\n\n return headings;\n}\n\n/**\n * Generate a URL-safe heading ID (Docusaurus style)\n */\nexport function generateHeadingId(text: string): string {\n return (\n text\n .toLowerCase()\n // Remove any non-alphanumeric characters except spaces and hyphens\n .replace(/[^\\w\\s-]/g, '')\n // Replace spaces with hyphens\n .replace(/\\s+/g, '-')\n // Remove consecutive hyphens\n .replace(/-+/g, '-')\n // Remove leading/trailing hyphens\n .replace(/^-|-$/g, '')\n );\n}\n\n/**\n * Extract a specific section from markdown by heading ID\n */\nexport function extractSection(\n markdown: string,\n headingId: string,\n headings: DocHeading[]\n): string | null {\n const heading = headings.find((h) => h.id === headingId);\n\n if (!heading) {\n return null;\n }\n\n return markdown.slice(heading.startOffset, heading.endOffset).trim();\n}\n","import type { ProcessedDoc, DocsGetSectionParams } from '../../types/index.js';\nimport { extractSection } from '../../processing/heading-extractor.js';\n\n/**\n * Tool definition for docs_get_section\n */\nexport const docsGetSectionTool = {\n name: 'docs_get_section',\n description:\n 'Retrieve a specific section of a documentation page by heading ID. 
Use this to get focused content from a larger page.',\n inputSchema: {\n type: 'object' as const,\n properties: {\n route: {\n type: 'string',\n description: 'The route path of the page (e.g., /docs/getting-started)',\n },\n headingId: {\n type: 'string',\n description: 'The ID of the heading to retrieve (e.g., authentication)',\n },\n },\n required: ['route', 'headingId'],\n },\n};\n\n/**\n * Result of executing docs_get_section\n */\nexport interface SectionResult {\n /** The section content as markdown */\n content: string | null;\n /** The document the section belongs to */\n doc: ProcessedDoc | null;\n /** The heading text */\n headingText: string | null;\n /** Available headings in the document */\n availableHeadings: Array<{ id: string; text: string; level: number }>;\n}\n\n/**\n * Execute the docs_get_section tool\n */\nexport function executeDocsGetSection(\n params: DocsGetSectionParams,\n docs: Record<string, ProcessedDoc>\n): SectionResult {\n const { route, headingId } = params;\n\n // Validate parameters\n if (!route || typeof route !== 'string') {\n throw new Error('Route parameter is required and must be a string');\n }\n if (!headingId || typeof headingId !== 'string') {\n throw new Error('HeadingId parameter is required and must be a string');\n }\n\n // Normalize route\n let normalizedRoute = route.trim();\n if (!normalizedRoute.startsWith('/')) {\n normalizedRoute = '/' + normalizedRoute;\n }\n if (normalizedRoute.length > 1 && normalizedRoute.endsWith('/')) {\n normalizedRoute = normalizedRoute.slice(0, -1);\n }\n\n // Look up the document\n const doc = docs[normalizedRoute];\n\n if (!doc) {\n return {\n content: null,\n doc: null,\n headingText: null,\n availableHeadings: [],\n };\n }\n\n // Get available headings\n const availableHeadings = doc.headings.map((h) => ({\n id: h.id,\n text: h.text,\n level: h.level,\n }));\n\n // Find the heading\n const heading = doc.headings.find((h) => h.id === headingId.trim());\n\n if (!heading) {\n return {\n content: null,\n doc,\n headingText: null,\n availableHeadings,\n };\n }\n\n // Extract the section content\n const content = extractSection(doc.markdown, headingId.trim(), doc.headings);\n\n return {\n content,\n doc,\n headingText: heading.text,\n availableHeadings,\n };\n}\n\n/**\n * Format section content for MCP response\n */\nexport function formatSectionContent(\n result: SectionResult,\n headingId: string,\n baseUrl?: string\n): string {\n if (!result.doc) {\n return 'Page not found. Please check the route path and try again.';\n }\n\n if (!result.content) {\n const lines = [`Section \"${headingId}\" not found in this document.`, '', 'Available sections:'];\n\n for (const heading of result.availableHeadings) {\n const indent = ' '.repeat(heading.level - 1);\n lines.push(`${indent}- ${heading.text} (id: ${heading.id})`);\n }\n\n return lines.join('\\n');\n }\n\n const lines: string[] = [];\n\n // Build URL with anchor if baseUrl is provided\n const fullUrl = baseUrl ? 
`${baseUrl.replace(/\\/$/, '')}${result.doc.route}#${headingId}` : null;\n\n // Header\n lines.push(`# ${result.headingText}`);\n if (fullUrl) {\n lines.push(`> From: ${result.doc.title} - ${fullUrl}`);\n } else {\n lines.push(`> From: ${result.doc.title} (${result.doc.route})`);\n }\n lines.push('');\n lines.push('---');\n lines.push('');\n\n // Section content\n lines.push(result.content);\n\n return lines.join('\\n');\n}\n","import fs from 'fs-extra';\nimport { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';\nimport { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js';\nimport { WebStandardStreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/webStandardStreamableHttp.js';\nimport { z } from 'zod';\nimport type { IncomingMessage, ServerResponse } from 'node:http';\nimport type {\n ProcessedDoc,\n McpServerConfig,\n McpServerFileConfig,\n McpServerDataConfig,\n} from '../types/index.js';\nimport { importSearchIndex, type FlexSearchDocument } from '../search/flexsearch-indexer.js';\nimport { executeDocsSearch, formatSearchResults } from './tools/docs-search.js';\nimport { executeDocsGetPage, formatPageContent } from './tools/docs-get-page.js';\nimport { executeDocsGetSection, formatSectionContent } from './tools/docs-get-section.js';\n\n/**\n * Type guard to check if config uses file-based loading\n */\nfunction isFileConfig(config: McpServerConfig): config is McpServerFileConfig {\n return 'docsPath' in config && 'indexPath' in config;\n}\n\n/**\n * Type guard to check if config uses pre-loaded data\n */\nfunction isDataConfig(config: McpServerConfig): config is McpServerDataConfig {\n return 'docs' in config && 'searchIndexData' in config;\n}\n\n/**\n * MCP Server for documentation\n *\n * This class provides the MCP server implementation that can be used\n * with any HTTP framework (Express, Vercel, Cloudflare Workers, etc.)\n *\n * Supports two modes:\n * - File-based: Load docs and search index from filesystem (Node.js)\n * - Pre-loaded: Accept docs and search index data directly (Workers)\n *\n * Uses the official MCP SDK for proper protocol handling.\n */\nexport class McpDocsServer {\n private config: McpServerConfig;\n private docs: Record<string, ProcessedDoc> | null = null;\n private searchIndex: FlexSearchDocument | null = null;\n private mcpServer: McpServer;\n private initialized = false;\n\n constructor(config: McpServerConfig) {\n this.config = config;\n\n // Create MCP server using the high-level API\n this.mcpServer = new McpServer(\n {\n name: config.name,\n version: config.version ?? '1.0.0',\n },\n {\n capabilities: {\n tools: {},\n },\n }\n );\n\n this.registerTools();\n }\n\n /**\n * Register all MCP tools using the SDK's registerTool API\n */\n private registerTools(): void {\n // docs_search - Search across documentation\n this.mcpServer.registerTool(\n 'docs_search',\n {\n description:\n 'Search the documentation for relevant pages. Returns matching documents with snippets and relevance scores. Use this to find information across all documentation.',\n inputSchema: {\n query: z.string().min(1).describe('The search query string'),\n limit: z\n .number()\n .int()\n .min(1)\n .max(20)\n .optional()\n .default(5)\n .describe('Maximum number of results to return (1-20, default: 5)'),\n },\n },\n async ({ query, limit }) => {\n await this.initialize();\n\n if (!this.docs || !this.searchIndex) {\n return {\n content: [{ type: 'text' as const, text: 'Server not initialized. Please try again.' 
}],\n isError: true,\n };\n }\n\n const results = executeDocsSearch({ query, limit }, this.searchIndex, this.docs);\n return {\n content: [\n { type: 'text' as const, text: formatSearchResults(results, this.config.baseUrl) },\n ],\n };\n }\n );\n\n // docs_get_page - Retrieve full page content\n this.mcpServer.registerTool(\n 'docs_get_page',\n {\n description:\n 'Retrieve the complete content of a documentation page as markdown. Use this when you need the full content of a specific page.',\n inputSchema: {\n route: z\n .string()\n .min(1)\n .describe('The page route path (e.g., \"/docs/getting-started\" or \"/api/reference\")'),\n },\n },\n async ({ route }) => {\n await this.initialize();\n\n if (!this.docs) {\n return {\n content: [{ type: 'text' as const, text: 'Server not initialized. Please try again.' }],\n isError: true,\n };\n }\n\n const doc = executeDocsGetPage({ route }, this.docs);\n return {\n content: [{ type: 'text' as const, text: formatPageContent(doc, this.config.baseUrl) }],\n };\n }\n );\n\n // docs_get_section - Retrieve a specific section\n this.mcpServer.registerTool(\n 'docs_get_section',\n {\n description:\n 'Retrieve a specific section from a documentation page by its heading ID. Use this when you need only a portion of a page rather than the entire content.',\n inputSchema: {\n route: z.string().min(1).describe('The page route path'),\n headingId: z\n .string()\n .min(1)\n .describe(\n 'The heading ID of the section to extract (e.g., \"installation\", \"api-reference\")'\n ),\n },\n },\n async ({ route, headingId }) => {\n await this.initialize();\n\n if (!this.docs) {\n return {\n content: [{ type: 'text' as const, text: 'Server not initialized. Please try again.' }],\n isError: true,\n };\n }\n\n const result = executeDocsGetSection({ route, headingId }, this.docs);\n return {\n content: [\n {\n type: 'text' as const,\n text: formatSectionContent(result, headingId, this.config.baseUrl),\n },\n ],\n };\n }\n );\n }\n\n /**\n * Load docs and search index\n *\n * For file-based config: reads from disk\n * For data config: uses pre-loaded data directly\n */\n async initialize(): Promise<void> {\n if (this.initialized) {\n return;\n }\n\n try {\n if (isDataConfig(this.config)) {\n // Pre-loaded data mode (Cloudflare Workers, etc.)\n this.docs = this.config.docs;\n this.searchIndex = await importSearchIndex(this.config.searchIndexData);\n } else if (isFileConfig(this.config)) {\n // File-based mode (Node.js)\n if (await fs.pathExists(this.config.docsPath)) {\n this.docs = await fs.readJson(this.config.docsPath);\n } else {\n throw new Error(`Docs file not found: ${this.config.docsPath}`);\n }\n\n if (await fs.pathExists(this.config.indexPath)) {\n const indexData = await fs.readJson(this.config.indexPath);\n this.searchIndex = await importSearchIndex(indexData);\n } else {\n throw new Error(`Search index not found: ${this.config.indexPath}`);\n }\n } else {\n throw new Error('Invalid server config: must provide either file paths or pre-loaded data');\n }\n\n this.initialized = true;\n } catch (error) {\n console.error('[MCP] Failed to initialize:', error);\n throw error;\n }\n }\n\n /**\n * Handle an HTTP request using the MCP SDK's transport\n *\n * This method is designed for serverless environments (Vercel, Netlify).\n * It creates a stateless transport instance and processes the request.\n *\n * @param req - Node.js IncomingMessage or compatible request object\n * @param res - Node.js ServerResponse or compatible response object\n * @param parsedBody - Optional 
pre-parsed request body\n */\n async handleHttpRequest(\n req: IncomingMessage,\n res: ServerResponse,\n parsedBody?: unknown\n ): Promise<void> {\n await this.initialize();\n\n // Create a stateless transport for this request\n // enableJsonResponse: true means we get simple JSON responses instead of SSE\n const transport = new StreamableHTTPServerTransport({\n sessionIdGenerator: undefined, // Stateless mode - no session tracking\n enableJsonResponse: true, // Return JSON instead of SSE streams\n });\n\n // Connect the server to this transport\n await this.mcpServer.connect(transport);\n\n try {\n // Let the transport handle the request\n await transport.handleRequest(req, res, parsedBody);\n } finally {\n // Clean up the transport after request\n await transport.close();\n }\n }\n\n /**\n * Handle a Web Standard Request (Cloudflare Workers, Deno, Bun)\n *\n * This method is designed for Web Standard environments that use\n * the Fetch API Request/Response pattern.\n *\n * @param request - Web Standard Request object\n * @returns Web Standard Response object\n */\n async handleWebRequest(request: Request): Promise<Response> {\n await this.initialize();\n\n // Create a stateless transport for Web Standards\n const transport = new WebStandardStreamableHTTPServerTransport({\n sessionIdGenerator: undefined, // Stateless mode\n enableJsonResponse: true,\n });\n\n // Connect the server to this transport\n await this.mcpServer.connect(transport);\n\n try {\n // Let the transport handle the request and return the response\n return await transport.handleRequest(request);\n } finally {\n // Clean up the transport after request\n await transport.close();\n }\n }\n\n /**\n * Get server status information\n *\n * Useful for health checks and debugging\n */\n async getStatus(): Promise<{\n name: string;\n version: string;\n initialized: boolean;\n docCount: number;\n baseUrl?: string;\n }> {\n return {\n name: this.config.name,\n version: this.config.version ?? '1.0.0',\n initialized: this.initialized,\n docCount: this.docs ? 
Object.keys(this.docs).length : 0,\n baseUrl: this.config.baseUrl,\n };\n }\n\n /**\n * Get the underlying McpServer instance\n *\n * Useful for advanced use cases like custom transports\n */\n getMcpServer(): McpServer {\n return this.mcpServer;\n }\n}\n","#!/usr/bin/env node\n/**\n * MCP Verification CLI\n *\n * Verifies that the MCP build output is valid and the server works correctly.\n *\n * Usage:\n * npx docusaurus-mcp-verify [buildDir]\n *\n * Options:\n * buildDir Path to Docusaurus build output (default: ./build)\n */\n\nimport fs from 'fs-extra';\nimport path from 'path';\nimport { McpDocsServer } from '../mcp/server.js';\n\ninterface VerifyResult {\n success: boolean;\n docsFound: number;\n errors: string[];\n warnings: string[];\n}\n\n/**\n * Verify the MCP build output\n */\nasync function verifyBuild(buildDir: string): Promise<VerifyResult> {\n const result: VerifyResult = {\n success: true,\n docsFound: 0,\n errors: [],\n warnings: [],\n };\n\n const mcpDir = path.join(buildDir, 'mcp');\n\n // Check if MCP directory exists\n if (!(await fs.pathExists(mcpDir))) {\n result.errors.push(`MCP directory not found: ${mcpDir}`);\n result.errors.push('Did you run \"npm run build\" with the MCP plugin configured?');\n result.success = false;\n return result;\n }\n\n // Check required files\n const requiredFiles = ['docs.json', 'search-index.json', 'manifest.json'];\n\n for (const file of requiredFiles) {\n const filePath = path.join(mcpDir, file);\n if (!(await fs.pathExists(filePath))) {\n result.errors.push(`Required file missing: ${filePath}`);\n result.success = false;\n }\n }\n\n if (!result.success) {\n return result;\n }\n\n // Validate docs.json\n try {\n const docsPath = path.join(mcpDir, 'docs.json');\n const docs = await fs.readJson(docsPath);\n\n if (typeof docs !== 'object' || docs === null) {\n result.errors.push('docs.json is not a valid object');\n result.success = false;\n } else {\n result.docsFound = Object.keys(docs).length;\n\n if (result.docsFound === 0) {\n result.warnings.push('docs.json contains no documents');\n }\n\n // Validate document structure\n for (const [route, doc] of Object.entries(docs)) {\n const d = doc as Record<string, unknown>;\n if (!d.title || typeof d.title !== 'string') {\n result.warnings.push(`Document ${route} is missing a title`);\n }\n if (!d.markdown || typeof d.markdown !== 'string') {\n result.warnings.push(`Document ${route} is missing markdown content`);\n }\n }\n }\n } catch (error) {\n result.errors.push(`Failed to parse docs.json: ${(error as Error).message}`);\n result.success = false;\n }\n\n // Validate search-index.json\n try {\n const indexPath = path.join(mcpDir, 'search-index.json');\n const indexData = await fs.readJson(indexPath);\n\n if (typeof indexData !== 'object' || indexData === null) {\n result.errors.push('search-index.json is not a valid object');\n result.success = false;\n }\n } catch (error) {\n result.errors.push(`Failed to parse search-index.json: ${(error as Error).message}`);\n result.success = false;\n }\n\n // Validate manifest.json\n try {\n const manifestPath = path.join(mcpDir, 'manifest.json');\n const manifest = await fs.readJson(manifestPath);\n\n if (!manifest.name || typeof manifest.name !== 'string') {\n result.warnings.push('manifest.json is missing server name');\n }\n if (!manifest.version || typeof manifest.version !== 'string') {\n result.warnings.push('manifest.json is missing server version');\n }\n } catch (error) {\n result.errors.push(`Failed to parse manifest.json: ${(error as 
Error).message}`);\n result.success = false;\n }\n\n return result;\n}\n\n/**\n * Test the MCP server with the build output\n */\nasync function testServer(buildDir: string): Promise<{ success: boolean; message: string }> {\n const mcpDir = path.join(buildDir, 'mcp');\n const docsPath = path.join(mcpDir, 'docs.json');\n const indexPath = path.join(mcpDir, 'search-index.json');\n const manifestPath = path.join(mcpDir, 'manifest.json');\n\n try {\n const manifest = await fs.readJson(manifestPath);\n const docs = await fs.readJson(docsPath);\n const searchIndexData = await fs.readJson(indexPath);\n\n const server = new McpDocsServer({\n name: manifest.name || 'test-docs',\n version: manifest.version || '1.0.0',\n docs,\n searchIndexData,\n });\n\n await server.initialize();\n const status = await server.getStatus();\n\n if (!status.initialized) {\n return { success: false, message: 'Server failed to initialize' };\n }\n\n if (status.docCount === 0) {\n return { success: false, message: 'Server has no documents loaded' };\n }\n\n return {\n success: true,\n message: `Server initialized with ${status.docCount} documents`,\n };\n } catch (error) {\n return {\n success: false,\n message: `Server test failed: ${(error as Error).message}`,\n };\n }\n}\n\n/**\n * Main CLI entry point\n */\nasync function main(): Promise<void> {\n const args = process.argv.slice(2);\n const buildDir = args[0] || './build';\n\n console.log('');\n console.log('🔍 MCP Build Verification');\n console.log('='.repeat(50));\n console.log(`Build directory: ${path.resolve(buildDir)}`);\n console.log('');\n\n // Verify build output\n console.log('📁 Checking build output...');\n const verifyResult = await verifyBuild(buildDir);\n\n if (verifyResult.errors.length > 0) {\n console.log('');\n console.log('❌ Errors:');\n for (const error of verifyResult.errors) {\n console.log(` • ${error}`);\n }\n }\n\n if (verifyResult.warnings.length > 0) {\n console.log('');\n console.log('⚠️ Warnings:');\n for (const warning of verifyResult.warnings) {\n console.log(` • ${warning}`);\n }\n }\n\n if (!verifyResult.success) {\n console.log('');\n console.log('❌ Build verification failed');\n process.exit(1);\n }\n\n console.log(` ✓ Found ${verifyResult.docsFound} documents`);\n console.log(' ✓ All required files present');\n console.log(' ✓ File structure valid');\n\n // Test server\n console.log('');\n console.log('🚀 Testing MCP server...');\n const serverResult = await testServer(buildDir);\n\n if (!serverResult.success) {\n console.log(` ❌ ${serverResult.message}`);\n console.log('');\n console.log('❌ Server test failed');\n process.exit(1);\n }\n\n console.log(` ✓ ${serverResult.message}`);\n\n console.log('');\n console.log('✅ All checks passed!');\n console.log('');\n console.log('Next steps:');\n console.log(' 1. Deploy your site to a hosting provider');\n console.log(' 2. Configure MCP endpoint (see README for platform guides)');\n console.log(' 3. Connect your AI tools to the MCP server');\n console.log('');\n\n process.exit(0);\n}\n\nmain().catch((error) => {\n console.error('Unexpected error:', error);\n process.exit(1);\n});\n"]}
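
The sources embedded in this map include the verify CLI, which loads the plugin's build artifacts and boots the MCP server. A minimal sketch of that flow, based on the embedded verify.ts source (the import specifier is an assumption; verify.ts itself imports the class from ../mcp/server.js):

// Sketch of the verification flow shown in the embedded verify.ts source.
// The import specifier is an assumption; verify.ts itself imports '../mcp/server.js'.
import fs from 'fs-extra';
import path from 'path';
import { McpDocsServer } from 'docusaurus-plugin-mcp-server';

async function loadServer(buildDir: string) {
  const mcpDir = path.join(buildDir, 'mcp');

  // The plugin writes docs.json, search-index.json and manifest.json into <build>/mcp.
  const manifest = await fs.readJson(path.join(mcpDir, 'manifest.json'));
  const docs = await fs.readJson(path.join(mcpDir, 'docs.json'));
  const searchIndexData = await fs.readJson(path.join(mcpDir, 'search-index.json'));

  // Pre-loaded data mode: docs and the serialized search index are passed in directly.
  const server = new McpDocsServer({
    name: manifest.name || 'docs',
    version: manifest.version || '1.0.0',
    docs,
    searchIndexData,
  });

  await server.initialize();
  return server.getStatus(); // { name, version, initialized, docCount, baseUrl? }
}
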
@@ -0,0 +1,190 @@
+ /**
+  * Configuration options for the MCP server plugin
+  */
+ interface McpServerPluginOptions {
+   /** Output directory for MCP artifacts (relative to build dir). Default: 'mcp' */
+   outputDir?: string;
+   /** CSS selectors for content extraction, in order of priority */
+   contentSelectors?: string[];
+   /** CSS selectors for elements to remove from content before processing */
+   excludeSelectors?: string[];
+   /** Minimum content length (in characters) to consider a page valid. Default: 50 */
+   minContentLength?: number;
+   /** Server configuration */
+   server?: {
+     /** Name of the MCP server */
+     name?: string;
+     /** Version of the MCP server */
+     version?: string;
+   };
+   /** Routes to exclude from processing (glob patterns) */
+   excludeRoutes?: string[];
+ }
+ /**
+  * Resolved plugin options with defaults applied
+  */
+ interface ResolvedPluginOptions {
+   outputDir: string;
+   contentSelectors: string[];
+   excludeSelectors: string[];
+   minContentLength: number;
+   server: {
+     name: string;
+     version: string;
+   };
+   excludeRoutes: string[];
+ }
+ /**
+  * A processed documentation page
+  */
+ interface ProcessedDoc {
+   /** URL route path (e.g., /docs/getting-started) */
+   route: string;
+   /** Page title extracted from HTML */
+   title: string;
+   /** Meta description if available */
+   description: string;
+   /** Full page content as markdown */
+   markdown: string;
+   /** Headings with IDs for section navigation */
+   headings: DocHeading[];
+ }
+ /**
+  * A heading within a document
+  */
+ interface DocHeading {
+   /** Heading level (1-6) */
+   level: number;
+   /** Heading text content */
+   text: string;
+   /** Anchor ID for linking */
+   id: string;
+   /** Character offset where this section starts in the markdown */
+   startOffset: number;
+   /** Character offset where this section ends in the markdown */
+   endOffset: number;
+ }
+ /**
+  * A flattened route from Docusaurus
+  */
+ interface FlattenedRoute {
+   /** The URL path */
+   path: string;
+   /** Path to the corresponding HTML file */
+   htmlPath: string;
+ }
+ /**
+  * Search result from FlexSearch
+  */
+ interface SearchResult {
+   /** Route of the matching document */
+   route: string;
+   /** Title of the document */
+   title: string;
+   /** Relevance score */
+   score: number;
+   /** Snippet of matching content */
+   snippet: string;
+   /** Matching headings if any */
+   matchingHeadings?: string[];
+ }
+ /**
+  * Manifest metadata for the MCP artifacts
+  */
+ interface McpManifest {
+   /** Plugin version */
+   version: string;
+   /** Build timestamp */
+   buildTime: string;
+   /** Number of documents indexed */
+   docCount: number;
+   /** Server name */
+   serverName: string;
+   /** Base URL of the documentation site */
+   baseUrl?: string;
+ }
+ /**
+  * MCP Server configuration for file-based loading
+  */
+ interface McpServerFileConfig {
+   /** Path to docs.json file */
+   docsPath: string;
+   /** Path to search-index.json file */
+   indexPath: string;
+   /** Server name */
+   name: string;
+   /** Server version */
+   version?: string;
+   /** Base URL for constructing full page URLs (e.g., https://docs.example.com) */
+   baseUrl?: string;
+ }
+ /**
+  * MCP Server configuration for pre-loaded data (e.g., Cloudflare Workers)
+  */
+ interface McpServerDataConfig {
+   /** Pre-loaded docs data */
+   docs: Record<string, ProcessedDoc>;
+   /** Pre-loaded search index data (exported from FlexSearch via exportSearchIndex) */
+   searchIndexData: Record<string, unknown>;
+   /** Server name */
+   name: string;
+   /** Server version */
+   version?: string;
+   /** Base URL for constructing full page URLs (e.g., https://docs.example.com) */
+   baseUrl?: string;
+ }
+ /**
+  * MCP Server configuration - supports both file-based and pre-loaded data modes
+  */
+ type McpServerConfig = McpServerFileConfig | McpServerDataConfig;
+ /**
+  * Internal representation of the docs index
+  */
+ interface DocsIndex {
+   /** All processed documents keyed by route */
+   docs: Record<string, ProcessedDoc>;
+   /** Manifest metadata */
+   manifest: McpManifest;
+ }
+ /**
+  * Input parameters for docs_search tool
+  */
+ interface DocsSearchParams {
+   /** Search query string */
+   query: string;
+   /** Maximum number of results (default: 5, max: 20) */
+   limit?: number;
+ }
+ /**
+  * Input parameters for docs_get_page tool
+  */
+ interface DocsGetPageParams {
+   /** Route path of the page */
+   route: string;
+ }
+ /**
+  * Input parameters for docs_get_section tool
+  */
+ interface DocsGetSectionParams {
+   /** Route path of the page */
+   route: string;
+   /** Heading ID of the section */
+   headingId: string;
+ }
+ /**
+  * Content extraction result from HTML
+  */
+ interface ExtractedContent {
+   /** Page title */
+   title: string;
+   /** Meta description */
+   description: string;
+   /** Main content as HTML */
+   contentHtml: string;
+ }
+ /**
+  * Default plugin options
+  */
+ declare const DEFAULT_OPTIONS: ResolvedPluginOptions;
+
+ export { type DocHeading as D, type ExtractedContent as E, type FlattenedRoute as F, type McpServerConfig as M, type ProcessedDoc as P, type ResolvedPluginOptions as R, type SearchResult as S, type McpServerPluginOptions as a, DEFAULT_OPTIONS as b, type DocsGetPageParams as c, type DocsGetSectionParams as d, type DocsIndex as e, type DocsSearchParams as f, type McpManifest as g, type McpServerDataConfig as h, type McpServerFileConfig as i };
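The McpServerConfig union declared above accepts either file paths or pre-loaded objects. The following is a minimal sketch of the two shapes, typed against these declarations; the paths, names, and URLs are placeholder values, and the assumption that McpServerFileConfig, McpServerDataConfig, and ProcessedDoc are importable under these names from the package root is mine (this chunk only re-exports them under single-letter aliases).

// Assumption: the public entry point re-exports these types by their full names.
import type {
  McpServerDataConfig,
  McpServerFileConfig,
  ProcessedDoc,
} from 'docusaurus-plugin-mcp-server';

// File-based mode: point the server at the artifacts emitted under build/mcp.
const fileConfig: McpServerFileConfig = {
  docsPath: './build/mcp/docs.json',
  indexPath: './build/mcp/search-index.json',
  name: 'my-docs',
  version: '1.0.0',
  baseUrl: 'https://docs.example.com',
};

// Data mode: hand the server pre-loaded objects, e.g. when the artifacts are
// bundled into a Cloudflare Worker where there is no filesystem.
declare const bundledDocs: Record<string, ProcessedDoc>;
declare const bundledIndex: Record<string, unknown>;

const dataConfig: McpServerDataConfig = {
  docs: bundledDocs,
  searchIndexData: bundledIndex,
  name: 'my-docs',
  baseUrl: 'https://docs.example.com',
};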
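For completeness, the McpServerPluginOptions interface above corresponds to the options object a site author passes when registering the plugin in docusaurus.config. The sketch below is illustrative only: it assumes Docusaurus resolves the plugin by its package name, every option value shown is a placeholder, and omitted fields fall back to DEFAULT_OPTIONS.

// docusaurus.config.ts (sketch; site fields and option values are illustrative)
import type { Config } from '@docusaurus/types';

const config: Config = {
  title: 'My Docs',
  url: 'https://docs.example.com',
  baseUrl: '/',
  plugins: [
    [
      // Assumption: the plugin is resolved by its package name.
      'docusaurus-plugin-mcp-server',
      {
        outputDir: 'mcp',
        minContentLength: 50,
        excludeRoutes: ['/blog/**', '/search'],
        server: { name: 'my-docs', version: '1.0.0' },
      },
    ],
  ],
};

export default config;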