stream-markdown-parser 0.0.2 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","names":[],"sources":["../src/factory.ts","../src/types.ts","../src/parser/inline-parsers/index.ts","../src/parser/index.ts","../src/config.ts","../src/findMatchingClose.ts","../src/parser/inline-parsers/fence-parser.ts","../src/plugins/containers.ts","../src/plugins/isMathLike.ts","../src/plugins/math.ts","../src/index.ts"],"sourcesContent":[],"mappings":";;;UAOiB,cAAA,SAAuB;sBAClB;EADL,UAAA,CAAA,EAAA,OAAe;;;;ICPf,iBAAQ,CAAA,EAAA,OAAA;EAQR,CAAA;AAMjB;;;UAdiB,QAAA;;;EDOA,OAAA,CAAA,EAAA,OAAe;;;;ACPf,UAQA,QAAA,SAAiB,QART,CAAA;EAQR,IAAA,EAAA,MAAS;EAMT,OAAA,EAAA,MAAY;EAOZ,MAAA,CAAA,EAAA,OAAc;AAM/B;AAQiB,UArBA,WAAA,SAAoB,QAqBC,CAAA;EAKrB,IAAA,EAAA,SAAA;EAiBA,KAAA,EAAA,MAAA;EAKA,IAAA,EAAA,MAAS;EAQT,QAAA,EApDL,UAoDe,EAAA;AAO3B;AAIiB,UA5DA,aAAA,SAAsB,QA4DN,CAAA;EASrB,IAAA,EAAA,WAAc;EAST,QAAA,EA5EL,UA4EoB,EAAA;EAKf,aAAU,CAAA,EAAA,OAAA;;AAGnB,UAhFS,QAAA,SAAiB,QAgF1B,CAAA;EAH2B,IAAA,EAAA,MAAA;EAAQ,OAAA,EAAA,OAAA;EAM1B,KAAA,CAAA,EAAA,MAAA;EAKA,KAAA,EAnFR,YAmFsB,EAAA;AAM/B;AAKiB,UA3FA,YAAA,SAAqB,QA2FF,CAAA;EAE5B,IAAA,EAAA,WAAA;EACM,QAAA,EA5FF,UA4FE,EAAA;;AAHsC,UAtFnC,aAAA,SAAsB,QAsFa,CAAA;EAMnC,IAAA,EAAA,YAAa;EAMb,QAAA,EAAA,MAAA;EAKA,IAAA,EAAA,MAAA;EAOA,SAAA,CAAA,EAAA,MAAW;EAKX,OAAA,CAAA,EAAA,MAAa;EAKb,OAAA,CAAA,EAAA,OAAA;EAKA,IAAA,CAAA,EAAA,OAAA;EAKA,YAAA,CAAW,EAAA,MAAA;EAKX,WAAA,CAAA,EAAA,MAAc;EAKd,GAAA,EAAA,MAAA;AAKjB;AAKiB,UArIA,cAAA,SAAuB,QAqIW,CAAA;EAKlC,IAAA,EAAA,aAAU;EAMV,IAAA,EAAA,MAAA;AAIjB;AAKiB,UApJA,QAAA,SAAiB,QAoJa,CAAA;EAK9B,IAAA,EAAA,MAAA;EAMA,IAAA,EAAA,MAAA;EAcL,KAAA,EAAA,MAAU,GAAA,IAAA;EAChB,IAAA,EAAA,MAAA;EACA,QAAA,EA1KM,UA0KN,EAAA;;AAEA,UAzKW,SAAA,SAAkB,QAyK7B,CAAA;EACA,IAAA,EAAA,OAAA;EACA,GAAA,EAAA,MAAA;EACA,GAAA,EAAA,MAAA;EACA,KAAA,EAAA,MAAA,GAAA,IAAA;;AAEA,UAxKW,iBAAA,SAA0B,QAwKrC,CAAA;EACA,IAAA,EAAA,gBAAA;;AAEA,UAvKW,gBAAA,CAuKX;EACA,IAAA,EAAA;IACA,IAAA,EAAA,YAAA;IACA,QAAA,EAAA,MAAA;IACA,IAAA,EAAA,MAAA;IACA,OAAA,CAAA,EAAA,OAAA;EACA,CAAA;;AAEA,KAtKM,cAAA,GAsKN;EACA,OAAA,EAAA,MAAA;EACA,KAAA,CAAA,EAAA,SAAA;CACA,GAAA;EACA,OAAA,CAAA,EAAA,SAAA;EACA,KAAA,EApKK,QAoKL,EAAA;CACA;AACA,UApKW,cAAA,SAAuB,QAoKlC,CAAA;EACA,IAAA,EAAA,YAAA;EACA,QAAA,EApKM,UAoKN,EAAA;;AAEA,UAnKW,SAAA,SAAkB,QAmK7B,CAAA;EACA,IAAA,EAAA,OAAA;EACA,MAAA,EAnKI,YAmKJ;EAAM,IAAA,EAlKJ,YAkKI,EAAA;AACZ;AAkCY,UAlMK,YAAA,SAAqB,QAkMK,CAAA;EAE1B,IAAA,EAAA,WAAY;EAKjB,KAAA,EAvMH,aAuMG,EAAA;;UApMK,aAAA,SAAsB;;EC3FvB,MAAA,EAAA,OAAA;EAA0B,QAAA,ED8F9B,UC9F8B,EAAA;;AAA2D,UDiGpF,kBAAA,SAA2B,QCjGyD,CAAA;EAAU,IAAA,EAAA,iBAAA;SDmGtG;;UAGQ,kBAAA,SAA2B;EE3G5B,IAAA,EAAA,iBAAA;EAEV,IAAA,EF2GE,UE3GF,EAAA;EACK,UAAA,EF2GG,UE3GH,EAAA;;AACE,UF6GI,YAAA,SAAqB,QE7GzB,CAAA;EAyCG,IAAA,EAAA,UAAa;;YFuEjB;;AG9HK,UHiIA,qBAAA,SAA8B,QGjInB,CAAA;EASZ,IAAA,EAAA,oBAAqB;;;UH6HpB,cAAA,SAAuB;EI/IxB,IAAA,EAAA,YAAA;;;YJmJJ;AKtHZ;ULyHiB,UAAA,SAAmB;;YAExB;AMrJZ;UNwJiB,YAAA,SAAqB;;YAE1B;AO7JZ;AAqBa,UP2II,iBAAA,SAA0B,QO3I+E,CAAA;EAuB1G,IAAA,EAAA,eAAU;YPsHd;;UAGK,aAAA,SAAsB;EQpJ1B,IAAA,EAAA,WAuDZ;EAmCe,QAAA,ER4DJ,UQ5DI,EAAA;AA+ChB;URgBiB,UAAA,SAAmB;;YAExB;ASxIZ;AACW,UT0IM,aAAA,SAAsB,QS1I5B,CAAA;EACU,IAAA,EAAA,WAAA;EAAX,QAAA,ET2IE,US3IF,EAAA;;AAFkC,UTgJ3B,eAAA,SAAwB,QShJG,CAAA;EAAc,IAAA,EAAA,aAAA;EAU1C,QAAA,ETwIJ,USxIe,EAAA;AAwJ3B;AAUgB,UTvBC,YAAA,SAAqB,QSuBO,CAAA;;;;UTlB5B,iBAAA,SAA0B;;;;UAK1B,SAAA,SAAkB;;;;;UAMlB,aAAA,SAAsB;;;UAItB,cAAA,SAAuB;;;;UAKvB,aAAA,SAAsB;;;;UAKtB,aAAA,SAAsB;;;;UAMtB,aAAA;;;;;;aAMJ;;;;;;;KAQD,UAAA,GACN,WACA,cACA,gBACA,WACA,eACA,gBACA,iBACA,WACA,YACA,oBACA,iBACA,YACA,eACA,gBACA,aACA,eACA,oBACA,gBACA,aACA,gBACA,kBACA,eACA,oBACA,YACA,qBACA,qBACA,eACA,wBACA,iBACA,gBACA,iBACA,gBACA,gBACA;UACW,gBAAA;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;KAkCL,mBAAA,YAA+B,oBAAoB;UAE9C,YAAA;uBACM;wBACC;;KAGZ,sBAAA,WAAiC,iBAAiB;;;iBC/R9C,iBAAA,SAA0B,2CAA2C,gBAAgB;;;AFjBpF,iBGYD,wBAAA,CHZ8B,QAAA,EAAA,MAAA,EAAA,EAAA,EGcxC,UHdwC,EAAA,OAAA,CAAA,EGenC,YHfmC,CAAA,EGgB3C,UHhB2C,EAAA;iBGyD9B,aAAA,SAAsB,kBAAkB;;;;;;AHzDxD;;;;ACPA;AAQA;AAMiB,UGLA,WAAA,CHKY;EAOZ;EAMA,QAAA,CAAA,EAAS,SAAA,MAKjB,EAAA;EAGQ;EAKA,iBAAc,CAAA,EAAA,OAAA;AAiB/B;AAKiB,iBG5CD,qBAAA,CH4CkB,IAAQ,EG5CE,WH4CF,GAAA,SAAA,CAAA,EAAA,IAAA;;;iBI9D1B,iBAAA;;;iBC6BA,eAAA,QAAuB,gBAAgB;;;iBC1BvC,eAAA,KAAoB;;;cCHvB;cAqBA;iBAuBG,UAAA;;;cC3BH;iBA0FG,6BAAA,mBAAgD;iBA+ChD,SAAA,KAAc,uBAAuB;;;ARvHpC,USCA,kBAAA,SAA2B,cTDE,CAAA;EAK7B,MAAA,CAAA,ESHN,KTGM,CAAA,GAAc,CAAA;EAiBd,KAAA,CAAA,ESnBP,KTmBO,CAAA,CAAA,EAAe,ESnBX,UTmBmB,EAAA,GAAA,IAAQ,CAAA;EAK/B;AAQjB;AAOA;AAIA;EASY,IAAA,CAAA,EAAA,CAAA,CAAA,GAAA,EAAA,MAAc,EAAA,GAAA,MAOf,CAAA,GStD0B,MTsDlB,CAAA,MAAA,EAAA,MAAA,CAAA;AAEnB;AAKiB,iBS1DD,WAAA,CT0DW,KAAA,CAAA,EAAA,MAAA,EAAA,OAAA,CAAA,ES1DkD,kBT0DlD,CAAA,ES1DyE,UT0DzE;AAEjB,iBS4FM,iBAAA,CAAA,CT5FN,ES4FuB,UT5FvB;AACF,iBSqGQ,cAAA,CTrGR,EAAA,ESqG2B,UTrG3B,EAAA,OAAA,EAAA,MAAA,CAAA,EAAA,MAAA"}
+ {"version":3,"file":"index.d.ts","names":[],"sources":["../src/factory.ts","../src/types.ts","../src/parser/inline-parsers/index.ts","../src/parser/index.ts","../src/config.ts","../src/findMatchingClose.ts","../src/parser/inline-parsers/fence-parser.ts","../src/plugins/containers.ts","../src/plugins/isMathLike.ts","../src/plugins/math.ts","../src/index.ts"],"sourcesContent":[],"mappings":";;;UAOiB,cAAA,SAAuB;sBAClB;EADL,UAAA,CAAA,EAAA,OAAe;;;;ICPf,iBAAQ,CAAA,EAAA,OAAA;EAQR,CAAA;AAMjB;;;UAdiB,QAAA;;;EDOA,OAAA,CAAA,EAAA,OAAe;;;;ACPf,UAQA,QAAA,SAAiB,QART,CAAA;EAQR,IAAA,EAAA,MAAS;EAMT,OAAA,EAAA,MAAY;EAOZ,MAAA,CAAA,EAAA,OAAc;AAM/B;AAQiB,UArBA,WAAA,SAAoB,QAqBC,CAAA;EAKrB,IAAA,EAAA,SAAA;EAiBA,KAAA,EAAA,MAAA;EAKA,IAAA,EAAA,MAAS;EAQT,QAAA,EApDL,UAoDe,EAAA;AAO3B;AAIiB,UA5DA,aAAA,SAAsB,QA4DN,CAAA;EASrB,IAAA,EAAA,WAAc;EAST,QAAA,EA5EL,UA4EoB,EAAA;EAKf,aAAU,CAAA,EAAA,OAAA;;AAGnB,UAhFS,QAAA,SAAiB,QAgF1B,CAAA;EAH2B,IAAA,EAAA,MAAA;EAAQ,OAAA,EAAA,OAAA;EAM1B,KAAA,CAAA,EAAA,MAAA;EAKA,KAAA,EAnFR,YAmFsB,EAAA;AAM/B;AAKiB,UA3FA,YAAA,SAAqB,QA2FF,CAAA;EAE5B,IAAA,EAAA,WAAA;EACM,QAAA,EA5FF,UA4FE,EAAA;;AAHsC,UAtFnC,aAAA,SAAsB,QAsFa,CAAA;EAMnC,IAAA,EAAA,YAAa;EAMb,QAAA,EAAA,MAAA;EAKA,IAAA,EAAA,MAAA;EAOA,SAAA,CAAA,EAAA,MAAW;EAKX,OAAA,CAAA,EAAA,MAAa;EAKb,OAAA,CAAA,EAAA,OAAA;EAKA,IAAA,CAAA,EAAA,OAAA;EAKA,YAAA,CAAW,EAAA,MAAA;EAKX,WAAA,CAAA,EAAA,MAAc;EAKd,GAAA,EAAA,MAAA;AAKjB;AAKiB,UArIA,cAAA,SAAuB,QAqIW,CAAA;EAKlC,IAAA,EAAA,aAAU;EAMV,IAAA,EAAA,MAAA;AAIjB;AAKiB,UApJA,QAAA,SAAiB,QAoJa,CAAA;EAK9B,IAAA,EAAA,MAAA;EAMA,IAAA,EAAA,MAAA;EAcL,KAAA,EAAA,MAAU,GAAA,IAAA;EAChB,IAAA,EAAA,MAAA;EACA,QAAA,EA1KM,UA0KN,EAAA;;AAEA,UAzKW,SAAA,SAAkB,QAyK7B,CAAA;EACA,IAAA,EAAA,OAAA;EACA,GAAA,EAAA,MAAA;EACA,GAAA,EAAA,MAAA;EACA,KAAA,EAAA,MAAA,GAAA,IAAA;;AAEA,UAxKW,iBAAA,SAA0B,QAwKrC,CAAA;EACA,IAAA,EAAA,gBAAA;;AAEA,UAvKW,gBAAA,CAuKX;EACA,IAAA,EAAA;IACA,IAAA,EAAA,YAAA;IACA,QAAA,EAAA,MAAA;IACA,IAAA,EAAA,MAAA;IACA,OAAA,CAAA,EAAA,OAAA;EACA,CAAA;;AAEA,KAtKM,cAAA,GAsKN;EACA,OAAA,EAAA,MAAA;EACA,KAAA,CAAA,EAAA,SAAA;CACA,GAAA;EACA,OAAA,CAAA,EAAA,SAAA;EACA,KAAA,EApKK,QAoKL,EAAA;CACA;AACA,UApKW,cAAA,SAAuB,QAoKlC,CAAA;EACA,IAAA,EAAA,YAAA;EACA,QAAA,EApKM,UAoKN,EAAA;;AAEA,UAnKW,SAAA,SAAkB,QAmK7B,CAAA;EACA,IAAA,EAAA,OAAA;EACA,MAAA,EAnKI,YAmKJ;EAAM,IAAA,EAlKJ,YAkKI,EAAA;AACZ;AAkCY,UAlMK,YAAA,SAAqB,QAkMK,CAAA;EAE1B,IAAA,EAAA,WAAY;EAKjB,KAAA,EAvMH,aAuMG,EAAA;;UApMK,aAAA,SAAsB;;EC3FvB,MAAA,EAAA,OAAA;EAA0B,QAAA,ED8F9B,UC9F8B,EAAA;;AAA2D,UDiGpF,kBAAA,SAA2B,QCjGyD,CAAA;EAAU,IAAA,EAAA,iBAAA;SDmGtG;;UAGQ,kBAAA,SAA2B;EE3G5B,IAAA,EAAA,iBAAA;EAEV,IAAA,EF2GE,UE3GF,EAAA;EACK,UAAA,EF2GG,UE3GH,EAAA;;AACE,UF6GI,YAAA,SAAqB,QE7GzB,CAAA;EAwCG,IAAA,EAAA,UAAa;;YFwEjB;;AG9HK,UHiIA,qBAAA,SAA8B,QGjInB,CAAA;EASZ,IAAA,EAAA,oBAAqB;;;UH6HpB,cAAA,SAAuB;EI/IxB,IAAA,EAAA,YAAA;;;YJmJJ;AKtHZ;ULyHiB,UAAA,SAAmB;;YAExB;AMrJZ;UNwJiB,YAAA,SAAqB;;YAE1B;AO7JZ;AAqBa,UP2II,iBAAA,SAA0B,QO3I+E,CAAA;EAuB1G,IAAA,EAAA,eAAU;YPsHd;;UAGK,aAAA,SAAsB;EQpJ1B,IAAA,EAAA,WAuDZ;EAmCe,QAAA,ER4DJ,UQ5DI,EAAA;AA+ChB;URgBiB,UAAA,SAAmB;;YAExB;ASxIZ;AACW,UT0IM,aAAA,SAAsB,QS1I5B,CAAA;EACU,IAAA,EAAA,WAAA;EAAX,QAAA,ET2IE,US3IF,EAAA;;AAFkC,UTgJ3B,eAAA,SAAwB,QShJG,CAAA;EAAc,IAAA,EAAA,aAAA;EAU1C,QAAA,ETwIJ,USxIe,EAAA;AAwJ3B;AAUgB,UTvBC,YAAA,SAAqB,QSuBO,CAAA;;;;UTlB5B,iBAAA,SAA0B;;;;UAK1B,SAAA,SAAkB;;;;;UAMlB,aAAA,SAAsB;;;UAItB,cAAA,SAAuB;;;;UAKvB,aAAA,SAAsB;;;;UAKtB,aAAA,SAAsB;;;;UAMtB,aAAA;;;;;;aAMJ;;;;;;;KAQD,UAAA,GACN,WACA,cACA,gBACA,WACA,eACA,gBACA,iBACA,WACA,YACA,oBACA,iBACA,YACA,eACA,gBACA,aACA,eACA,oBACA,gBACA,aACA,gBACA,kBACA,eACA,oBACA,YACA,qBACA,qBACA,eACA,wBACA,iBACA,gBACA,iBACA,gBACA,gBACA;UACW,gBAAA;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;KAkCL,mBAAA,YAA+B,oBAAoB;UAE9C,YAAA;uBACM;wBACC;;KAGZ,sBAAA,WAAiC,iBAAiB;;;iBC/R9C,iBAAA,SAA0B,2CAA2C,gBAAgB;;;AFjBpF,iBGYD,wBAAA,CHZ8B,QAAA,EAAA,MAAA,EAAA,EAAA,EGcxC,UHdwC,EAAA,OAAA,CAAA,EGenC,YHfmC,CAAA,EGgB3C,UHhB2C,EAAA;iBGwD9B,aAAA,SAAsB,kBAAkB;;;;;;AHxDxD;;;;ACPA;AAQA;AAMiB,UGLA,WAAA,CHSL;EAGK;EAMA,QAAA,CAAA,EAAS,SAAA,MAKjB,EAAA;EAGQ;EAKA,iBAAc,CAAA,EAAA,OAAA;AAiB/B;AAKiB,iBG5CD,qBAAA,CH4CkB,IAAQ,EG5CE,WH4CF,GAAA,SAAA,CAAA,EAAA,IAAA;;;iBI9D1B,iBAAA;;;iBC6BA,eAAA,QAAuB,gBAAgB;;;iBC1BvC,eAAA,KAAoB;;;cCHvB;cAqBA;iBAuBG,UAAA;;;cC3BH;iBA0FG,6BAAA,mBAAgD;iBA+ChD,SAAA,KAAc,uBAAuB;;;ARvHpC,USCA,kBAAA,SAA2B,cTDE,CAAA;EAK7B,MAAA,CAAA,ESHN,KTGM,CAAA,GAAc,CAAA;EAiBd,KAAA,CAAA,ESnBP,KTmBO,CAAA,CAAA,EAAe,ESnBX,UTmBmB,EAAA,GAAA,IAAQ,CAAA;EAK/B;AAQjB;AAOA;AAIA;EASY,IAAA,CAAA,EAAA,CAAA,CAAA,GAAA,EAAA,MAAc,EAAA,GAAA,MAOf,CAAA,GStD0B,MTsDlB,CAAA,MAAA,EAAA,MAAA,CAAA;AAEnB;AAKiB,iBS1DD,WAAA,CT0DW,KAAA,CAAA,EAAA,MAAA,EAAA,OAAA,CAAA,ES1DkD,kBT0DlD,CAAA,ES1DyE,UT0DzE;AAEjB,iBS4FM,iBAAA,CAAA,CT5FN,ES4FuB,UT5FvB;AACF,iBSqGQ,cAAA,CTrGR,EAAA,ESqG2B,UTrG3B,EAAA,OAAA,EAAA,MAAA,CAAA,EAAA,MAAA"}
package/dist/index.js CHANGED
@@ -789,7 +789,7 @@ function fixLinkToken(tokens) {
  if (first.type !== "text" && !first.content.endsWith("[")) return fixLinkTokens2(tokens);
  if (tokens[tokens.length - 4].tag !== "em") return fixLinkTokens2(tokens);
  const last = tokens[tokens.length - 1];
- if (!last.content.startsWith("]")) return fixLinkTokens2(tokens);
+ if (last.type === "text" && !last.content.startsWith("]")) return fixLinkTokens2(tokens);
  const third = tokens[tokens.length - 3];
  const href = last.content.replace(/^\]\(*/, "");
  const loading = !last.content.includes(")");
@@ -820,6 +820,7 @@ function fixLinkTokens2(tokens) {
  if (tokens[length - 7].type !== "em_open") return tokens;
  const third = tokens[length - 6];
  const first = tokens[length - 8];
+ if (first.type !== "text") return tokens;
  let href = tokens[length - 2].content;
  let count = 4;
  if (length !== tokens.length) {
@@ -848,7 +849,7 @@ function fixLinkTokens2(tokens) {
  //#region src/parser/inline-parsers/fixListItem.ts
  function fixListItem(tokens) {
  const last = tokens[tokens.length - 1];
- if (/\d+\.\s*$/.test(last.content || "") && tokens[tokens.length - 2]?.tag === "br") tokens.splice(tokens.length - 1, 1);
+ if (last?.type === "text" && /\d+\.\s*$/.test(last.content || "") && tokens[tokens.length - 2]?.tag === "br") tokens.splice(tokens.length - 1, 1);
  return tokens;
  }

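Taken together, the three hunks above apply the same defensive pattern: before a fix-up inspects a token's `content`, it first confirms the token really is a `text` token, so structural tokens such as `em_open`/`em_close` (which carry no meaningful content) no longer trigger the link and list-item rewrites. A minimal illustrative sketch of that guard, using a reduced hypothetical `Token` shape rather than the package's internal `MarkdownToken` type and without reproducing the exact control flow:

```ts
// Illustrative only: a reduced token shape, not the package's MarkdownToken.
interface Token {
  type: string
  tag?: string
  content?: string
}

// Only treat the trailing token as a link tail when it is a text token;
// previously `.content.startsWith("]")` could run on a structural token.
function looksLikeLinkTail(tokens: Token[]): boolean {
  const last = tokens[tokens.length - 1]
  if (last?.type !== 'text')
    return false
  return (last.content ?? '').startsWith(']')
}

console.log(looksLikeLinkTail([{ type: 'em_close', tag: 'em' }])) // false
console.log(looksLikeLinkTail([{ type: 'text', content: '](https://example.com)' }])) // true
```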
@@ -1166,14 +1167,29 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  tokens = fixLinkToken(tokens);
  while (i < tokens.length) {
  const token = tokens[i];
+ handleToken(token);
+ }
+ function handleToken(token) {
  switch (token.type) {
  case "text": {
+ let index = result.length - 1;
  let content = token.content.replace(/\\/g, "") || "";
+ for (; index >= 0; index--) {
+ const item = result[index];
+ if (item.type === "text") {
+ currentTextNode = null;
+ content = item.content + content;
+ continue;
+ }
+ break;
+ }
+ if (index < result.length - 1) result.splice(index + 1);
+ const nextToken = tokens[i + 1];
  if (content === "`" || content === "|" || content === "$" || content === "1" || /^\*+$/.test(content) || /^\d$/.test(content)) {
  i++;
  break;
  }
- if (/[^\]]\s*\(\s*$/.test(content)) content = content.replace(/\(\s*$/, "");
+ if (!nextToken && /[^\]]\s*\(\s*$/.test(content)) content = content.replace(/\(\s*$/, "");
  if (raw?.startsWith("[") && pPreToken?.type === "list_item_open") {
  const w = content.slice(1).match(/[^\s\]]/);
  if (w === null) {
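The loop body of `parseInlineTokens` is now extracted into a local `handleToken` function (so later branches can re-enter it for leftover text, as the link hunk further down does), and a new backward scan folds any trailing `text` nodes already in `result` into the incoming content before it is re-parsed. A rough sketch of that merge step, written against a simplified result array rather than the real parser state (`absorbTrailingText` is a hypothetical name, not a package export):

```ts
interface TextNode {
  type: 'text'
  content: string
  raw: string
}
type Node = TextNode | { type: string }

// Walk backwards over trailing text nodes, fold their content into the
// incoming text, then drop them from `result` so the combined string is
// parsed in one pass.
function absorbTrailingText(result: Node[], content: string): string {
  let index = result.length - 1
  for (; index >= 0; index--) {
    const item = result[index]
    if (item.type === 'text') {
      content = (item as TextNode).content + content
      continue
    }
    break
  }
  if (index < result.length - 1)
    result.splice(index + 1)
  return content
}

const result: Node[] = [{ type: 'strong' }, { type: 'text', content: 'foo ', raw: 'foo ' }]
console.log(absorbTrailingText(result, 'bar')) // "foo bar"
console.log(result.length) // 1 (only the non-text node remains)
```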
@@ -1193,10 +1209,18 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  }
  if (/`[^`]*/.test(content)) {
  currentTextNode = null;
+ const index$1 = content.indexOf("`");
+ const _text = content.slice(0, index$1);
+ const codeContent = content.slice(index$1);
+ if (_text) result.push({
+ type: "text",
+ content: _text || "",
+ raw: _text || ""
+ });
  result.push({
  type: "inline_code",
- code: content.replace(/`/g, ""),
- raw: content || ""
+ code: codeContent.replace(/`/g, ""),
+ raw: codeContent || ""
  });
  i++;
  break;
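Previously any content matching the inline-code test was converted into a single `inline_code` node, so text before the first backtick was swallowed into the code. The new version splits at the first backtick and emits the prefix as its own `text` node. A small standalone sketch of that split (`splitInlineCode` is a hypothetical helper, not a package export):

```ts
interface InlineNode {
  type: 'text' | 'inline_code'
  content?: string
  code?: string
  raw: string
}

// Split "see `foo" into a text node ("see ") and an inline_code node ("foo").
// Assumes content contains at least one backtick (the caller's regex guarantees it).
function splitInlineCode(content: string): InlineNode[] {
  const nodes: InlineNode[] = []
  const index = content.indexOf('`')
  const text = content.slice(0, index)
  const codeContent = content.slice(index)
  if (text)
    nodes.push({ type: 'text', content: text, raw: text })
  nodes.push({
    type: 'inline_code',
    code: codeContent.replace(/`/g, ''),
    raw: codeContent,
  })
  return nodes
}

console.log(splitInlineCode('see `foo'))
// [ { type: 'text', content: 'see ', raw: 'see ' },
//   { type: 'inline_code', code: 'foo', raw: '`foo' } ]
```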
@@ -1206,8 +1230,8 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  break;
  }
  if (/[^~]*~{2,}[^~]+/.test(content)) {
- const index = content.indexOf("~~") || 0;
- const _text = content.slice(0, index);
+ const index$1 = content.indexOf("~~") || 0;
+ const _text = content.slice(0, index$1);
  if (_text) if (currentTextNode) {
  currentTextNode.content += _text;
  currentTextNode.raw += _text;
@@ -1219,7 +1243,7 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  };
  result.push(currentTextNode);
  }
- const strikethroughContent = content.slice(index);
+ const strikethroughContent = content.slice(index$1);
  currentTextNode = null;
  const { node } = parseStrikethroughToken([
  {
@@ -1252,8 +1276,8 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  break;
  }
  if (/[^*]*\*\*[^*]+/.test(content)) {
- const index = content.indexOf("*") || 0;
- const _text = content.slice(0, index);
+ const index$1 = content.indexOf("*") || 0;
+ const _text = content.slice(0, index$1);
  if (_text) if (currentTextNode) {
  currentTextNode.content += _text;
  currentTextNode.raw += _text;
@@ -1265,7 +1289,7 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  };
  result.push(currentTextNode);
  }
- const strongContent = content.slice(index);
+ const strongContent = content.slice(index$1);
  currentTextNode = null;
  const { node } = parseStrongToken([
  {
@@ -1297,8 +1321,8 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  i++;
  break;
  } else if (/[^*]*\*[^*]+/.test(content)) {
- const index = content.indexOf("*") || 0;
- const _text = content.slice(0, index);
+ const index$1 = content.indexOf("*") || 0;
+ const _text = content.slice(0, index$1);
  if (_text) if (currentTextNode) {
  currentTextNode.content += _text;
  currentTextNode.raw += _text;
@@ -1310,7 +1334,7 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  };
  result.push(currentTextNode);
  }
- const emphasisContent = content.slice(index);
+ const emphasisContent = content.slice(index$1);
  currentTextNode = null;
  const { node } = parseEmphasisToken([
  {
@@ -1367,24 +1391,59 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  const textNodeContent = content.slice(0, linkStart);
  const linkEnd = content.indexOf("](", linkStart);
  if (linkEnd !== -1) {
+ const textToken = tokens[i + 2];
  const text = content.slice(linkStart + 1, linkEnd);
- if (!/[[\]()]/.test(text)) {
- result.push({
+ if (!/[[\]]/.test(text)) {
+ if (content.endsWith("](") && nextToken?.type === "link_open" && textToken) {
+ const last = tokens[i + 4];
+ let index$1 = 4;
+ let loading$1 = true;
+ if (last?.type === "text" && last.content === ")") {
+ index$1++;
+ loading$1 = false;
+ } else if (last?.type === "text" && last.content === ".") i++;
+ result.push({
+ type: "link",
+ href: textToken.content || "",
+ text,
+ children: [{
+ type: "text",
+ content: text,
+ raw: text
+ }],
+ loading: loading$1
+ });
+ i += index$1;
+ break;
+ }
+ const linkContentEnd = content.indexOf(")", linkEnd);
+ const href = linkContentEnd !== -1 ? content.slice(linkEnd + 2, linkContentEnd) : "";
+ const loading = linkContentEnd === -1;
+ if (textNodeContent) result.push({
  type: "text",
  content: textNodeContent,
  raw: textNodeContent
  });
  result.push({
  type: "link",
- href: "",
+ href,
  text,
  children: [{
  type: "text",
  content: text,
  raw: text
  }],
- loading: true
+ loading
  });
+ const afterText = linkContentEnd !== -1 ? content.slice(linkContentEnd + 1) : "";
+ if (afterText) {
+ handleToken({
+ type: "text",
+ content: afterText,
+ raw: afterText
+ });
+ i--;
+ }
  i++;
  break;
  }
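This is the largest behavioural change: when a `[text](...` pattern arrives inside a plain text token, the parser now recovers the `href` directly from the content (the characters between `](` and the first `)`), keeps the node in a `loading` state while the closing parenthesis has not streamed in yet, and re-feeds any text after the `)` through `handleToken`. A simplified sketch of just the href/loading extraction, not the surrounding token bookkeeping (`extractLink` is an illustrative helper, not a package export):

```ts
interface PartialLink {
  type: 'link'
  href: string
  text: string
  loading: boolean
}

// Given inline content such as "[docs](https://example.com) tail" or a
// still-streaming "[docs](https://exa", pull out the link pieces.
function extractLink(content: string): { link: PartialLink, afterText: string } | null {
  const linkStart = content.indexOf('[')
  if (linkStart === -1)
    return null
  const linkEnd = content.indexOf('](', linkStart)
  if (linkEnd === -1)
    return null
  const text = content.slice(linkStart + 1, linkEnd)
  const linkContentEnd = content.indexOf(')', linkEnd)
  const href = linkContentEnd !== -1 ? content.slice(linkEnd + 2, linkContentEnd) : ''
  const loading = linkContentEnd === -1 // the ")" has not arrived yet
  const afterText = linkContentEnd !== -1 ? content.slice(linkContentEnd + 1) : ''
  return { link: { type: 'link', href, text, loading }, afterText }
}

console.log(extractLink('[docs](https://example.com) tail'))
// link.href === 'https://example.com', link.loading === false, afterText === ' tail'
console.log(extractLink('[docs](https://exa'))
// link.href === '', link.loading === true, afterText === ''
```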
@@ -1396,7 +1455,7 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  currentTextNode.raw += textNode.raw;
  } else {
  const maybeMath = preToken?.tag === "br" && tokens[i - 2]?.content === "[";
- textNode.content = textNode.content.replace(/(\*+|\(|\\)$/, "");
+ if (!tokens[i + 1]) textNode.content = textNode.content.replace(/(\*+|\(|\\)$/, "");
  currentTextNode = textNode;
  currentTextNode.center = maybeMath;
  result.push(currentTextNode);
@@ -1419,6 +1478,18 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  case "link_open": {
  currentTextNode = null;
  const href = token.attrs?.find((attr) => attr[0] === "href")?.[1];
+ if (raw && tokens[i + 1].type === "text") {
+ const text = tokens[i + 1]?.content || "";
+ if (!(/* @__PURE__ */ new RegExp(`\\[${text}\\s*\\]`)).test(raw)) {
+ result.push({
+ type: "text",
+ content: text,
+ raw: text
+ });
+ i += 3;
+ break;
+ }
+ }
  if (raw && href) {
  const loadingMath = /* @__PURE__ */ new RegExp(`\\(\\s*${href}\\s*\\)`);
  const pre = result.length > 0 ? result[result.length - 1] : null;
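For a real `link_open` token, the parser now cross-checks the raw inline source: when the raw string never contained the link text in square brackets, the change appears to downgrade the token to plain text (for example for autolinked bare URLs or partially streamed brackets) instead of emitting a link node. A reduced sketch of that raw-source probe; note the bundled code interpolates the text into the RegExp directly, while this sketch escapes it first (`rawHasBracketedText` and `escapeRegExp` are hypothetical helpers):

```ts
// Escape regex metacharacters so arbitrary link text can be probed safely.
function escapeRegExp(s: string): string {
  return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
}

// True when the raw inline source really contained "[text]"
// (optionally with whitespace before the closing bracket).
function rawHasBracketedText(raw: string, text: string): boolean {
  return new RegExp(`\\[${escapeRegExp(text)}\\s*\\]`).test(raw)
}

console.log(rawHasBracketedText('see [docs](https://example.com)', 'docs')) // true
console.log(rawHasBracketedText('see https://example.com', 'https://example.com')) // false
```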
@@ -1568,7 +1639,7 @@ function parseInlineTokens(tokens, raw, pPreToken) {
  break;
  case "reference":
  currentTextNode = null;
- result.push(parseReferenceToken(token));
+ if (!tokens[i + 1]?.content?.startsWith("(")) result.push(parseReferenceToken(token));
  i++;
  break;
  default:
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","names":["defaultMathOptions: MathOptions | undefined","contentLines: string[]","i","CONTROL_MAP: Record<string, string>","re: RegExp","delimiters: [string, string][]","t","findMatchingClose","content","token: any","i","children: ParsedNode[]","innerTokens: MarkdownToken[]","orig: string[]","updated: string[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","linkTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","result: ParsedNode[]","currentTextNode: TextNode | null","pre: any","blockquoteChildren: ParsedNode[]","items: DefinitionItemNode[]","termNodes: ParsedNode[]","definitionNodes: ParsedNode[]","footnoteChildren: ParsedNode[]","headerRow: TableRowNode | null","rows: TableRowNode[]","cells: TableCellNode[]","rowNode: TableRowNode","listItems: ListItemNode[]","itemChildren: ParsedNode[]","nestedItems: ListItemNode[]","admonitionChildren: ParsedNode[]","children: ParsedNode[]","result: ParsedNode[]","defaultTranslations: Record<string, string>","t: (key: string) => string","markdownItEmoji","openLine: number","endLine: number","markup: string"],"sources":["../src/config.ts","../src/plugins/containers.ts","../src/findMatchingClose.ts","../src/plugins/isMathLike.ts","../src/plugins/math.ts","../src/renderers/index.ts","../src/factory.ts","../src/parser/fixTableTokens.ts","../src/parser/inline-parsers/checkbox-parser.ts","../src/parser/inline-parsers/emoji-parser.ts","../src/parser/inline-parsers/emphasis-parser.ts","../src/parser/inline-parsers/fence-parser.ts","../src/parser/inline-parsers/fixLinkToken.ts","../src/parser/inline-parsers/fixListItem.ts","../src/parser/inline-parsers/fixStrongTokens.ts","../src/parser/inline-parsers/footnote-ref-parser.ts","../src/parser/inline-parsers/hardbreak-parser.ts","../src/parser/inline-parsers/highlight-parser.ts","../src/parser/inline-parsers/image-parser.ts","../src/parser/inline-parsers/inline-code-parser.ts","../src/parser/inline-parsers/insert-parser.ts","../src/parser/inline-parsers/link-parser.ts","../src/parser/inline-parsers/math-inline-parser.ts","../src/parser/inline-parsers/reference-parser.ts","../src/parser/inline-parsers/strikethrough-parser.ts","../src/parser/inline-parsers/strong-parser.ts","../src/parser/inline-parsers/subscript-parser.ts","../src/parser/inline-parsers/superscript-parser.ts","../src/parser/inline-parsers/text-parser.ts","../src/parser/inline-parsers/index.ts","../src/parser/node-parsers/blockquote-parser.ts","../src/parser/node-parsers/code-block-parser.ts","../src/parser/node-parsers/definition-list-parser.ts","../src/parser/node-parsers/footnote-parser.ts","../src/parser/node-parsers/heading-parser.ts","../src/parser/node-parsers/math-block-parser.ts","../src/parser/node-parsers/table-parser.ts","../src/parser/node-parsers/thematic-break-parser.ts","../src/parser/node-parsers/list-parser.ts","../src/parser/node-parsers/admonition-parser.ts","../src/parser/node-parsers/container-parser.ts","../src/parser/node-parsers/hardbreak-parser.ts","../src/parser/node-parsers/paragraph-parser.ts","../src/parser/index.ts","../src/index.ts"],"sourcesContent":["/**\n * MathOptions control how the math plugin normalizes content before\n * handing it to KaTeX (or other math renderers).\n *\n * - commands: list of command words that should 
be auto-prefixed with a\n * backslash if not already escaped (e.g. 'infty' -> '\\\\infty'). Use a\n * conservative list to avoid false positives in prose.\n * - escapeExclamation: whether to escape standalone '!' to '\\\\!' (default true).\n */\nexport interface MathOptions {\n /** List of command words to auto-escape. */\n commands?: readonly string[]\n /** Whether to escape standalone '!' (default: true). */\n escapeExclamation?: boolean\n}\n\nlet defaultMathOptions: MathOptions | undefined\n\nexport function setDefaultMathOptions(opts: MathOptions | undefined) {\n defaultMathOptions = opts\n}\n\nexport function getDefaultMathOptions(): MathOptions | undefined {\n return defaultMathOptions\n}\n","import type MarkdownIt from 'markdown-it'\nimport markdownItContainer from 'markdown-it-container'\n\nexport function applyContainers(md: MarkdownIt) {\n ;[\n 'admonition',\n 'info',\n 'warning',\n 'error',\n 'tip',\n 'danger',\n 'note',\n 'caution',\n ].forEach((name) => {\n md.use(markdownItContainer, name, {\n render(tokens: any, idx: number) {\n const token = tokens[idx]\n if (token.nesting === 1) {\n return `<div class=\"vmr-container vmr-container-${name}\">`\n }\n else {\n return '</div>\\n'\n }\n },\n })\n })\n\n // fallback for simple ::: blocks (kept for backwards compat)\n md.block.ruler.before(\n 'fence',\n 'vmr_container_fallback',\n (state: any, startLine: number, endLine: number, silent: boolean) => {\n const startPos = state.bMarks[startLine] + state.tShift[startLine]\n const lineMax = state.eMarks[startLine]\n const markerMatch = state.src\n .slice(startPos, lineMax)\n .match(/^:::\\s*(\\w+)/)\n if (!markerMatch)\n return false\n if (silent)\n return true\n\n const name = markerMatch[1]\n let nextLine = startLine + 1\n let found = false\n while (nextLine <= endLine) {\n const sPos = state.bMarks[nextLine] + state.tShift[nextLine]\n const ePos = state.eMarks[nextLine]\n if (state.src.slice(sPos, ePos).trim() === ':::') {\n found = true\n break\n }\n nextLine++\n }\n if (!found)\n return false\n\n const tokenOpen = state.push('vmr_container_open', 'div', 1)\n tokenOpen.attrSet('class', `vmr-container vmr-container-${name}`)\n\n const contentLines: string[] = []\n for (let i = startLine + 1; i < nextLine; i++) {\n const sPos = state.bMarks[i] + state.tShift[i]\n const ePos = state.eMarks[i]\n contentLines.push(state.src.slice(sPos, ePos))\n }\n\n // Open a paragraph, push inline content and then close paragraph\n state.push('paragraph_open', 'p', 1)\n const inlineToken = state.push('inline', '', 0)\n inlineToken.content = contentLines.join('\\n')\n inlineToken.map = [startLine + 1, nextLine]\n // Ensure children exist and parse the inline content into them so the renderer\n // won't encounter a null children array (which causes .length read errors).\n inlineToken.children = []\n state.md.inline.parse(inlineToken.content, state.md, state.env, inlineToken.children)\n state.push('paragraph_close', 'p', -1)\n\n state.push('vmr_container_close', 'div', -1)\n\n state.line = nextLine + 1\n return true\n },\n )\n}\n","export function findMatchingClose(src: string, startIdx: number, open: string, close: string) {\n const len = src.length\n // Special-case $$ since it's a two-char delimiter that shouldn't\n // be interpreted as nested parentheses.\n if (open === '$$' && close === '$$') {\n let i = startIdx\n while (i < len - 1) {\n if (src[i] === '$' && src[i + 1] === '$') {\n // ensure not escaped\n let k = i - 1\n let backslashes = 0\n while (k >= 0 && src[k] === '\\\\') {\n 
backslashes++\n k--\n }\n if (backslashes % 2 === 0)\n return i\n }\n i++\n }\n return -1\n }\n\n const openChar = open[open.length - 1]\n const closeSeq = close\n let depth = 0\n let i = startIdx\n while (i < len) {\n // If there's an unescaped close sequence here\n if (src.slice(i, i + closeSeq.length) === closeSeq) {\n let k = i - 1\n let backslashes = 0\n while (k >= 0 && src[k] === '\\\\') {\n backslashes++\n k--\n }\n if (backslashes % 2 === 0) {\n if (depth === 0)\n return i\n depth--\n i += closeSeq.length\n continue\n }\n }\n\n const ch = src[i]\n // skip escaped characters\n if (ch === '\\\\') {\n i += 2\n continue\n }\n\n if (ch === openChar) {\n depth++\n }\n else if (ch === closeSeq[closeSeq.length - 1]) {\n if (depth > 0)\n depth--\n }\n i++\n }\n return -1\n}\n\nexport default findMatchingClose\n","export const TEX_BRACE_COMMANDS = [\n 'mathbf',\n 'boldsymbol',\n 'mathbb',\n 'mathcal',\n 'mathfrak',\n 'mathrm',\n 'mathit',\n 'mathsf',\n 'vec',\n 'hat',\n 'bar',\n 'tilde',\n 'overline',\n 'underline',\n 'mathscr',\n 'mathnormal',\n 'operatorname',\n 'mathbf*',\n]\n\nexport const ESCAPED_TEX_BRACE_COMMANDS = TEX_BRACE_COMMANDS.map(c => c.replace(/[.*+?^${}()|[\\\\]\"\\]/g, '\\\\$&')).join('|')\n\nconst TEX_CMD_RE = /\\\\[a-z]+/i\nconst PREFIX_CLASS = '(?:\\\\\\\\|\\\\u0008)'\nconst TEX_CMD_WITH_BRACES_RE = new RegExp(`${PREFIX_CLASS}(?:${ESCAPED_TEX_BRACE_COMMANDS})\\\\s*\\\\{[^}]+\\\\}`, 'i')\n// Detect brace-taking TeX commands even when the leading backslash or the\n// closing brace/content is missing (e.g. \"operatorname{\" or \"operatorname{span\").\n// This helps the heuristic treat incomplete but clearly TeX-like fragments\n// as math-like instead of plain text.\nconst TEX_BRACE_CMD_START_RE = new RegExp(`(?:${PREFIX_CLASS})?(?:${ESCAPED_TEX_BRACE_COMMANDS})\\s*\\{`, 'i')\nconst TEX_SPECIFIC_RE = /\\\\(?:text|frac|left|right|times)/\n// Match common math operator symbols or named commands.\n// Avoid treating the C/C++ increment operator (\"++\") as a math operator by\n// ensuring a lone '+' isn't matched when it's part of a '++' sequence.\n// Use a RegExp constructed from a string to avoid issues escaping '/' in a\n// regex literal on some platforms/linters.\n// eslint-disable-next-line prefer-regex-literals\nconst OPS_RE = new RegExp('(?<!\\\\+)\\\\+(?!\\\\+)|[=\\\\-*/^<>]|\\\\\\\\times|\\\\\\\\pm|\\\\\\\\cdot|\\\\\\\\le|\\\\\\\\ge|\\\\\\\\neq')\nconst FUNC_CALL_RE = /[A-Z]+\\s*\\([^)]+\\)/i\nconst WORDS_RE = /\\b(?:sin|cos|tan|log|ln|exp|sqrt|frac|sum|lim|int|prod)\\b/\n// Heuristic to detect common date/time patterns like 2025/9/30 21:37:24 and\n// avoid classifying them as math merely because they contain '/' or ':'\nconst DATE_TIME_RE = /\\b\\d{4}\\/\\d{1,2}\\/\\d{1,2}(?:[ T]\\d{1,2}:\\d{2}(?::\\d{2})?)?\\b/\nexport function isMathLike(s: string) {\n if (!s)\n return false\n\n // Normalize accidental control characters that may appear if a single\n // backslash sequence was interpreted in a JS string literal (for example\n // '\\\\b' becoming a backspace U+0008). 
Convert such control characters\n // back into their two-character escaped forms so our regexes can match\n // TeX commands reliably.\n // eslint-disable-next-line no-control-regex\n const norm = s.replace(/\\u0008/g, '\\\\b')\n const stripped = norm.trim()\n\n // quick bailouts\n // If the content looks like a timestamp or date, it's not math.\n if (DATE_TIME_RE.test(stripped))\n return false\n if (stripped.length > 2000)\n return true // very long blocks likely math\n\n if (/[./]\\s*\\D|\\D\\s*[./]/.test(s)) {\n return false\n }\n\n // TeX commands e.g. \\frac, \\alpha\n const texCmd = TEX_CMD_RE.test(norm)\n const texCmdWithBraces = TEX_CMD_WITH_BRACES_RE.test(norm)\n const texBraceStart = TEX_BRACE_CMD_START_RE.test(norm)\n\n // Explicit common TeX tokens (keeps compatibility with previous heuristic)\n const texSpecific = TEX_SPECIFIC_RE.test(norm)\n const subscriptPattern = /(?:^|[^\\w\\\\])(?:[A-Z]|\\\\[A-Z]+)_(?:\\{[^}]+\\}|[A-Z0-9\\\\])/i\n const superscriptPattern = /(?:^|[^\\w\\\\])(?:[A-Z]|\\\\[A-Z]+)\\^(?:\\{[^}]+\\}|[A-Z0-9\\\\])/i\n const superSub = subscriptPattern.test(norm) || superscriptPattern.test(norm)\n // common math operator symbols or named commands\n const ops = OPS_RE.test(norm)\n // function-like patterns: f(x), sin(x)\n const funcCall = FUNC_CALL_RE.test(norm)\n // common math words\n const words = WORDS_RE.test(norm)\n // 纯单个英文字命,也渲染成数学公式\n // e.g. (w) (x) (y) (z)\n // const pureWord = /^\\([a-zA-Z]\\)$/i.test(stripped)\n\n return texCmd || texCmdWithBraces || texBraceStart || texSpecific || superSub || ops || funcCall || words\n}\n","import type MarkdownIt from 'markdown-it'\nimport type { MathOptions } from '../config'\n\nimport findMatchingClose from '../findMatchingClose'\nimport { ESCAPED_TEX_BRACE_COMMANDS, isMathLike } from './isMathLike'\n\n// Heuristic to decide whether a piece of text is likely math.\n// Matches common TeX commands, math operators, function-call patterns like f(x),\n// superscripts/subscripts, and common math words.\n// Common TeX formatting commands that take a brace argument, e.g. \\boldsymbol{...}\n// Keep this list in a single constant so it's easy to extend/test.\n\n// Precompute an escaped, |-joined string of TEX brace commands so we don't\n// rebuild it on every call to `isMathLike`.\n\n// Common KaTeX/TeX command names that might lose their leading backslash.\n// Keep this list conservative to avoid false-positives in normal text.\nexport const KATEX_COMMANDS = [\n 'ldots',\n 'cdots',\n 'quad',\n 'in',\n 'infty',\n 'perp',\n 'mid',\n 'operatorname',\n 'to',\n 'rightarrow',\n 'leftarrow',\n 'math',\n 'mathrm',\n 'mathbf',\n 'mathit',\n 'mathbb',\n 'mathcal',\n 'mathfrak',\n 'alpha',\n 'beta',\n 'gamma',\n 'delta',\n 'epsilon',\n 'lambda',\n 'sum',\n 'prod',\n 'int',\n 'sqrt',\n 'fbox',\n 'boxed',\n 'color',\n 'rule',\n 'edef',\n 'fcolorbox',\n 'hline',\n 'hdashline',\n 'cdot',\n 'times',\n 'pm',\n 'le',\n 'ge',\n 'neq',\n 'sin',\n 'cos',\n 'tan',\n 'log',\n 'ln',\n 'exp',\n 'lim',\n 'frac',\n 'text',\n 'left',\n 'right',\n 'times',\n]\n\n// Precompute escaped KATEX commands and default regex used by\n// `normalizeStandaloneBackslashT` when no custom commands are provided.\n// Sort commands by length (desc) before joining so longer commands like\n// 'operatorname' are preferred over shorter substrings like 'to'. 
This\n// avoids accidental partial matches when building the regex.\nexport const ESCAPED_KATEX_COMMANDS = KATEX_COMMANDS\n .slice()\n .sort((a, b) => b.length - a.length)\n .map(c => c.replace(/[.*+?^${}()|[\\\\]\\\\\\]/g, '\\\\$&'))\n .join('|')\nconst CONTROL_CHARS_CLASS = '[\\t\\r\\b\\f\\v]'\n\n// Hoisted map of control characters -> escaped letter (e.g. '\\t' -> 't').\n// Kept at module scope to avoid recreating on every normalization call.\nconst CONTROL_MAP: Record<string, string> = {\n '\\t': 't',\n '\\r': 'r',\n '\\b': 'b',\n '\\f': 'f',\n '\\v': 'v',\n}\n\nfunction countUnescapedStrong(s: string) {\n const re = /(^|[^\\\\])(__|\\*\\*)/g\n let m: RegExpExecArray | null\n let c = 0\n // eslint-disable-next-line unused-imports/no-unused-vars\n while ((m = re.exec(s)) !== null) {\n c++\n }\n return c\n}\n\nexport function normalizeStandaloneBackslashT(s: string, opts?: MathOptions) {\n const commands = opts?.commands ?? KATEX_COMMANDS\n const escapeExclamation = opts?.escapeExclamation ?? true\n\n const useDefault = opts?.commands == null\n\n // Build or reuse regex: match control chars or unescaped command words.\n let re: RegExp\n if (useDefault) {\n re = new RegExp(`${CONTROL_CHARS_CLASS}|(?<!\\\\\\\\|\\\\w)(${ESCAPED_KATEX_COMMANDS})\\\\b`, 'g')\n }\n else {\n const commandPattern = `(?:${commands.slice().sort((a, b) => b.length - a.length).map(c => c.replace(/[.*+?^${}()|[\\\\]\\\\\"\\]/g, '\\\\$&')).join('|')})`\n re = new RegExp(`${CONTROL_CHARS_CLASS}|(?<!\\\\\\\\|\\\\w)(${commandPattern})\\\\b`, 'g')\n }\n\n let out = s.replace(re, (m: string, cmd?: string) => {\n if (CONTROL_MAP[m] !== undefined)\n return `\\\\${CONTROL_MAP[m]}`\n if (cmd && commands.includes(cmd))\n return `\\\\${cmd}`\n return m\n })\n\n // Escape standalone '!' but don't double-escape already escaped ones.\n if (escapeExclamation)\n out = out.replace(/(^|[^\\\\])!/g, '$1\\\\!')\n\n // Final pass: some TeX command names take a brace argument and may have\n // lost their leading backslash, e.g. \"operatorname{span}\". Ensure we\n // restore a backslash before known brace-taking commands when they are\n // followed by '{' and are not already escaped.\n // Use default escaped list when possible. Include TEX_BRACE_COMMANDS so\n // known brace-taking TeX commands (e.g. `text`, `boldsymbol`) are also\n // restored when their leading backslash was lost.\n const braceEscaped = useDefault\n ? 
[ESCAPED_TEX_BRACE_COMMANDS, ESCAPED_KATEX_COMMANDS].filter(Boolean).join('|')\n : [commands.map(c => c.replace(/[.*+?^${}()|[\\\\]\\\\\\]/g, '\\\\$&')).join('|'), ESCAPED_TEX_BRACE_COMMANDS].filter(Boolean).join('|')\n let result = out\n if (braceEscaped) {\n const braceCmdRe = new RegExp(`(^|[^\\\\\\\\])(${braceEscaped})\\\\s*\\\\{`, 'g')\n result = result.replace(braceCmdRe, (_m: string, p1: string, p2: string) => `${p1}\\\\${p2}{`)\n }\n result = result.replace(/span\\{([^}]+)\\}/, 'span\\\\{$1\\\\}')\n .replace(/\\\\operatorname\\{span\\}\\{((?:[^{}]|\\{[^}]*\\})+)\\}/, '\\\\operatorname{span}\\\\{$1\\\\}')\n return result\n}\nexport function applyMath(md: MarkdownIt, mathOpts?: MathOptions) {\n // Inline rule for \\(...\\) and $$...$$ and $...$\n const mathInline = (state: any, silent: boolean) => {\n if (/^\\*[^*]+/.test(state.src)) {\n return false\n }\n const delimiters: [string, string][] = [\n ['$$', '$$'],\n ['\\\\(', '\\\\)'],\n ['\\(', '\\)'],\n ]\n\n let searchPos = 0\n let preMathPos = 0\n // use findMatchingClose from util\n for (const [open, close] of delimiters) {\n // We'll scan the entire inline source and tokenize all occurrences\n const src = state.src\n let foundAny = false\n const pushText = (text: string) => {\n // sanitize unexpected values\n if (text === 'undefined' || text == null) {\n text = ''\n }\n if (text === '\\\\') {\n state.pos = state.pos + text.length\n searchPos = state.pos\n return\n }\n if (text === '\\\\)' || text === '\\\\(') {\n const t = state.push('text_special', '', 0)\n t.content = text === '\\\\)' ? ')' : '('\n t.markup = text\n state.pos = state.pos + text.length\n searchPos = state.pos\n return\n }\n\n if (!text)\n return\n // const strongMatch = text.match(/^(\\*+)([^*]+)(\\**)/)\n // if (strongMatch) {\n // const strongToken = state.push('strong_open', '', 0)\n // strongToken.markup = strongMatch[1]\n // const strongTextToken = state.push('text', '', 0)\n // // guard against unexpected undefined values\n // strongTextToken.content = strongMatch[2] == null ? '' : String(strongMatch[2])\n // const strongCloseToken = state.push('strong_close', '', 0)\n // strongCloseToken.markup = strongMatch[1]\n // if (!strongMatch[3])\n // return\n // text = text.slice(strongMatch[0].length)\n // if (text) {\n // const t = state.push('text', '', 0)\n // t.content = text\n // }\n // state.pos = state.src.length\n // searchPos = state.pos\n // return\n // }\n\n const t = state.push('text', '', 0)\n t.content = text\n state.pos = state.pos + text.length\n searchPos = state.pos\n }\n\n while (true) {\n if (searchPos >= src.length)\n break\n const index = src.indexOf(open, searchPos)\n if (index === -1)\n break\n\n // If the delimiter is immediately preceded by a ']' (possibly with\n // intervening spaces), it's likely part of a markdown link like\n // `[text](...)`, so we should not treat this '(' as the start of\n // an inline math span. 
Also guard the index to avoid OOB access.\n if (index > 0) {\n let i = index - 1\n // skip spaces between ']' and the delimiter\n while (i >= 0 && src[i] === ' ')\n i--\n if (i >= 0 && src[i] === ']')\n return false\n }\n // 有可能遇到 \\((\\operatorname{span}\\\\{\\boldsymbol{\\alpha}\\\\})^\\perp\\)\n // 这种情况,前面的 \\( 是数学公式的开始,后面的 ( 是普通括号\n // endIndex 需要找到与 open 对应的 close\n // 不能简单地用 indexOf 找到第一个 close — 需要处理嵌套与转义字符\n const endIdx = findMatchingClose(src, index + open.length, open, close)\n\n if (endIdx === -1) {\n // no matching close for this opener; skip forward\n const content = src.slice(index + open.length)\n if (isMathLike(content)) {\n searchPos = index + open.length\n foundAny = true\n if (!silent) {\n state.pending = ''\n const toPushBefore = preMathPos ? src.slice(preMathPos, searchPos) : src.slice(0, searchPos)\n const isStrongPrefix = countUnescapedStrong(toPushBefore) % 2 === 1\n\n if (preMathPos)\n pushText(src.slice(preMathPos, searchPos))\n else\n pushText(src.slice(0, searchPos))\n if (isStrongPrefix) {\n const strongToken = state.push('strong_open', '', 0)\n strongToken.markup = src.slice(0, index + 2)\n const token = state.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? '$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = true\n strongToken.content = content\n state.push('strong_close', '', 0)\n }\n else {\n const token = state.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? '$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = true\n }\n // consume the full inline source\n state.pos = src.length\n }\n searchPos = src.length\n preMathPos = searchPos\n }\n break\n }\n const content = src.slice(index + open.length, endIdx)\n if (!isMathLike(content)) {\n // push remaining text after last match\n // not math-like; skip this match and continue scanning\n searchPos = endIdx + close.length\n const text = src.slice(state.pos, searchPos)\n if (!state.pending)\n pushText(text)\n continue\n }\n foundAny = true\n\n if (!silent) {\n // push text before this math\n const before = src.slice(0, index)\n // If we already consumed some content, avoid duplicating the prefix\n // Only push the portion from previous search position\n const prevConsumed = src.slice(0, searchPos)\n // Determine whether there's an unclosed strong opener (**) or (__)\n // before this math delimiter. We only want to treat a prefix as a\n // strong-open when the number of unescaped strong markers in the\n // preceding segment is odd (i.e. there's an unmatched opener). This\n // avoids treating a fully paired `**bold**` as an open prefix.\n\n let toPushBefore = prevConsumed ? src.slice(preMathPos, index) : before\n const isStrongPrefix = countUnescapedStrong(toPushBefore) % 2 === 1\n if (index !== state.pos && isStrongPrefix) {\n toPushBefore = state.pending + src.slice(state.pos, index)\n }\n\n // strong prefix handling (preserve previous behavior)\n if (state.pending !== toPushBefore) {\n state.pending = ''\n if (isStrongPrefix) {\n const _match = toPushBefore.match(/(\\*+)/)\n const after = toPushBefore.slice(_match!.index! 
+ _match![0].length)\n pushText(toPushBefore.slice(0, _match!.index!))\n const strongToken = state.push('strong_open', '', 0)\n strongToken.markup = _match![0]\n const textToken = state.push('text', '', 0)\n textToken.content = after\n state.push('strong_close', '', 0)\n }\n else {\n pushText(toPushBefore)\n }\n }\n if (isStrongPrefix) {\n const strongToken = state.push('strong_open', '', 0)\n strongToken.markup = '**'\n const token = state.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? '$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = false\n const raw = src.slice(endIdx + close.length)\n const isBeforeClose = raw.startsWith('*')\n if (isBeforeClose) {\n state.push('strong_close', '', 0)\n }\n if (raw) {\n const textContentToken = state.push('text', '', 0)\n textContentToken.content = (raw == null ? '' : String(raw)).replace(/^\\*+/, '')\n }\n if (!isBeforeClose)\n state.push('strong_close', '', 0)\n state.pos = src.length\n searchPos = src.length\n preMathPos = searchPos\n continue\n }\n else {\n const token = state.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? '$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = false\n }\n }\n\n searchPos = endIdx + close.length\n preMathPos = searchPos\n state.pos = searchPos\n }\n\n if (foundAny) {\n if (!silent) {\n // push remaining text after last match\n if (searchPos < src.length)\n pushText(src.slice(searchPos))\n // consume the full inline source\n state.pos = src.length\n }\n else {\n // in silent mode, advance position past what we scanned\n state.pos = searchPos\n }\n\n return true\n }\n }\n\n return false\n }\n\n // Block math rule similar to previous implementation\n const mathBlock = (\n state: any,\n startLine: number,\n endLine: number,\n silent: boolean,\n ) => {\n const delimiters: [string, string][] = [\n ['\\\\[', '\\\\]'],\n ['\\[', '\\]'],\n ['$$', '$$'],\n ]\n\n const startPos = state.bMarks[startLine] + state.tShift[startLine]\n const lineText = state.src.slice(startPos, state.eMarks[startLine]).trim()\n let matched = false\n let openDelim = ''\n let closeDelim = ''\n for (const [open, close] of delimiters) {\n if (lineText === open || lineText.startsWith(open)) {\n if (open.includes('[')) {\n if (lineText.replace('\\\\', '') === '[') {\n if (startLine + 1 < endLine) {\n // const nextLineStart\n // = state.bMarks[startLine + 1] + state.tShift[startLine + 1]\n // const nextLineText = state.src.slice(\n // nextLineStart,\n // state.eMarks[startLine + 1],\n // )\n matched = true\n openDelim = open\n closeDelim = close\n break\n }\n continue\n }\n }\n else {\n matched = true\n openDelim = open\n closeDelim = close\n break\n }\n }\n }\n\n if (!matched)\n return false\n if (silent)\n return true\n\n if (\n lineText.includes(closeDelim)\n && lineText.indexOf(closeDelim) > openDelim.length\n ) {\n const startDelimIndex = lineText.indexOf(openDelim)\n const endDelimIndex = lineText.indexOf(\n closeDelim,\n startDelimIndex + openDelim.length,\n )\n const content = lineText.slice(\n startDelimIndex + openDelim.length,\n endDelimIndex,\n )\n\n const token: any = state.push('math_block', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content)\n token.markup\n = openDelim === '$$' ? '$$' : openDelim === '[' ? 
'[]' : '\\\\[\\\\]'\n token.map = [startLine, startLine + 1]\n token.raw = `${openDelim}${content}${closeDelim}`\n token.block = true\n token.loading = false\n state.line = startLine + 1\n return true\n }\n\n let nextLine = startLine\n let content = ''\n let found = false\n\n const firstLineContent\n = lineText === openDelim ? '' : lineText.slice(openDelim.length)\n\n if (firstLineContent.includes(closeDelim)) {\n const endIndex = firstLineContent.indexOf(closeDelim)\n content = firstLineContent.slice(0, endIndex)\n found = true\n nextLine = startLine\n }\n else {\n if (firstLineContent)\n content = firstLineContent\n\n for (nextLine = startLine + 1; nextLine < endLine; nextLine++) {\n const lineStart = state.bMarks[nextLine] + state.tShift[nextLine]\n const lineEnd = state.eMarks[nextLine]\n const currentLine = state.src.slice(lineStart - 1, lineEnd)\n if (currentLine.trim() === closeDelim) {\n found = true\n break\n }\n else if (currentLine.includes(closeDelim)) {\n found = true\n const endIndex = currentLine.indexOf(closeDelim)\n content += (content ? '\\n' : '') + currentLine.slice(0, endIndex)\n break\n }\n content += (content ? '\\n' : '') + currentLine\n }\n }\n\n const token: any = state.push('math_block', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content)\n token.markup\n = openDelim === '$$' ? '$$' : openDelim === '[' ? '[]' : '\\\\[\\\\]'\n token.raw = `${openDelim}${content}${content.startsWith('\\n') ? '\\n' : ''}${closeDelim}`\n token.map = [startLine, nextLine + 1]\n token.block = true\n token.loading = !found\n state.line = nextLine + 1\n return true\n }\n\n // Register math before the escape rule so inline math is tokenized\n // before markdown-it processes backslash escapes. This preserves\n // backslashes inside math content (e.g. \"\\\\{\") instead of having\n // the escape rule remove them from the token content.\n md.inline.ruler.before('escape', 'math', mathInline)\n md.block.ruler.before('paragraph', 'math_block', mathBlock, {\n alt: ['paragraph', 'reference', 'blockquote', 'list'],\n })\n}\n","import type MarkdownIt from 'markdown-it'\n\nexport function applyRenderRules(md: MarkdownIt) {\n const defaultImage\n = md.renderer.rules.image\n || function (tokens: any, idx: number, options: any, env: any, self: any) {\n return self.renderToken(tokens, idx, options)\n }\n\n md.renderer.rules.image = (\n tokens: any,\n idx: number,\n options: any,\n env: any,\n self: any,\n ) => {\n const token = tokens[idx]\n token.attrSet?.('loading', 'lazy')\n return defaultImage(tokens, idx, options, env, self)\n }\n\n md.renderer.rules.fence\n = md.renderer.rules.fence\n || ((tokens: any, idx: number) => {\n const token = tokens[idx]\n const info = token.info ? token.info.trim() : ''\n const langClass = info\n ? 
`language-${md.utils.escapeHtml(info.split(/\\s+/g)[0])}`\n : ''\n const code = md.utils.escapeHtml(token.content)\n return `<pre class=\"${langClass}\"><code>${code}</code></pre>`\n })\n}\n","import type { MathOptions } from './config'\nimport MarkdownIt from 'markdown-it'\nimport { getDefaultMathOptions } from './config'\nimport { applyContainers } from './plugins/containers'\nimport { applyMath } from './plugins/math'\nimport { applyRenderRules } from './renderers'\n\nexport interface FactoryOptions extends Record<string, any> {\n markdownItOptions?: Record<string, any>\n enableMath?: boolean\n enableContainers?: boolean\n mathOptions?: { commands?: string[], escapeExclamation?: boolean }\n}\n\nexport function factory(opts: FactoryOptions = {}): MarkdownIt {\n const md = new MarkdownIt({\n html: true,\n linkify: true,\n typographer: true,\n ...(opts.markdownItOptions ?? {}),\n })\n\n if (opts.enableMath ?? true) {\n const mergedMathOptions: MathOptions = { ...(getDefaultMathOptions() ?? {}), ...(opts.mathOptions ?? {}) }\n applyMath(md, mergedMathOptions)\n }\n if (opts.enableContainers ?? true)\n applyContainers(md)\n applyRenderRules(md)\n\n return md\n}\n","import type { MarkdownToken } from '../types'\n\nfunction createStart() {\n return [\n {\n type: 'table_open',\n tag: 'table',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '',\n info: '',\n level: 0,\n loading: true,\n meta: null,\n },\n {\n type: 'thead_open',\n tag: 'thead',\n attrs: null,\n block: true,\n level: 1,\n children: null,\n },\n {\n type: 'tr_open',\n tag: 'tr',\n attrs: null,\n block: true,\n level: 2,\n children: null,\n },\n\n ]\n}\nfunction createEnd() {\n return [\n {\n type: 'tr_close',\n tag: 'tr',\n attrs: null,\n block: true,\n level: 2,\n children: null,\n },\n {\n type: 'thead_close',\n tag: 'thead',\n attrs: null,\n block: true,\n level: 1,\n children: null,\n },\n {\n type: 'table_close',\n tag: 'table',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '',\n info: '',\n level: 0,\n meta: null,\n },\n ]\n}\nfunction createTh(text: string) {\n return [{\n type: 'th_open',\n tag: 'th',\n attrs: null,\n block: true,\n level: 3,\n children: null,\n }, {\n type: 'inline',\n tag: '',\n children: [\n {\n tag: '',\n type: 'text',\n block: false,\n content: text,\n children: null,\n },\n ],\n content: text,\n level: 4,\n attrs: null,\n block: true,\n }, {\n type: 'th_close',\n tag: 'th',\n attrs: null,\n block: true,\n level: 3,\n children: null,\n }]\n}\nexport function fixTableTokens(tokens: MarkdownToken[]): MarkdownToken[] {\n const fixedTokens = [...tokens]\n if (tokens.length < 3)\n return fixedTokens\n const i = tokens.length - 2\n const token = tokens[i]\n\n if (token.type === 'inline') {\n if (/^\\|(?:[^|\\n]+\\|?)+/.test(token.content!)) {\n // 解析 table\n const body = token.children![0].content!.slice(1).split('|').map(i => i.trim()).filter(Boolean).flatMap(i => createTh(i))\n const insert = [\n ...createStart(),\n ...body,\n ...createEnd(),\n ] as any\n fixedTokens.splice(i - 1, 3, ...insert)\n }\n else if (/^\\|(?:[^|\\n]+\\|)+\\n\\|:?-/.test(token.content!)) {\n // 解析 table\n const body = token.children![0].content!.slice(1, -1).split('|').map(i => i.trim()).flatMap(i => createTh(i))\n const insert = [\n ...createStart(),\n ...body,\n ...createEnd(),\n ] as any\n fixedTokens.splice(i - 1, 3, ...insert)\n }\n else if (/^\\|(?:[^|\\n:]+\\|)+\\n\\|:?$/.test(token.content!)) {\n token.content = token.content!.slice(0, -2)\n token.children!.splice(2, 1)\n }\n 
}\n\n return fixedTokens\n}\n","import type { CheckboxInputNode, CheckboxNode, MarkdownToken } from '../../types'\n\nexport function parseCheckboxToken(token: MarkdownToken): CheckboxNode {\n return {\n type: 'checkbox',\n checked: token.meta?.checked === true,\n raw: token.meta?.checked ? '[x]' : '[ ]',\n }\n}\n\nexport function parseCheckboxInputToken(token: any): CheckboxInputNode {\n return {\n type: 'checkbox_input',\n checked: token.attrGet('checked') === '' || token.attrGet('checked') === 'true',\n raw: token.attrGet('checked') === '' || token.attrGet('checked') === 'true' ? '[x]' : '[ ]',\n }\n}\n","import type { EmojiNode, MarkdownToken } from '../../types'\n\nexport function parseEmojiToken(token: MarkdownToken): EmojiNode {\n return {\n type: 'emoji',\n name: token.content || '',\n markup: token.markup || '',\n raw: `:${token.content || ''}:`,\n }\n}\n","import type { EmphasisNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseEmphasisToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: EmphasisNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let emText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between em_open and em_close\n while (i < tokens.length && tokens[i].type !== 'em_close') {\n emText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: EmphasisNode = {\n type: 'emphasis',\n children,\n raw: `*${emText}*`,\n }\n\n // Skip to after em_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { CodeBlockNode, MarkdownToken } from '../../types'\n\nfunction splitUnifiedDiff(content: string) {\n const orig: string[] = []\n const updated: string[] = []\n for (const rawLine of content.split(/\\r?\\n/)) {\n const line = rawLine\n // skip diff metadata lines\n if (/^(?:diff |index |--- |\\+\\+\\+ |@@ )/.test(line))\n continue\n\n if (line.startsWith('- ')) {\n orig.push(` ${line.slice(1)}`)\n }\n else if (line.startsWith('+ ')) {\n updated.push(` ${line.slice(1)}`)\n }\n else {\n // fallback: treat as context (no prefix)\n orig.push(line)\n updated.push(line)\n }\n }\n return {\n original: orig.join('\\n'),\n updated: updated.join('\\n'),\n }\n}\n\nexport function parseFenceToken(token: MarkdownToken): CodeBlockNode {\n const hasMap = Array.isArray(token.map) && token.map.length === 2\n const meta = (token as any).meta\n const closed = typeof meta?.closed === 'boolean' ? meta.closed : undefined\n const diff = token.info?.startsWith('diff') || false\n const language = diff ? token.info?.split(' ')[1] || '' : token.info || ''\n\n // Defensive sanitization: sometimes a closing fence line (e.g. ``` or ``)\n // can accidentally end up inside `token.content` (for example when\n // the parser/mapping is confused). Remove a trailing line that only\n // contains backticks and optional whitespace so we don't render stray\n // ` or `` characters at the end of the code output. 
This is a\n // conservative cleanup and only strips a final line that looks like a\n // fence marker (starts with optional spaces then one or more ` and\n // only whitespace until end-of-string).\n let content = token.content || ''\n const trailingFenceLine = /\\r?\\n[ \\t]*`+\\s*$/\n if (trailingFenceLine.test(content))\n content = content.replace(trailingFenceLine, '')\n\n if (diff) {\n const { original, updated } = splitUnifiedDiff(content)\n // 返回时保留原来的 code 字段为 updated(编辑后代码),并额外附加原始与更新的文本\n return {\n type: 'code_block',\n language,\n code: updated || '',\n raw: content,\n diff,\n loading: closed === true ? false : closed === false ? true : !hasMap,\n originalCode: original,\n updatedCode: updated,\n }\n }\n\n return {\n type: 'code_block',\n language,\n code: content || '',\n raw: content || '',\n diff,\n loading: closed === true ? false : closed === false ? true : !hasMap,\n }\n}\n","import type { MarkdownToken } from '../../types'\n\nexport function fixLinkToken(tokens: MarkdownToken[]): MarkdownToken[] {\n if (tokens.length < 5)\n return tokens\n const first = tokens[tokens.length - 5]\n if (first.type !== 'text' && !first.content!.endsWith('['))\n return fixLinkTokens2(tokens)\n const second = tokens[tokens.length - 4]\n if (second.tag !== 'em')\n return fixLinkTokens2(tokens)\n const last = tokens[tokens.length - 1]\n if (!last.content!.startsWith(']'))\n return fixLinkTokens2(tokens)\n\n const third = tokens[tokens.length - 3]\n const href = last.content!.replace(/^\\]\\(*/, '')\n const loading = !last.content!.includes(')')\n first.content = first.content!.replace(/\\[$/, '')\n tokens.splice(tokens.length - 3, 1, {\n type: 'link',\n href,\n text: third.content,\n children: [\n {\n type: 'text',\n content: third.content,\n raw: third.content,\n },\n ],\n loading,\n } as any)\n tokens.splice(tokens.length - 1, 1)\n return tokens\n}\n\nexport function fixLinkTokens2(tokens: MarkdownToken[]): MarkdownToken[] {\n if (tokens.length < 8)\n return tokens\n let length = tokens.length\n let last = tokens[length - 1]\n if (last.type !== 'link_close') {\n length--\n last = tokens[length - 1]\n if (last.type !== 'link_close')\n return tokens\n }\n const second = tokens[length - 7]\n if (second.type !== 'em_open')\n return tokens\n const third = tokens[length - 6]\n const first = tokens[length - 8]\n\n let href = tokens[length - 2].content\n let count = 4\n if (length !== tokens.length) {\n // 合并 last 到 href\n href += last.content || ''\n count++\n }\n tokens.splice(length - 4, count)\n const content = third.content\n length -= 4\n first.content = first.content!.replace(/\\[$/, '')\n tokens.splice(length - 2, 1, {\n type: 'link',\n href,\n text: content,\n children: [\n {\n type: 'text',\n content,\n raw: content,\n },\n ],\n loading: true,\n } as any)\n return tokens\n}\n","import type { MarkdownToken } from '../../types'\n\nexport function fixListItem(tokens: MarkdownToken[]): MarkdownToken[] {\n const last = tokens[tokens.length - 1]\n\n if (/\\d+\\.\\s*$/.test(last.content || '') && tokens[tokens.length - 2]?.tag === 'br') {\n tokens.splice(tokens.length - 1, 1)\n }\n return tokens\n}\n","import type { MarkdownToken } from '../../types'\n\nexport function fixStrongTokens(tokens: MarkdownToken[]): MarkdownToken[] {\n const fixedTokens = [...tokens]\n if (tokens.length < 4)\n return fixedTokens\n const i = tokens.length - 4\n const token = tokens[i]\n const nextToken = tokens[i + 1]\n if (token.type === 'text' && token.content?.endsWith('*') && nextToken.type === 'em_open') {\n // 
解析有问题,要合并 emphasis 和 前面的 * 为 strong\n const _nextToken = tokens[i + 2]\n const count = _nextToken?.type === 'text' ? 4 : 3\n const insert = [\n {\n type: 'strong_open',\n tag: 'strong',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '**',\n info: '',\n meta: null,\n },\n {\n type: 'text',\n content: _nextToken?.type === 'text' ? _nextToken.content : '',\n },\n {\n type: 'strong_close',\n tag: 'strong',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '**',\n info: '',\n meta: null,\n },\n ] as any\n const beforeText = token.content?.slice(0, -1)\n if (beforeText) {\n insert.unshift({\n type: 'text',\n content: beforeText,\n raw: beforeText,\n })\n }\n fixedTokens.splice(i, count, ...insert)\n return fixedTokens\n }\n\n return fixedTokens\n}\n","import type { FootnoteReferenceNode, MarkdownToken } from '../../types'\n\nexport function parseFootnoteRefToken(\n token: MarkdownToken,\n): FootnoteReferenceNode {\n return {\n type: 'footnote_reference',\n id: token.meta?.label || '',\n raw: `[^${token.meta?.label || ''}]`,\n }\n}\n","import type { HardBreakNode } from '../../types'\n\nexport function parseHardbreakToken(): HardBreakNode {\n return {\n type: 'hardbreak',\n raw: '\\\\\\n',\n }\n}\n","import type { HighlightNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseHighlightToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: HighlightNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let markText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between mark_open and mark_close\n while (i < tokens.length && tokens[i].type !== 'mark_close') {\n markText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: HighlightNode = {\n type: 'highlight',\n children,\n raw: `==${markText}==`,\n }\n\n // Skip to after mark_close\n const nextIndex = i < tokens.length ? 
i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { ImageNode, MarkdownToken } from '../../types'\n\nexport function parseImageToken(token: MarkdownToken, loading = false): ImageNode {\n return {\n type: 'image',\n src: token.attrs?.find(attr => attr[0] === 'src')?.[1] || '',\n alt: token.attrs?.find(attr => attr[0] === 'alt')?.[1] || '',\n title: token.attrs?.find(attr => attr[0] === 'title')?.[1] || null,\n raw: token.content || '',\n loading,\n }\n}\n","import type { InlineCodeNode, MarkdownToken } from '../../types'\n\nexport function parseInlineCodeToken(token: MarkdownToken): InlineCodeNode {\n return {\n type: 'inline_code',\n code: token.content || '',\n raw: token.content || '',\n }\n}\n","import type { InsertNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseInsertToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: InsertNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let insText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between ins_open and ins_close\n while (i < tokens.length && tokens[i].type !== 'ins_close') {\n insText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: InsertNode = {\n type: 'insert',\n children,\n raw: `++${insText}++`,\n }\n\n // Skip to after ins_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { LinkNode, MarkdownToken } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseLinkToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: LinkNode\n nextIndex: number\n} {\n const openToken = tokens[startIndex]\n const href = openToken.attrs?.find(attr => attr[0] === 'href')?.[1] || ''\n const title\n = openToken.attrs?.find(attr => attr[0] === 'title')?.[1] || null\n\n let i = startIndex + 1\n const linkTokens: MarkdownToken[] = []\n const loading = true\n\n // Collect all tokens between link_open and link_close\n while (i < tokens.length && tokens[i].type !== 'link_close') {\n linkTokens.push(tokens[i])\n i++\n }\n\n // Parse the collected tokens as inline content\n const children = parseInlineTokens(linkTokens)\n const linkText = children\n .map((node) => {\n if ('content' in node)\n return node.content\n return node.raw\n })\n .join('')\n\n const node: LinkNode = {\n type: 'link',\n href,\n title,\n text: linkText,\n children,\n raw: `[${linkText}](${href}${title ? ` \"${title}\"` : ''})`,\n loading,\n }\n\n // Skip to after link_close\n const nextIndex = i < tokens.length ? 
i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, MathInlineNode } from '../../types'\n\n// Parse a math_inline token (inline math expressions)\nexport function parseMathInlineToken(token: MarkdownToken): MathInlineNode {\n return {\n type: 'math_inline',\n content: token.content || '',\n loading: !!token.loading,\n raw: token.raw!,\n }\n}\n","import type { MarkdownToken, ReferenceNode } from '../../types'\n\n// Parse a reference token from markdown-it\nexport function parseReferenceToken(token: MarkdownToken): ReferenceNode {\n return {\n type: 'reference',\n id: token.content || '',\n raw: token.markup || `[${token.content}]`,\n }\n}\n","import type {\n MarkdownToken,\n ParsedNode,\n StrikethroughNode,\n} from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseStrikethroughToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: StrikethroughNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let sText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between s_open and s_close\n while (i < tokens.length && tokens[i].type !== 's_close') {\n sText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: StrikethroughNode = {\n type: 'strikethrough',\n children,\n raw: `~~${sText}~~`,\n }\n\n // Skip to after s_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, StrongNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseStrongToken(\n tokens: MarkdownToken[],\n startIndex: number,\n raw?: string,\n): {\n node: StrongNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let strongText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between strong_open and strong_close\n while (i < tokens.length && tokens[i].type !== 'strong_close') {\n strongText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens, raw))\n\n const node: StrongNode = {\n type: 'strong',\n children,\n raw: `**${strongText}**`,\n }\n\n // Skip to after strong_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, SubscriptNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseSubscriptToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: SubscriptNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let subText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between sub_open and sub_close (if applicable)\n while (i < tokens.length && tokens[i].type !== 'sub_close') {\n subText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: SubscriptNode = {\n type: 'subscript',\n children:\n children.length > 0\n ? 
children\n : [\n {\n type: 'text',\n // Fallback to the collected inner text (e.g., \"2\" in H~2~O)\n content: subText || tokens[startIndex].content || '',\n raw: subText || tokens[startIndex].content || '',\n },\n ],\n raw: `~${subText || tokens[startIndex].content || ''}~`,\n }\n\n // Skip to after sub_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, SuperscriptNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseSuperscriptToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: SuperscriptNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let supText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between sup_open and sup_close (if applicable)\n while (i < tokens.length && tokens[i].type !== 'sup_close') {\n supText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: SuperscriptNode = {\n type: 'superscript',\n children:\n children.length > 0\n ? children\n : [\n {\n type: 'text',\n // Fallback to the collected inner text (e.g., \"2\" in x^2^)\n content: supText || tokens[startIndex].content || '',\n raw: supText || tokens[startIndex].content || '',\n },\n ],\n raw: `^${supText || tokens[startIndex].content || ''}^`,\n }\n\n // Skip to after sup_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, TextNode } from '../../types'\n\nexport function parseTextToken(token: MarkdownToken): TextNode {\n return {\n type: 'text',\n content: token.content || '',\n raw: token.content || '',\n }\n}\n","import type { MarkdownToken, ParsedNode, TextNode } from '../../types'\nimport { parseCheckboxInputToken, parseCheckboxToken } from './checkbox-parser'\nimport { parseEmojiToken } from './emoji-parser'\nimport { parseEmphasisToken } from './emphasis-parser'\nimport { parseFenceToken } from './fence-parser'\nimport { fixLinkToken } from './fixLinkToken'\nimport { fixListItem } from './fixListItem'\nimport { fixStrongTokens } from './fixStrongTokens'\nimport { parseFootnoteRefToken } from './footnote-ref-parser'\nimport { parseHardbreakToken } from './hardbreak-parser'\nimport { parseHighlightToken } from './highlight-parser'\nimport { parseImageToken } from './image-parser'\nimport { parseInlineCodeToken } from './inline-code-parser'\nimport { parseInsertToken } from './insert-parser'\nimport { parseLinkToken } from './link-parser'\nimport { parseMathInlineToken } from './math-inline-parser'\nimport { parseReferenceToken } from './reference-parser'\nimport { parseStrikethroughToken } from './strikethrough-parser'\nimport { parseStrongToken } from './strong-parser'\nimport { parseSubscriptToken } from './subscript-parser'\nimport { parseSuperscriptToken } from './superscript-parser'\nimport { parseTextToken } from './text-parser'\n\n// Process inline tokens (for text inside paragraphs, headings, etc.)\nexport function parseInlineTokens(tokens: MarkdownToken[], raw?: string, pPreToken?: MarkdownToken): ParsedNode[] {\n if (!tokens || tokens.length === 0)\n return []\n\n const result: ParsedNode[] = []\n let currentTextNode: TextNode | null = null\n\n let i = 0\n tokens = fixStrongTokens(tokens)\n tokens = fixListItem(tokens)\n tokens = fixLinkToken(tokens)\n\n while (i < 
tokens.length) {\n const token = tokens[i] as any\n switch (token.type) {\n case 'text': {\n let content = token.content.replace(/\\\\/g, '') || ''\n if (content === '`' || content === '|' || content === '$' || content === '1' || /^\\*+$/.test(content) || /^\\d$/.test(content)) {\n i++\n break\n }\n if (/[^\\]]\\s*\\(\\s*$/.test(content)) {\n content = content.replace(/\\(\\s*$/, '')\n }\n if (raw?.startsWith('[') && pPreToken?.type === 'list_item_open') {\n const _content = content.slice(1)\n const w = _content.match(/[^\\s\\]]/)\n if (w === null) {\n i++\n break\n }\n // 如果 里面不是 w, 应该不处理\n if ((w && /x/i.test(w[0])) || !w) {\n // 转换成 checkbox_input\n const checked = w ? (w[0] === 'x' || w[0] === 'X') : false\n result.push({\n type: 'checkbox_input',\n checked,\n raw: checked ? '[x]' : '[ ]',\n })\n i++\n break\n }\n }\n if (/`[^`]*/.test(content)) {\n // 包含了 `, 需要特殊处理 code\n currentTextNode = null // Reset current text node\n\n result.push({\n type: 'inline_code',\n code: content.replace(/`/g, ''),\n raw: content || '',\n })\n i++\n break\n }\n if (content === '[') {\n i++\n break\n }\n if (/[^~]*~{2,}[^~]+/.test(content)) {\n // 处理成 parseStrikethroughToken\n const index = content.indexOf('~~') || 0\n const _text = content.slice(0, index)\n if (_text) {\n if (currentTextNode) {\n // Merge with the previous text node\n currentTextNode.content += _text\n currentTextNode.raw += _text\n }\n else {\n // Start a new text node\n currentTextNode = {\n type: 'text',\n content: _text || '',\n raw: token.content || '',\n }\n result.push(currentTextNode)\n }\n }\n const strikethroughContent = content.slice(index)\n // 处理成 strikethrough parseStrikethroughToken\n currentTextNode = null // Reset current text node\n // 如果 * 是一个用 parseStrikethroughToken, 否则应该用 parseStrongToken\n // 将 text 包装成 strikethrough token 进行处理\n const { node } = parseStrikethroughToken([\n {\n type: 's_open',\n tag: 's',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n {\n type: 'text',\n tag: '',\n content: strikethroughContent.replace(/~/g, ''),\n markup: '',\n info: '',\n meta: null,\n },\n {\n type: 's_close',\n tag: 's',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n ], 0)\n result.push(node)\n i++\n break\n }\n if (/[^*]*\\*\\*[^*]+/.test(content)) {\n const index = content.indexOf('*') || 0\n const _text = content.slice(0, index)\n if (_text) {\n if (currentTextNode) {\n // Merge with the previous text node\n currentTextNode.content += _text\n currentTextNode.raw += _text\n }\n else {\n // Start a new text node\n currentTextNode = {\n type: 'text',\n content: _text || '',\n raw: token.content || '',\n }\n result.push(currentTextNode)\n }\n }\n const strongContent = content.slice(index)\n // 处理成 em parseEmphasisToken\n currentTextNode = null // Reset current text node\n // 如果 * 是一个用 parseEmphasisToken, 否则应该用 parseStrongToken\n // 将 text 包装成 emphasis token 进行处理\n const { node } = parseStrongToken([\n {\n type: 'strong_open',\n tag: 'strong',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n {\n type: 'text',\n tag: '',\n content: strongContent.replace(/\\*/g, ''),\n markup: '',\n info: '',\n meta: null,\n },\n {\n type: 'strong_close',\n tag: 'strong',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n ], 0, raw)\n result.push(node)\n i++\n break\n }\n else if (/[^*]*\\*[^*]+/.test(content)) {\n const index = content.indexOf('*') || 0\n const _text = content.slice(0, index)\n if (_text) {\n if (currentTextNode) {\n // Merge with the previous text node\n 
currentTextNode.content += _text\n currentTextNode.raw += _text\n }\n else {\n // Start a new text node\n currentTextNode = {\n type: 'text',\n content: _text || '',\n raw: token.content || '',\n }\n result.push(currentTextNode)\n }\n }\n const emphasisContent = content.slice(index)\n // 处理成 em parseEmphasisToken\n currentTextNode = null // Reset current text node\n // 如果 * 是一个用 parseEmphasisToken, 否则应该用 parseStrongToken\n // 将 text 包装成 emphasis token 进行处理\n const { node } = parseEmphasisToken([\n {\n type: 'em_open',\n tag: 'em',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n {\n type: 'text',\n tag: '',\n content: emphasisContent.replace(/\\*/g, ''),\n markup: '',\n info: '',\n meta: null,\n },\n {\n type: 'em_close',\n tag: 'em',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n ], 0)\n result.push(node)\n i++\n break\n }\n const imageStart = content.indexOf('![')\n if (imageStart !== -1) {\n const textNodeContent = content.slice(0, imageStart)\n if (!currentTextNode) {\n currentTextNode = {\n type: 'text',\n content: textNodeContent,\n raw: textNodeContent,\n }\n }\n else {\n currentTextNode.content += textNodeContent\n }\n result.push(currentTextNode)\n currentTextNode = null // Reset current text node\n result.push(parseImageToken(token, true))\n i++\n break\n }\n const linkStart = content.indexOf('[')\n\n if (content.endsWith('undefined') && !raw?.endsWith('undefined')) {\n content = content.slice(0, -9)\n }\n const textNode = parseTextToken({ ...token, content })\n\n if (linkStart !== -1) {\n const textNodeContent = content.slice(0, linkStart)\n const linkEnd = content.indexOf('](', linkStart)\n if (linkEnd !== -1) {\n const text = content.slice(linkStart + 1, linkEnd)\n // 过滤一些奇怪的情况\n if (!/[[\\]()]/.test(text)) {\n result.push({\n type: 'text',\n content: textNodeContent,\n raw: textNodeContent,\n })\n result.push({\n type: 'link',\n href: '',\n text,\n children: [\n {\n type: 'text',\n content: text,\n raw: text,\n },\n ],\n loading: true,\n } as any)\n i++\n break\n }\n }\n }\n const preToken = tokens[i - 1]\n if (currentTextNode) {\n // Merge with the previous text node\n currentTextNode.content += textNode.content.replace(/(\\*+|\\(|\\\\)$/, '')\n currentTextNode.raw += textNode.raw\n }\n else {\n const maybeMath = preToken?.tag === 'br' && tokens[i - 2]?.content === '['\n // Start a new text node\n textNode.content = textNode.content.replace(/(\\*+|\\(|\\\\)$/, '')\n currentTextNode = textNode\n currentTextNode.center = maybeMath\n result.push(currentTextNode)\n }\n i++\n break\n }\n\n case 'softbreak':\n if (currentTextNode) {\n // Append newline to the current text node\n currentTextNode.content += '\\n'\n currentTextNode.raw += '\\n' // Assuming raw should also reflect the newline\n }\n // Don't create a node for softbreak itself, just modify text\n i++\n break\n\n case 'code_inline':\n currentTextNode = null // Reset current text node\n result.push(parseInlineCodeToken(token))\n i++\n break\n\n case 'link_open': {\n currentTextNode = null // Reset current text node\n const href = token.attrs?.find((attr: any) => attr[0] === 'href')?.[1]\n if (raw && href) {\n const loadingMath = new RegExp(`\\\\(\\\\s*${href}\\\\s*\\\\)`)\n const pre: any = result.length > 0 ? 
result[result.length - 1] : null\n const loading = !loadingMath.test(raw)\n if (loading && pre) {\n const isLinkMatch = new RegExp(`\\\\[${pre.text}\\\\s*\\\\]\\\\(`)\n if (isLinkMatch.test(raw)) {\n const text = pre?.text || (pre as any)?.content?.slice(1, -1) || ''\n result.splice(result.length - 1, 1, {\n type: 'link',\n href: '',\n text,\n loading,\n } as any) // remove the pre node\n i += 3\n if (tokens[i]?.content === '.')\n i++\n break\n }\n }\n }\n const { node, nextIndex } = parseLinkToken(tokens, i)\n i = nextIndex\n\n node.loading = false\n result.push(node)\n break\n }\n\n case 'image':\n currentTextNode = null // Reset current text node\n result.push(parseImageToken(token))\n i++\n break\n\n case 'strong_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseStrongToken(tokens, i, token.content)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'em_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseEmphasisToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 's_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseStrikethroughToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'mark_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseHighlightToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'ins_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseInsertToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'sub_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseSubscriptToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'sup_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseSuperscriptToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'sub':\n currentTextNode = null // Reset current text node\n result.push({\n type: 'subscript',\n children: [\n {\n type: 'text',\n content: token.content || '',\n raw: token.content || '',\n },\n ],\n raw: `~${token.content || ''}~`,\n })\n i++\n break\n\n case 'sup':\n currentTextNode = null // Reset current text node\n result.push({\n type: 'superscript',\n children: [\n {\n type: 'text',\n content: token.content || '',\n raw: token.content || '',\n },\n ],\n raw: `^${token.content || ''}^`,\n })\n i++\n break\n\n case 'emoji': {\n currentTextNode = null // Reset current text node\n\n const preToken = tokens[i - 1]\n if (preToken?.type === 'text' && /\\|:-+/.test(preToken.content || '')) {\n // 处理表格中的 emoji,跳过\n result.push({\n type: 'text',\n content: '',\n raw: '',\n })\n }\n else {\n result.push(parseEmojiToken(token))\n }\n i++\n break\n }\n case 'checkbox':\n currentTextNode = null // Reset current text node\n result.push(parseCheckboxToken(token))\n i++\n break\n case 'checkbox_input':\n currentTextNode = null // Reset current text node\n result.push(parseCheckboxInputToken(token))\n i++\n break\n case 'footnote_ref':\n currentTextNode = null // Reset current text node\n result.push(parseFootnoteRefToken(token))\n i++\n break\n\n case 'hardbreak':\n currentTextNode = null // Reset current text node\n result.push(parseHardbreakToken())\n i++\n break\n\n case 'fence': {\n currentTextNode = null // Reset current text node\n // Handle fenced code blocks with language specifications\n 
result.push(parseFenceToken(tokens[i]))\n i++\n break\n }\n\n case 'math_inline': {\n currentTextNode = null // Reset current text node\n result.push(parseMathInlineToken(token))\n i++\n break\n }\n\n case 'reference': {\n currentTextNode = null // Reset current text node\n result.push(parseReferenceToken(token))\n i++\n break\n }\n\n default:\n // Skip unknown token types, ensure text merging stops\n result.push(token)\n currentTextNode = null // Reset current text node\n i++\n break\n }\n }\n\n return result\n}\n","import type { BlockquoteNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseBlockquote(\n tokens: MarkdownToken[],\n index: number,\n): [BlockquoteNode, number] {\n const blockquoteChildren: ParsedNode[] = []\n let j = index + 1\n\n // Process blockquote content until closing tag is found\n while (j < tokens.length && tokens[j].type !== 'blockquote_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n blockquoteChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: contentToken.content || '',\n })\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n // Handle nested lists - use parseList directly for proper nested list support\n const [listNode, newIndex] = parseList(tokens, j)\n blockquoteChildren.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const blockquoteNode: BlockquoteNode = {\n type: 'blockquote',\n children: blockquoteChildren,\n raw: blockquoteChildren.map(child => child.raw).join('\\n'),\n }\n\n return [blockquoteNode, j + 1] // Skip blockquote_close\n}\n","import type { CodeBlockNode, MarkdownToken } from '../../types'\nimport { parseFenceToken } from '../inline-parsers/fence-parser'\n\nexport function parseCodeBlock(token: MarkdownToken): CodeBlockNode {\n // If this code block is actually a diff (some markdown-it backends\n // classify fences vs code_block differently), delegate to the\n // fence parser to preserve original/updated fields.\n if (token.info?.startsWith('diff')) {\n return parseFenceToken(token)\n }\n\n const match = token.content!.match(/ type=\"application\\/vnd\\.ant\\.([^\"]+)\"/)\n if (match?.[1]) {\n // 需要把 <antArtifact> 标签去掉\n token.content = token.content!\n .replace(/<antArtifact[^>]*>/g, '')\n .replace(/<\\/antArtifact>/g, '')\n }\n const hasMap = Array.isArray(token.map) && token.map.length === 2\n return {\n type: 'code_block',\n language: match ? 
match[1] : (token.info || ''),\n code: token.content || '',\n raw: token.content || '',\n loading: !hasMap,\n }\n}\n","import type {\n DefinitionItemNode,\n DefinitionListNode,\n MarkdownToken,\n ParsedNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseDefinitionList(\n tokens: MarkdownToken[],\n index: number,\n): [DefinitionListNode, number] {\n const items: DefinitionItemNode[] = []\n let j = index + 1\n let termNodes: ParsedNode[] = []\n let definitionNodes: ParsedNode[] = []\n\n while (j < tokens.length && tokens[j].type !== 'dl_close') {\n if (tokens[j].type === 'dt_open') {\n // Process term\n const termToken = tokens[j + 1]\n termNodes = parseInlineTokens(termToken.children || [])\n j += 3 // Skip dt_open, inline, dt_close\n }\n else if (tokens[j].type === 'dd_open') {\n // Process definition\n let k = j + 1\n definitionNodes = []\n\n while (k < tokens.length && tokens[k].type !== 'dd_close') {\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n definitionNodes.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], contentToken.content || ''),\n raw: contentToken.content || '',\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else {\n k++\n }\n }\n\n // Add definition item\n if (termNodes.length > 0) {\n items.push({\n type: 'definition_item',\n term: termNodes,\n definition: definitionNodes,\n raw: `${termNodes.map(term => term.raw).join('')}: ${definitionNodes\n .map(def => def.raw)\n .join('\\n')}`,\n })\n\n // Reset term nodes\n termNodes = []\n }\n\n j = k + 1 // Skip dd_close\n }\n else {\n j++\n }\n }\n\n const definitionListNode: DefinitionListNode = {\n type: 'definition_list',\n items,\n raw: items.map(item => item.raw).join('\\n'),\n }\n\n return [definitionListNode, j + 1] // Skip dl_close\n}\n","import type { FootnoteNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseFootnote(\n tokens: MarkdownToken[],\n index: number,\n): [FootnoteNode, number] {\n const token = tokens[index]\n const id = token.meta?.label ?? 
0\n const footnoteChildren: ParsedNode[] = []\n let j = index + 1\n\n while (j < tokens.length && tokens[j].type !== 'footnote_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n footnoteChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: contentToken.content || '',\n })\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else {\n j++\n }\n }\n\n const footnoteNode: FootnoteNode = {\n type: 'footnote',\n id,\n children: footnoteChildren,\n raw: `[^${id}]: ${footnoteChildren.map(child => child.raw).join('\\n')}`,\n }\n\n return [footnoteNode, j + 1] // Skip footnote_close\n}\n","import type { HeadingNode, MarkdownToken } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseHeading(\n tokens: MarkdownToken[],\n index: number,\n): HeadingNode {\n const token = tokens[index]\n const headingLevel = Number.parseInt(token.tag?.substring(1) || '1')\n const headingContentToken = tokens[index + 1]\n const headingContent = headingContentToken.content || ''\n\n return {\n type: 'heading',\n level: headingLevel,\n text: headingContent,\n children: parseInlineTokens(headingContentToken.children || []),\n raw: headingContent,\n }\n}\n","import type { MarkdownToken, MathBlockNode } from '../../types'\n\n// Parse a math_block token (block/display math expressions)\nexport function parseMathBlock(token: MarkdownToken): MathBlockNode {\n return {\n type: 'math_block',\n content: token.content || '',\n loading: !!token.loading,\n raw: token.raw || '',\n }\n}\n","import type {\n MarkdownToken,\n TableCellNode,\n TableNode,\n TableRowNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseTable(\n tokens: MarkdownToken[],\n index: number,\n): [TableNode, number] {\n let j = index + 1\n let headerRow: TableRowNode | null = null\n const rows: TableRowNode[] = []\n let isHeader = false\n\n while (j < tokens.length && tokens[j].type !== 'table_close') {\n if (tokens[j].type === 'thead_open') {\n isHeader = true\n j++\n }\n else if (tokens[j].type === 'thead_close') {\n isHeader = false\n j++\n }\n else if (\n tokens[j].type === 'tbody_open'\n || tokens[j].type === 'tbody_close'\n ) {\n j++\n }\n else if (tokens[j].type === 'tr_open') {\n const cells: TableCellNode[] = []\n let k = j + 1\n\n while (k < tokens.length && tokens[k].type !== 'tr_close') {\n if (tokens[k].type === 'th_open' || tokens[k].type === 'td_open') {\n const isHeaderCell = tokens[k].type === 'th_open'\n const contentToken = tokens[k + 1]\n const content = contentToken.content || ''\n\n cells.push({\n type: 'table_cell',\n header: isHeaderCell || isHeader,\n children: parseInlineTokens(contentToken.children || [], content),\n raw: content,\n })\n\n k += 3 // Skip th_open/td_open, inline, th_close/td_close\n }\n else {\n k++\n }\n }\n\n const rowNode: TableRowNode = {\n type: 'table_row',\n cells,\n raw: cells.map(cell => cell.raw).join('|'),\n }\n\n if (isHeader) {\n headerRow = rowNode\n }\n else {\n rows.push(rowNode)\n }\n\n j = k + 1 // Skip tr_close\n }\n else {\n j++\n }\n }\n\n if (!headerRow) {\n // Default empty header if none found\n headerRow = {\n type: 'table_row',\n cells: [],\n raw: '',\n }\n }\n\n const tableNode: TableNode = {\n type: 'table',\n header: headerRow,\n rows,\n loading: tokens[index].loading || false,\n raw: [headerRow, ...rows].map(row => row.raw).join('\\n'),\n }\n\n return [tableNode, j + 1] // Skip 
table_close\n}\n","import type { ThematicBreakNode } from '../../types'\n\nexport function parseThematicBreak(): ThematicBreakNode {\n return {\n type: 'thematic_break',\n raw: '---',\n }\n}\n","import type {\n ListItemNode,\n ListNode,\n MarkdownToken,\n ParsedNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseFenceToken } from '../inline-parsers/fence-parser'\nimport { parseAdmonition } from './admonition-parser'\nimport { parseBlockquote } from './blockquote-parser'\nimport { parseCodeBlock } from './code-block-parser'\nimport { parseDefinitionList } from './definition-list-parser'\nimport { parseFootnote } from './footnote-parser'\nimport { parseHeading } from './heading-parser'\nimport { parseMathBlock } from './math-block-parser'\nimport { parseTable } from './table-parser'\nimport { parseThematicBreak } from './thematic-break-parser'\n\nexport function parseList(\n tokens: MarkdownToken[],\n index: number,\n): [ListNode, number] {\n const token = tokens[index]\n const listItems: ListItemNode[] = []\n let j = index + 1\n\n while (\n j < tokens.length\n && tokens[j].type !== 'bullet_list_close'\n && tokens[j].type !== 'ordered_list_close'\n ) {\n if (tokens[j].type === 'list_item_open') {\n if (tokens[j].markup === '*') {\n j++\n continue\n }\n const itemChildren: ParsedNode[] = []\n let k = j + 1\n while (k < tokens.length && tokens[k].type !== 'list_item_close') {\n // Handle different block types inside list items\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n const preToken = tokens[k - 1]\n if (/\\n\\d+$/.test(contentToken.content || '')) {\n contentToken.content = contentToken.content?.replace(/\\n\\d+$/, '')\n contentToken.children?.splice(-1, 1)\n }\n itemChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], contentToken.content || '', preToken),\n raw: contentToken.content || '',\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (tokens[k].type === 'blockquote_open') {\n // Parse blockquote within list item\n const [blockquoteNode, newIndex] = parseBlockquote(tokens, k)\n itemChildren.push(blockquoteNode)\n k = newIndex\n }\n else if (\n tokens[k].type === 'bullet_list_open'\n || tokens[k].type === 'ordered_list_open'\n ) {\n if (tokens[k].markup === '*') {\n k++\n continue\n }\n // Parse nested list\n const [nestedListNode, newIndex] = parseNestedList(tokens, k)\n itemChildren.push(nestedListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'code_block') {\n // Parse code block\n itemChildren.push(parseCodeBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'fence') {\n // Parse fenced code block\n itemChildren.push(parseFenceToken(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'math_block') {\n // Parse math block\n itemChildren.push(parseMathBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'table_open') {\n // Parse table\n const [tableNode, newIndex] = parseTable(tokens, k)\n itemChildren.push(tableNode)\n k = newIndex\n }\n else if (tokens[k].type === 'dl_open') {\n // Parse definition list\n const [defListNode, newIndex] = parseDefinitionList(tokens, k)\n itemChildren.push(defListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'footnote_open') {\n // Parse footnote\n const [footnoteNode, newIndex] = parseFootnote(tokens, k)\n itemChildren.push(footnoteNode)\n k = newIndex\n }\n else if (tokens[k].type === 'heading_open') {\n // Parse heading (though headings in lists are 
unusual)\n const headingNode = parseHeading(tokens, k)\n itemChildren.push(headingNode)\n k += 3 // Skip heading_open, inline, heading_close\n }\n else if (tokens[k].type === 'hr') {\n // Parse thematic break\n itemChildren.push(parseThematicBreak())\n k += 1\n }\n else if (tokens[k].type === 'container_open') {\n // Handle admonition containers (warning, info, note, tip, danger, caution)\n const match\n = /^::: ?(warning|info|note|tip|danger|caution) ?(.*)$/.exec(\n tokens[k].info || '',\n )\n if (match) {\n const [admonitionNode, newIndex] = parseAdmonition(tokens, k, match)\n itemChildren.push(admonitionNode)\n k = newIndex\n }\n else {\n k += 1 // Skip unknown container types\n }\n }\n else {\n k += 1\n }\n }\n\n listItems.push({\n type: 'list_item',\n children: itemChildren,\n raw: itemChildren.map(child => child.raw).join(''),\n })\n\n j = k + 1 // Move past list_item_close\n }\n else {\n j += 1\n }\n }\n\n const listNode: ListNode = {\n type: 'list',\n ordered: token.type === 'ordered_list_open',\n // markdown-it may include attrs like [['start','2']] on ordered_list_open\n start: (() => {\n if (token.attrs && token.attrs.length) {\n const found = token.attrs.find(a => a[0] === 'start')\n if (found)\n return Number(found[1]) || 1\n }\n return undefined\n })(),\n items: listItems,\n raw: listItems.map(item => item.raw).join('\\n'),\n }\n\n return [listNode, j + 1] // Move past list_close\n}\n\n// Enhanced function to handle nested lists properly\nfunction parseNestedList(\n tokens: MarkdownToken[],\n index: number,\n): [ListNode, number] {\n // We can directly use parseList since we're in the same file\n // This avoids circular dependency issues\n const nestedToken = tokens[index]\n const nestedItems: ListItemNode[] = []\n let j = index + 1\n\n while (\n j < tokens.length\n && tokens[j].type !== 'bullet_list_close'\n && tokens[j].type !== 'ordered_list_close'\n ) {\n if (tokens[j].type === 'list_item_open') {\n if (tokens[j].markup === '*') {\n j++\n continue\n }\n const itemChildren: ParsedNode[] = []\n let k = j + 1\n\n while (k < tokens.length && tokens[k].type !== 'list_item_close') {\n // Handle different block types inside list items\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n const preToken = tokens[k - 1]\n itemChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], contentToken.content || '', preToken),\n raw: contentToken.content || '',\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[k].type === 'bullet_list_open'\n || tokens[k].type === 'ordered_list_open'\n ) {\n if (tokens[k].markup === '*') {\n k++\n continue\n }\n\n // Handle deeper nested lists\n const [deeperNestedListNode, newIndex] = parseNestedList(tokens, k)\n itemChildren.push(deeperNestedListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'code_block') {\n itemChildren.push(parseCodeBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'fence') {\n itemChildren.push(parseFenceToken(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'math_block') {\n // Parse math block in nested lists\n itemChildren.push(parseMathBlock(tokens[k]))\n k += 1\n }\n else {\n // Skip other token types in nested lists for simplicity\n k += 1\n }\n }\n\n nestedItems.push({\n type: 'list_item',\n children: itemChildren,\n raw: itemChildren.map(child => child.raw).join(''),\n })\n\n j = k + 1 // Move past list_item_close\n }\n else {\n j += 1\n }\n }\n\n const nestedListNode: ListNode = {\n type: 
'list',\n ordered: nestedToken.type === 'ordered_list_open',\n start: (() => {\n if (nestedToken.attrs && nestedToken.attrs.length) {\n const found = nestedToken.attrs.find(a => a[0] === 'start')\n if (found)\n return Number(found[1]) || 1\n }\n return undefined\n })(),\n items: nestedItems,\n raw: nestedItems.map(item => item.raw).join('\\n'),\n }\n\n return [nestedListNode, j + 1] // Move past list_close\n}\n","import type { AdmonitionNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseAdmonition(\n tokens: MarkdownToken[],\n index: number,\n match: RegExpExecArray,\n): [AdmonitionNode, number] {\n const kind = match[1] || 'note'\n const title = match[2] || kind.charAt(0).toUpperCase() + kind.slice(1)\n const admonitionChildren: ParsedNode[] = []\n let j = index + 1\n\n while (j < tokens.length && tokens[j].type !== 'container_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n if (contentToken) {\n admonitionChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: contentToken.content || '',\n })\n }\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n // Handle nested lists - use parseList directly for proper nested list support\n const [listNode, newIndex] = parseList(tokens, j)\n admonitionChildren.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const admonitionNode: AdmonitionNode = {\n type: 'admonition',\n kind,\n title,\n children: admonitionChildren,\n raw: `:::${kind} ${title}\\n${admonitionChildren\n .map(child => child.raw)\n .join('\\n')}\\n:::`,\n }\n\n return [admonitionNode, j + 1] // Skip container_close\n}\n","import type { AdmonitionNode, MarkdownToken, ParsedNode, TextNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseContainer(\n tokens: MarkdownToken[],\n index: number,\n): [AdmonitionNode, number] {\n const openToken = tokens[index]\n\n // Determine kind and optional title\n let kind = 'note'\n let title = ''\n\n const typeMatch = openToken.type.match(/^container_(\\w+)_open$/)\n if (typeMatch) {\n kind = typeMatch[1]\n // some implementations set info to remaining title text\n const info = (openToken.info || '').trim()\n if (info && !info.startsWith(':::')) {\n // if info looks like 'warning title', drop leading kind token\n const maybe = info.replace(new RegExp(`^${kind}`), '').trim()\n if (maybe)\n title = maybe\n }\n }\n else {\n // container_open: info usually contains the marker like ' warning Title'\n const info = (openToken.info || '').trim()\n\n const match\n // eslint-disable-next-line regexp/no-super-linear-backtracking\n = /^:{1,3}\\s*(warning|info|note|tip|danger|caution)\\s*(.*)$/i.exec(info)\n if (match) {\n kind = match[1]\n title = match[2] || ''\n }\n }\n\n if (!title)\n title = kind.charAt(0).toUpperCase() + kind.slice(1)\n\n const children: ParsedNode[] = []\n let j = index + 1\n\n // Accept closing tokens: 'container_close' or 'container_<kind>_close'\n const closeType = new RegExp(`^container_${kind}_close$`)\n\n while (\n j < tokens.length\n && tokens[j].type !== 'container_close'\n && !closeType.test(tokens[j].type)\n ) {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n if 
(contentToken) {\n const i = (contentToken.children as any).findLastIndex((t: TextNode) => t.type === 'text' && /:+/.test(t.content))\n const _children = i !== -1\n ? contentToken.children?.slice(0, i)\n : contentToken.children\n children.push({\n type: 'paragraph',\n children: parseInlineTokens(_children || []),\n raw: contentToken.content?.replace(/\\n:+$/, '').replace(/\\n\\s*:::\\s*$/, '') || '',\n })\n }\n j += 3\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n const [listNode, newIndex] = parseList(tokens, j)\n children.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const admonitionNode: AdmonitionNode = {\n type: 'admonition',\n kind,\n title,\n children,\n raw: `:::${kind} ${title}\\n${children.map(c => c.raw).join('\\n')}\\n:::`,\n }\n\n // Skip the closing token\n const closingIndex = j\n return [admonitionNode, closingIndex + 1]\n}\n","import type { HardBreakNode } from '../../types'\n\nexport function parseHardBreak(): HardBreakNode {\n return {\n type: 'hardbreak',\n raw: '\\\\\\n',\n }\n}\n","import type { MarkdownToken, ParagraphNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseParagraph(\n tokens: MarkdownToken[],\n index: number,\n): ParagraphNode {\n const paragraphContentToken = tokens[index + 1]\n const paragraphContent = paragraphContentToken.content || ''\n\n return {\n type: 'paragraph',\n children: parseInlineTokens(paragraphContentToken.children || [], paragraphContent),\n raw: paragraphContent,\n }\n}\n","import type MarkdownIt from 'markdown-it'\nimport type { MarkdownToken, ParsedNode, ParseOptions } from '../types'\nimport { fixTableTokens } from './fixTableTokens'\nimport { parseInlineTokens } from './inline-parsers'\nimport { parseFenceToken } from './inline-parsers/fence-parser'\nimport { parseAdmonition } from './node-parsers/admonition-parser'\nimport { parseBlockquote } from './node-parsers/blockquote-parser'\nimport { parseCodeBlock } from './node-parsers/code-block-parser'\nimport { parseContainer } from './node-parsers/container-parser'\nimport { parseDefinitionList } from './node-parsers/definition-list-parser'\nimport { parseFootnote } from './node-parsers/footnote-parser'\nimport { parseHardBreak } from './node-parsers/hardbreak-parser'\nimport { parseHeading } from './node-parsers/heading-parser'\nimport { parseList } from './node-parsers/list-parser'\nimport { parseMathBlock } from './node-parsers/math-block-parser'\nimport { parseParagraph } from './node-parsers/paragraph-parser'\nimport { parseTable } from './node-parsers/table-parser'\nimport { parseThematicBreak } from './node-parsers/thematic-break-parser'\n\nexport function parseMarkdownToStructure(\n markdown: string,\n md: MarkdownIt,\n options: ParseOptions = {},\n): ParsedNode[] {\n // Ensure markdown is a string — guard against null/undefined inputs from callers\n let safeMarkdown = (markdown ?? 
'').toString().replace(/([^\\\\])\\right/g, '$1\\\\right')\n if (safeMarkdown.endsWith('- *')) {\n // 放置markdown 解析 - * 会被处理成多个 ul >li 嵌套列表\n safeMarkdown = safeMarkdown.replace(/- \\*$/, '- \\\\*')\n }\n if (/\\n\\s*-\\s*$/.test(safeMarkdown)) {\n // 此时 markdown 解析会出错要跳过\n safeMarkdown = safeMarkdown.replace(/\\n\\s*-\\s*$/, '\\n')\n }\n else if (/\\n[[(]\\n*$/.test(safeMarkdown)) {\n // 此时 markdown 解析会出错要跳过\n safeMarkdown = safeMarkdown.replace(/(\\n\\[|\\n\\()+\\n*$/g, '\\n')\n }\n // Get tokens from markdown-it\n const tokens = md.parse(safeMarkdown, {}) as MarkdownToken[]\n // Defensive: ensure tokens is an array\n if (!tokens || !Array.isArray(tokens))\n return []\n\n // Allow consumers to transform tokens before processing\n const pre = options.preTransformTokens\n const post = options.postTransformTokens\n\n let transformedTokens = tokens\n if (pre && typeof pre === 'function') {\n transformedTokens = pre(tokens) || tokens\n }\n // Process the tokens into our structured format\n let result = processTokens(transformedTokens)\n\n // Backwards compatible token-level post hook: if provided and returns\n // a modified token array, re-process tokens and override node-level result.\n if (post && typeof post === 'function') {\n result = post(transformedTokens) || transformedTokens\n }\n return result\n}\n\n// Process markdown-it tokens into our structured format\nexport function processTokens(tokens: MarkdownToken[]): ParsedNode[] {\n // Defensive: ensure tokens is an array\n if (!tokens || !Array.isArray(tokens))\n return []\n\n const result: ParsedNode[] = []\n let i = 0\n tokens = fixTableTokens(tokens)\n while (i < tokens.length) {\n const token = tokens[i]\n switch (token.type) {\n case 'container_warning_open':\n case 'container_info_open':\n case 'container_note_open':\n case 'container_tip_open':\n case 'container_danger_open':\n case 'container_caution_open':\n case 'container_error_open': {\n const [warningNode, newIndex] = parseContainer(tokens, i)\n result.push(warningNode)\n i = newIndex\n break\n }\n\n case 'heading_open':\n result.push(parseHeading(tokens, i))\n i += 3 // Skip heading_open, inline, heading_close\n break\n\n case 'paragraph_open':\n result.push(parseParagraph(tokens, i))\n i += 3 // Skip paragraph_open, inline, paragraph_close\n break\n\n case 'html_block':\n case 'code_block':\n result.push(parseCodeBlock(tokens[i]))\n i += 1\n break\n\n case 'fence':\n result.push(parseFenceToken(tokens[i]))\n i += 1\n break\n\n case 'bullet_list_open':\n case 'ordered_list_open': {\n const [listNode, newIndex] = parseList(tokens, i)\n result.push(listNode)\n i = newIndex\n break\n }\n\n case 'hr':\n result.push(parseThematicBreak())\n i += 1\n break\n\n case 'blockquote_open': {\n const [blockquoteNode, newIndex] = parseBlockquote(tokens, i)\n result.push(blockquoteNode)\n i = newIndex\n break\n }\n\n case 'table_open': {\n const [tableNode, newIndex] = parseTable(tokens, i)\n result.push(tableNode)\n i = newIndex\n break\n }\n\n case 'dl_open': {\n const [definitionListNode, newIndex] = parseDefinitionList(tokens, i)\n result.push(definitionListNode)\n i = newIndex\n break\n }\n\n case 'footnote_open': {\n const [footnoteNode, newIndex] = parseFootnote(tokens, i)\n result.push(footnoteNode)\n i = newIndex\n break\n }\n\n case 'container_open': {\n const match\n = /^::: ?(warning|info|note|tip|danger|caution|error) ?(.*)$/.exec(\n token.info || '',\n )\n if (match) {\n const [admonitionNode, newIndex] = parseAdmonition(tokens, i, match)\n result.push(admonitionNode)\n i = 
newIndex\n }\n else {\n i += 1 // Not a container type we handle, skip\n }\n break\n }\n\n case 'hardbreak':\n result.push(parseHardBreak())\n i++\n break\n\n case 'math_block':\n result.push(parseMathBlock(tokens[i]))\n i += 1\n break\n\n default:\n // Handle other token types or skip them\n i += 1\n break\n }\n }\n\n return result\n}\n\nexport { parseInlineTokens }\n","import type { FactoryOptions } from './factory'\nimport MarkdownIt from 'markdown-it'\nimport { full as markdownItEmoji } from 'markdown-it-emoji'\nimport markdownItFootnote from 'markdown-it-footnote'\nimport markdownItIns from 'markdown-it-ins'\nimport markdownItMark from 'markdown-it-mark'\nimport markdownItSub from 'markdown-it-sub'\nimport markdownItSup from 'markdown-it-sup'\n\nimport * as markdownItCheckbox from 'markdown-it-task-checkbox'\nimport { factory } from './factory'\nimport {\n parseInlineTokens,\n parseMarkdownToStructure,\n processTokens,\n} from './parser'\n\n// Re-export config\nexport { setDefaultMathOptions } from './config'\n\n// Re-export parser functions\nexport { parseInlineTokens, parseMarkdownToStructure, processTokens }\nexport type { MathOptions } from './config'\n\n// Re-export utilities\nexport { findMatchingClose } from './findMatchingClose'\n\nexport { parseFenceToken } from './parser/inline-parsers/fence-parser'\n// Re-export plugins\nexport { applyContainers } from './plugins/containers'\n\nexport { ESCAPED_TEX_BRACE_COMMANDS, isMathLike, TEX_BRACE_COMMANDS } from './plugins/isMathLike'\nexport { applyMath, KATEX_COMMANDS, normalizeStandaloneBackslashT } from './plugins/math'\n// Re-export the node types for backward compatibility\nexport * from './types'\n\nexport interface GetMarkdownOptions extends FactoryOptions {\n plugin?: Array<any>\n apply?: Array<(md: MarkdownIt) => void>\n /**\n * Custom translation function or translation map for UI texts\n * @default { 'common.copy': 'Copy' }\n */\n i18n?: ((key: string) => string) | Record<string, string>\n}\n\nexport function getMarkdown(msgId: string = `editor-${Date.now()}`, options: GetMarkdownOptions = {}) {\n // keep legacy behaviour but delegate to new factory and reapply project-specific rules\n const md = factory(options)\n\n // Setup i18n translator function\n const defaultTranslations: Record<string, string> = {\n 'common.copy': 'Copy',\n }\n\n let t: (key: string) => string\n if (typeof options.i18n === 'function') {\n t = options.i18n\n }\n else if (options.i18n && typeof options.i18n === 'object') {\n const i18nMap = options.i18n as Record<string, string>\n t = (key: string) => i18nMap[key] ?? defaultTranslations[key] ?? key\n }\n else {\n t = (key: string) => defaultTranslations[key] ?? key\n }\n\n // apply user supplied plugins (md.use)\n if (Array.isArray(options.plugin)) {\n for (const p of options.plugin) {\n // allow both [plugin, opts] tuple or plugin function\n if (Array.isArray(p))\n md.use(p[0], p[1])\n else\n md.use(p)\n }\n }\n\n // apply user supplied apply functions to mutate the md instance (e.g. 
md.block.ruler.before(...))\n if (Array.isArray(options.apply)) {\n for (const fn of options.apply) {\n try {\n fn(md)\n }\n catch (e) {\n // swallow errors to preserve legacy behaviour; developers can see stack in console\n\n console.error('[getMarkdown] apply function threw an error', e)\n }\n }\n }\n\n // Re-apply a few project specific plugins that were previously always enabled\n md.use(markdownItSub)\n md.use(markdownItSup)\n md.use(markdownItMark)\n md.use(markdownItEmoji)\n const markdownItCheckboxPlugin\n = (markdownItCheckbox as any).default ?? markdownItCheckbox\n md.use(markdownItCheckboxPlugin)\n md.use(markdownItIns)\n md.use(markdownItFootnote)\n\n // Annotate fence tokens with unclosed meta using a lightweight line check\n md.core.ruler.after('block', 'mark_fence_closed', (state: any) => {\n const src: string = state.src as string\n const lines = src.split(/\\r?\\n/)\n for (const token of state.tokens) {\n if (token.type !== 'fence' || !token.map || !token.markup)\n continue\n const openLine: number = token.map[0]\n const endLine: number = token.map[1]\n const markup: string = token.markup\n const marker = markup[0]\n const minLen = markup.length\n // The closing line, if exists, should be the last line consumed by the block\n const lineIdx = Math.max(0, endLine - 1)\n const line = lines[lineIdx] ?? ''\n let i = 0\n while (i < line.length && (line[i] === ' ' || line[i] === '\\t')) i++\n let count = 0\n while (i + count < line.length && line[i + count] === marker) count++\n let j = i + count\n while (j < line.length && (line[j] === ' ' || line[j] === '\\t')) j++\n const closed = endLine > openLine + 1 && count >= minLen && j === line.length\n token.meta = token.meta || {}\n token.meta.unclosed = !closed\n // also set a explicit `closed` boolean for compatibility with plugins/tests\n token.meta.closed = !!closed\n }\n })\n\n // wave rule (legacy)\n const waveRule = (state: any, silent: boolean) => {\n const start = state.pos\n if (state.src[start] !== '~')\n return false\n const prevChar = state.src[start - 1]\n const nextChar = state.src[start + 1]\n if (/\\d/.test(prevChar) && /\\d/.test(nextChar)) {\n if (!silent) {\n const token = state.push('text', '', 0)\n token.content = '~'\n }\n state.pos += 1\n return true\n }\n return false\n }\n\n md.inline.ruler.before('sub', 'wave', waveRule)\n\n // custom fence that uses msgId for unique ids\n md.renderer.rules.fence = (tokens: any, idx: number) => {\n const token = tokens[idx]\n const info = token.info ? 
token.info.trim() : ''\n const str = token.content\n const encodedCode = btoa(unescape(encodeURIComponent(str)))\n const language = info || 'text'\n const uniqueId = `editor-${msgId}-${idx}-${language}`\n\n return `<div class=\"code-block\" data-code=\"${encodedCode}\" data-lang=\"${language}\" id=\"${uniqueId}\">\n <div class=\"code-header\">\n <span class=\"code-lang\">${language.toUpperCase()}</span>\n <button class=\"copy-button\" data-code=\"${encodedCode}\">${t(\n 'common.copy',\n )}</button>\n </div>\n <div class=\"code-editor\"></div>\n </div>`\n }\n\n // reference rule (legacy)\n const referenceInline = (state: any, silent: boolean) => {\n if (state.src[state.pos] !== '[')\n return false\n const match = /^\\[(\\d+)\\]/.exec(state.src.slice(state.pos))\n if (!match)\n return false\n if (!silent) {\n const id = match[1]\n const token = state.push('reference', 'span', 0)\n token.content = id\n token.markup = match[0]\n }\n state.pos += match[0].length\n return true\n }\n\n md.inline.ruler.before('escape', 'reference', referenceInline)\n md.renderer.rules.reference = (tokens: any, idx: number) => {\n const id = tokens[idx].content\n return `<span class=\"reference-link\" data-reference-id=\"${id}\" role=\"button\" tabindex=\"0\" title=\"Click to view reference\">${id}</span>`\n }\n\n return md\n}\n\nexport function getCommonMarkdown() {\n const md = new MarkdownIt({\n html: true,\n linkify: true,\n typographer: true,\n breaks: false,\n })\n return md\n}\n\nexport function renderMarkdown(md: MarkdownIt, content: string) {\n const html = md.render(content)\n return html\n}\n"],"mappings":";;;;;;;;;;;AAgBA,IAAIA;AAEJ,SAAgB,sBAAsB,MAA+B;AACnE,sBAAqB;;AAGvB,SAAgB,wBAAiD;AAC/D,QAAO;;;;;ACpBT,SAAgB,gBAAgB,IAAgB;AAC7C;EACC;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD,CAAC,SAAS,SAAS;AAClB,KAAG,IAAI,qBAAqB,MAAM,EAChC,OAAO,QAAa,KAAa;AAE/B,OADc,OAAO,KACX,YAAY,EACpB,QAAO,2CAA2C,KAAK;OAGvD,QAAO;KAGZ,CAAC;GACF;AAGF,IAAG,MAAM,MAAM,OACb,SACA,2BACC,OAAY,WAAmB,SAAiB,WAAoB;EACnE,MAAM,WAAW,MAAM,OAAO,aAAa,MAAM,OAAO;EACxD,MAAM,UAAU,MAAM,OAAO;EAC7B,MAAM,cAAc,MAAM,IACvB,MAAM,UAAU,QAAQ,CACxB,MAAM,eAAe;AACxB,MAAI,CAAC,YACH,QAAO;AACT,MAAI,OACF,QAAO;EAET,MAAM,OAAO,YAAY;EACzB,IAAI,WAAW,YAAY;EAC3B,IAAI,QAAQ;AACZ,SAAO,YAAY,SAAS;GAC1B,MAAM,OAAO,MAAM,OAAO,YAAY,MAAM,OAAO;GACnD,MAAM,OAAO,MAAM,OAAO;AAC1B,OAAI,MAAM,IAAI,MAAM,MAAM,KAAK,CAAC,MAAM,KAAK,OAAO;AAChD,YAAQ;AACR;;AAEF;;AAEF,MAAI,CAAC,MACH,QAAO;AAGT,EADkB,MAAM,KAAK,sBAAsB,OAAO,EAAE,CAClD,QAAQ,SAAS,+BAA+B,OAAO;EAEjE,MAAMC,eAAyB,EAAE;AACjC,OAAK,IAAI,IAAI,YAAY,GAAG,IAAI,UAAU,KAAK;GAC7C,MAAM,OAAO,MAAM,OAAO,KAAK,MAAM,OAAO;GAC5C,MAAM,OAAO,MAAM,OAAO;AAC1B,gBAAa,KAAK,MAAM,IAAI,MAAM,MAAM,KAAK,CAAC;;AAIhD,QAAM,KAAK,kBAAkB,KAAK,EAAE;EACpC,MAAM,cAAc,MAAM,KAAK,UAAU,IAAI,EAAE;AAC/C,cAAY,UAAU,aAAa,KAAK,KAAK;AAC7C,cAAY,MAAM,CAAC,YAAY,GAAG,SAAS;AAG3C,cAAY,WAAW,EAAE;AACzB,QAAM,GAAG,OAAO,MAAM,YAAY,SAAS,MAAM,IAAI,MAAM,KAAK,YAAY,SAAS;AACrF,QAAM,KAAK,mBAAmB,KAAK,GAAG;AAEtC,QAAM,KAAK,uBAAuB,OAAO,GAAG;AAE5C,QAAM,OAAO,WAAW;AACxB,SAAO;GAEV;;;;;ACnFH,SAAgB,kBAAkB,KAAa,UAAkB,MAAc,OAAe;CAC5F,MAAM,MAAM,IAAI;AAGhB,KAAI,SAAS,QAAQ,UAAU,MAAM;EACnC,IAAIC,MAAI;AACR,SAAOA,MAAI,MAAM,GAAG;AAClB,OAAI,IAAIA,SAAO,OAAO,IAAIA,MAAI,OAAO,KAAK;IAExC,IAAI,IAAIA,MAAI;IACZ,IAAI,cAAc;AAClB,WAAO,KAAK,KAAK,IAAI,OAAO,MAAM;AAChC;AACA;;AAEF,QAAI,cAAc,MAAM,EACtB,QAAOA;;AAEX;;AAEF,SAAO;;CAGT,MAAM,WAAW,KAAK,KAAK,SAAS;CACpC,MAAM,WAAW;CACjB,IAAI,QAAQ;CACZ,IAAI,IAAI;AACR,QAAO,IAAI,KAAK;AAEd,MAAI,IAAI,MAAM,GAAG,IAAI,SAAS,OAAO,KAAK,UAAU;GAClD,IAAI,IAAI,IAAI;GACZ,IAAI,cAAc;AAClB,UAAO,KAAK,KAAK,IAAI,OAAO,MAAM;AAChC;AACA;;AAEF,OAAI,cAAc
,MAAM,GAAG;AACzB,QAAI,UAAU,EACZ,QAAO;AACT;AACA,SAAK,SAAS;AACd;;;EAIJ,MAAM,KAAK,IAAI;AAEf,MAAI,OAAO,MAAM;AACf,QAAK;AACL;;AAGF,MAAI,OAAO,SACT;WAEO,OAAO,SAAS,SAAS,SAAS,IACzC;OAAI,QAAQ,EACV;;AAEJ;;AAEF,QAAO;;AAGT,gCAAe;;;;AChEf,MAAa,qBAAqB;CAChC;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAa,6BAA6B,mBAAmB,KAAI,MAAK,EAAE,QAAQ,wBAAwB,OAAO,CAAC,CAAC,KAAK,IAAI;AAE1H,MAAM,aAAa;AACnB,MAAM,eAAe;AACrB,MAAM,yBAAyB,IAAI,OAAO,GAAG,aAAa,KAAK,2BAA2B,mBAAmB,IAAI;AAKjH,MAAM,yBAAyB,IAAI,OAAO,MAAM,aAAa,OAAO,2BAA2B,SAAS,IAAI;AAC5G,MAAM,kBAAkB;AAOxB,MAAM,yBAAS,IAAI,OAAO,iFAAiF;AAC3G,MAAM,eAAe;AACrB,MAAM,WAAW;AAGjB,MAAM,eAAe;AACrB,SAAgB,WAAW,GAAW;AACpC,KAAI,CAAC,EACH,QAAO;CAQT,MAAM,OAAO,EAAE,QAAQ,WAAW,MAAM;CACxC,MAAM,WAAW,KAAK,MAAM;AAI5B,KAAI,aAAa,KAAK,SAAS,CAC7B,QAAO;AACT,KAAI,SAAS,SAAS,IACpB,QAAO;AAET,KAAI,sBAAsB,KAAK,EAAE,CAC/B,QAAO;CAIT,MAAM,SAAS,WAAW,KAAK,KAAK;CACpC,MAAM,mBAAmB,uBAAuB,KAAK,KAAK;CAC1D,MAAM,gBAAgB,uBAAuB,KAAK,KAAK;CAGvD,MAAM,cAAc,gBAAgB,KAAK,KAAK;CAG9C,MAAM,WAFmB,4DAES,KAAK,KAAK,IADjB,6DACwC,KAAK,KAAK;CAE7E,MAAM,MAAM,OAAO,KAAK,KAAK;CAE7B,MAAM,WAAW,aAAa,KAAK,KAAK;CAExC,MAAM,QAAQ,SAAS,KAAK,KAAK;AAKjC,QAAO,UAAU,oBAAoB,iBAAiB,eAAe,YAAY,OAAO,YAAY;;;;;ACvEtG,MAAa,iBAAiB;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAOD,MAAa,yBAAyB,eACnC,OAAO,CACP,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO,CACnC,KAAI,MAAK,EAAE,QAAQ,yBAAyB,OAAO,CAAC,CACpD,KAAK,IAAI;AACZ,MAAM,sBAAsB;AAI5B,MAAMC,cAAsC;CAC1C,KAAM;CACN,MAAM;CACN,MAAM;CACN,MAAM;CACN,MAAM;CACP;AAED,SAAS,qBAAqB,GAAW;CACvC,MAAM,KAAK;CAEX,IAAI,IAAI;AAER,QAAY,GAAG,KAAK,EAAE,KAAM,KAC1B;AAEF,QAAO;;AAGT,SAAgB,8BAA8B,GAAW,MAAoB;CAC3E,MAAM,WAAW,MAAM,YAAY;CACnC,MAAM,oBAAoB,MAAM,qBAAqB;CAErD,MAAM,aAAa,MAAM,YAAY;CAGrC,IAAIC;AACJ,KAAI,WACF,MAAK,IAAI,OAAO,GAAG,oBAAoB,iBAAiB,uBAAuB,OAAO,IAAI;MAEvF;EACH,MAAM,iBAAiB,MAAM,SAAS,OAAO,CAAC,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,KAAI,MAAK,EAAE,QAAQ,0BAA0B,OAAO,CAAC,CAAC,KAAK,IAAI,CAAC;AAClJ,OAAK,IAAI,OAAO,GAAG,oBAAoB,iBAAiB,eAAe,OAAO,IAAI;;CAGpF,IAAI,MAAM,EAAE,QAAQ,KAAK,GAAW,QAAiB;AACnD,MAAI,YAAY,OAAO,OACrB,QAAO,KAAK,YAAY;AAC1B,MAAI,OAAO,SAAS,SAAS,IAAI,CAC/B,QAAO,KAAK;AACd,SAAO;GACP;AAGF,KAAI,kBACF,OAAM,IAAI,QAAQ,eAAe,QAAQ;CAS3C,MAAM,eAAe,aACjB,CAAC,4BAA4B,uBAAuB,CAAC,OAAO,QAAQ,CAAC,KAAK,IAAI,GAC9E,CAAC,SAAS,KAAI,MAAK,EAAE,QAAQ,yBAAyB,OAAO,CAAC,CAAC,KAAK,IAAI,EAAE,2BAA2B,CAAC,OAAO,QAAQ,CAAC,KAAK,IAAI;CACnI,IAAI,SAAS;AACb,KAAI,cAAc;EAChB,MAAM,aAAa,IAAI,OAAO,eAAe,aAAa,WAAW,IAAI;AACzE,WAAS,OAAO,QAAQ,aAAa,IAAY,IAAY,OAAe,GAAG,GAAG,IAAI,GAAG,GAAG;;AAE9F,UAAS,OAAO,QAAQ,mBAAmB,eAAe,CACvD,QAAQ,oDAAoD,+BAA+B;AAC9F,QAAO;;AAET,SAAgB,UAAU,IAAgB,UAAwB;CAEhE,MAAM,cAAc,OAAY,WAAoB;AAClD,MAAI,WAAW,KAAK,MAAM,IAAI,CAC5B,QAAO;EAET,MAAMC,aAAiC;GACrC,CAAC,MAAM,KAAK;GACZ,CAAC,OAAO,MAAM;GACd,CAAC,KAAM,IAAK;GACb;EAED,IAAI,YAAY;EAChB,IAAI,aAAa;AAEjB,OAAK,MAAM,CAAC,MAAM,UAAU,YAAY;GAEtC,MAAM,MAAM,MAAM;GAClB,IAAI,WAAW;GACf,MAAM,YAAY,SAAiB;AAEjC,QAAI,SAAS,eAAe,QAAQ,KAClC,QAAO;AAET,QAAI,SAAS,MAAM;AACjB,WAAM,MAAM,MAAM,MAAM,KAAK;AAC7B,iBAAY,MAAM;AAClB;;AAEF,QAAI,SAAS,SAAS,SAAS,OAAO;KACpC,MAAMC,MAAI,MAAM,KAAK,gBAAgB,IAAI,EAAE;AAC3C,SAAE,UAAU,SAAS,QAAQ,MAAM;AACnC,SAAE,SAAS;AACX,WAAM,MAAM,MAAM,MAAM,KAAK;AAC7B,iBAAY,MAAM;AAClB;;AAGF,QAAI,CAAC,KACH;IAsBF,MAAM,IAAI,MAAM,KAAK,QAAQ,IAAI,EAAE;AACnC,MAAE,UAAU;AACZ,UAAM,MAAM,MAAM,MAAM,KAAK;AAC7B,gBAAY,MAAM;;AAGpB,UAAO,MAAM;AACX,QAAI,aAAa,IAAI,OACnB;IAC
F,MAAM,QAAQ,IAAI,QAAQ,MAAM,UAAU;AAC1C,QAAI,UAAU,GACZ;AAMF,QAAI,QAAQ,GAAG;KACb,IAAI,IAAI,QAAQ;AAEhB,YAAO,KAAK,KAAK,IAAI,OAAO,IAC1B;AACF,SAAI,KAAK,KAAK,IAAI,OAAO,IACvB,QAAO;;IAMX,MAAM,SAASC,0BAAkB,KAAK,QAAQ,KAAK,QAAQ,MAAM,MAAM;AAEvE,QAAI,WAAW,IAAI;KAEjB,MAAMC,YAAU,IAAI,MAAM,QAAQ,KAAK,OAAO;AAC9C,SAAI,WAAWA,UAAQ,EAAE;AACvB,kBAAY,QAAQ,KAAK;AACzB,iBAAW;AACX,UAAI,CAAC,QAAQ;AACX,aAAM,UAAU;OAEhB,MAAM,iBAAiB,qBADF,aAAa,IAAI,MAAM,YAAY,UAAU,GAAG,IAAI,MAAM,GAAG,UAAU,CACnC,GAAG,MAAM;AAElE,WAAI,WACF,UAAS,IAAI,MAAM,YAAY,UAAU,CAAC;WAE1C,UAAS,IAAI,MAAM,GAAG,UAAU,CAAC;AACnC,WAAI,gBAAgB;QAClB,MAAM,cAAc,MAAM,KAAK,eAAe,IAAI,EAAE;AACpD,oBAAY,SAAS,IAAI,MAAM,GAAG,QAAQ,EAAE;QAC5C,MAAM,QAAQ,MAAM,KAAK,eAAe,QAAQ,EAAE;AAClD,cAAM,UAAU,8BAA8BA,WAAS,SAAS;AAChE,cAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,cAAM,MAAM,GAAG,OAAOA,YAAU;AAChC,cAAM,UAAU;AAChB,oBAAY,UAAUA;AACtB,cAAM,KAAK,gBAAgB,IAAI,EAAE;cAE9B;QACH,MAAM,QAAQ,MAAM,KAAK,eAAe,QAAQ,EAAE;AAClD,cAAM,UAAU,8BAA8BA,WAAS,SAAS;AAChE,cAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,cAAM,MAAM,GAAG,OAAOA,YAAU;AAChC,cAAM,UAAU;;AAGlB,aAAM,MAAM,IAAI;;AAElB,kBAAY,IAAI;AAChB,mBAAa;;AAEf;;IAEF,MAAM,UAAU,IAAI,MAAM,QAAQ,KAAK,QAAQ,OAAO;AACtD,QAAI,CAAC,WAAW,QAAQ,EAAE;AAGxB,iBAAY,SAAS,MAAM;KAC3B,MAAM,OAAO,IAAI,MAAM,MAAM,KAAK,UAAU;AAC5C,SAAI,CAAC,MAAM,QACT,UAAS,KAAK;AAChB;;AAEF,eAAW;AAEX,QAAI,CAAC,QAAQ;KAEX,MAAM,SAAS,IAAI,MAAM,GAAG,MAAM;KAUlC,IAAI,eAPiB,IAAI,MAAM,GAAG,UAAU,GAOV,IAAI,MAAM,YAAY,MAAM,GAAG;KACjE,MAAM,iBAAiB,qBAAqB,aAAa,GAAG,MAAM;AAClE,SAAI,UAAU,MAAM,OAAO,eACzB,gBAAe,MAAM,UAAU,IAAI,MAAM,MAAM,KAAK,MAAM;AAI5D,SAAI,MAAM,YAAY,cAAc;AAClC,YAAM,UAAU;AAChB,UAAI,gBAAgB;OAClB,MAAM,SAAS,aAAa,MAAM,QAAQ;OAC1C,MAAM,QAAQ,aAAa,MAAM,OAAQ,QAAS,OAAQ,GAAG,OAAO;AACpE,gBAAS,aAAa,MAAM,GAAG,OAAQ,MAAO,CAAC;OAC/C,MAAM,cAAc,MAAM,KAAK,eAAe,IAAI,EAAE;AACpD,mBAAY,SAAS,OAAQ;OAC7B,MAAM,YAAY,MAAM,KAAK,QAAQ,IAAI,EAAE;AAC3C,iBAAU,UAAU;AACpB,aAAM,KAAK,gBAAgB,IAAI,EAAE;YAGjC,UAAS,aAAa;;AAG1B,SAAI,gBAAgB;MAClB,MAAM,cAAc,MAAM,KAAK,eAAe,IAAI,EAAE;AACpD,kBAAY,SAAS;MACrB,MAAM,QAAQ,MAAM,KAAK,eAAe,QAAQ,EAAE;AAClD,YAAM,UAAU,8BAA8B,SAAS,SAAS;AAChE,YAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,YAAM,MAAM,GAAG,OAAO,UAAU;AAChC,YAAM,UAAU;MAChB,MAAM,MAAM,IAAI,MAAM,SAAS,MAAM,OAAO;MAC5C,MAAM,gBAAgB,IAAI,WAAW,IAAI;AACzC,UAAI,cACF,OAAM,KAAK,gBAAgB,IAAI,EAAE;AAEnC,UAAI,KAAK;OACP,MAAM,mBAAmB,MAAM,KAAK,QAAQ,IAAI,EAAE;AAClD,wBAAiB,WAAW,OAAO,OAAO,KAAK,OAAO,IAAI,EAAE,QAAQ,QAAQ,GAAG;;AAEjF,UAAI,CAAC,cACH,OAAM,KAAK,gBAAgB,IAAI,EAAE;AACnC,YAAM,MAAM,IAAI;AAChB,kBAAY,IAAI;AAChB,mBAAa;AACb;YAEG;MACH,MAAM,QAAQ,MAAM,KAAK,eAAe,QAAQ,EAAE;AAClD,YAAM,UAAU,8BAA8B,SAAS,SAAS;AAChE,YAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,YAAM,MAAM,GAAG,OAAO,UAAU;AAChC,YAAM,UAAU;;;AAIpB,gBAAY,SAAS,MAAM;AAC3B,iBAAa;AACb,UAAM,MAAM;;AAGd,OAAI,UAAU;AACZ,QAAI,CAAC,QAAQ;AAEX,SAAI,YAAY,IAAI,OAClB,UAAS,IAAI,MAAM,UAAU,CAAC;AAEhC,WAAM,MAAM,IAAI;UAIhB,OAAM,MAAM;AAGd,WAAO;;;AAIX,SAAO;;CAIT,MAAM,aACJ,OACA,WACA,SACA,WACG;EACH,MAAMH,aAAiC;GACrC,CAAC,OAAO,MAAM;GACd,CAAC,KAAM,IAAK;GACZ,CAAC,MAAM,KAAK;GACb;EAED,MAAM,WAAW,MAAM,OAAO,aAAa,MAAM,OAAO;EACxD,MAAM,WAAW,MAAM,IAAI,MAAM,UAAU,MAAM,OAAO,WAAW,CAAC,MAAM;EAC1E,IAAI,UAAU;EACd,IAAI,YAAY;EAChB,IAAI,aAAa;AACjB,OAAK,MAAM,CAAC,MAAM,UAAU,WAC1B,KAAI,aAAa,QAAQ,SAAS,WAAW,KAAK,CAChD,KAAI,KAAK,SAAS,IAAI,EACpB;OAAI,SAAS,QAAQ,MAAM,GAAG,KAAK,KAAK;AACtC,QAAI,YAAY,IAAI,SAAS;AAO3B,eAAU;AACV,iBAAY;AACZ,kBAAa;AACb;;AAEF;;SAGC;AACH,aAAU;AACV,eAAY;AACZ,gBAAa;AACb;;AAKN,MAAI,CAAC,QACH,QAAO;AACT,MAAI,OACF,QAAO;AAET,MACE,SAAS,SAAS,WAAW,IAC1B,SAAS,QAAQ,WAAW,GAAG,UAAU,QAC5C;GACA,MAAM,kBAAkB,SAAS,QAAQ,UA
AU;GACnD,MAAM,gBAAgB,SAAS,QAC7B,YACA,kBAAkB,UAAU,OAC7B;GACD,MAAMG,YAAU,SAAS,MACvB,kBAAkB,UAAU,QAC5B,cACD;GAED,MAAMC,UAAa,MAAM,KAAK,cAAc,QAAQ,EAAE;AACtD,WAAM,UAAU,8BAA8BD,UAAQ;AACtD,WAAM,SACF,cAAc,OAAO,OAAO,cAAc,MAAM,OAAO;AAC3D,WAAM,MAAM,CAAC,WAAW,YAAY,EAAE;AACtC,WAAM,MAAM,GAAG,YAAYA,YAAU;AACrC,WAAM,QAAQ;AACd,WAAM,UAAU;AAChB,SAAM,OAAO,YAAY;AACzB,UAAO;;EAGT,IAAI,WAAW;EACf,IAAI,UAAU;EACd,IAAI,QAAQ;EAEZ,MAAM,mBACF,aAAa,YAAY,KAAK,SAAS,MAAM,UAAU,OAAO;AAElE,MAAI,iBAAiB,SAAS,WAAW,EAAE;GACzC,MAAM,WAAW,iBAAiB,QAAQ,WAAW;AACrD,aAAU,iBAAiB,MAAM,GAAG,SAAS;AAC7C,WAAQ;AACR,cAAW;SAER;AACH,OAAI,iBACF,WAAU;AAEZ,QAAK,WAAW,YAAY,GAAG,WAAW,SAAS,YAAY;IAC7D,MAAM,YAAY,MAAM,OAAO,YAAY,MAAM,OAAO;IACxD,MAAM,UAAU,MAAM,OAAO;IAC7B,MAAM,cAAc,MAAM,IAAI,MAAM,YAAY,GAAG,QAAQ;AAC3D,QAAI,YAAY,MAAM,KAAK,YAAY;AACrC,aAAQ;AACR;eAEO,YAAY,SAAS,WAAW,EAAE;AACzC,aAAQ;KACR,MAAM,WAAW,YAAY,QAAQ,WAAW;AAChD,iBAAY,UAAU,OAAO,MAAM,YAAY,MAAM,GAAG,SAAS;AACjE;;AAEF,gBAAY,UAAU,OAAO,MAAM;;;EAIvC,MAAMC,QAAa,MAAM,KAAK,cAAc,QAAQ,EAAE;AACtD,QAAM,UAAU,8BAA8B,QAAQ;AACtD,QAAM,SACF,cAAc,OAAO,OAAO,cAAc,MAAM,OAAO;AAC3D,QAAM,MAAM,GAAG,YAAY,UAAU,QAAQ,WAAW,KAAK,GAAG,OAAO,KAAK;AAC5E,QAAM,MAAM,CAAC,WAAW,WAAW,EAAE;AACrC,QAAM,QAAQ;AACd,QAAM,UAAU,CAAC;AACjB,QAAM,OAAO,WAAW;AACxB,SAAO;;AAOT,IAAG,OAAO,MAAM,OAAO,UAAU,QAAQ,WAAW;AACpD,IAAG,MAAM,MAAM,OAAO,aAAa,cAAc,WAAW,EAC1D,KAAK;EAAC;EAAa;EAAa;EAAc;EAAO,EACtD,CAAC;;;;;ACzgBJ,SAAgB,iBAAiB,IAAgB;CAC/C,MAAM,eACF,GAAG,SAAS,MAAM,SACf,SAAU,QAAa,KAAa,SAAc,KAAU,MAAW;AACxE,SAAO,KAAK,YAAY,QAAQ,KAAK,QAAQ;;AAGnD,IAAG,SAAS,MAAM,SAChB,QACA,KACA,SACA,KACA,SACG;AAEH,EADc,OAAO,KACf,UAAU,WAAW,OAAO;AAClC,SAAO,aAAa,QAAQ,KAAK,SAAS,KAAK,KAAK;;AAGtD,IAAG,SAAS,MAAM,QACd,GAAG,SAAS,MAAM,WACb,QAAa,QAAgB;EAChC,MAAM,QAAQ,OAAO;EACrB,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK,MAAM,GAAG;AAK9C,SAAO,eAJW,OACd,YAAY,GAAG,MAAM,WAAW,KAAK,MAAM,OAAO,CAAC,GAAG,KACtD,GAE4B,UADnB,GAAG,MAAM,WAAW,MAAM,QAAQ,CACA;;;;;;AChBvD,SAAgB,QAAQ,OAAuB,EAAE,EAAc;CAC7D,MAAM,KAAK,IAAI,WAAW;EACxB,MAAM;EACN,SAAS;EACT,aAAa;EACb,GAAI,KAAK,qBAAqB,EAAE;EACjC,CAAC;AAEF,KAAI,KAAK,cAAc,KAErB,WAAU,IAD6B;EAAE,GAAI,uBAAuB,IAAI,EAAE;EAAG,GAAI,KAAK,eAAe,EAAE;EAAG,CAC1E;AAElC,KAAI,KAAK,oBAAoB,KAC3B,iBAAgB,GAAG;AACrB,kBAAiB,GAAG;AAEpB,QAAO;;;;;AC5BT,SAAS,cAAc;AACrB,QAAO;EACL;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,KAAK;GACL,UAAU;GACV,SAAS;GACT,QAAQ;GACR,MAAM;GACN,OAAO;GACP,SAAS;GACT,MAAM;GACP;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAEF;;AAEH,SAAS,YAAY;AACnB,QAAO;EACL;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,KAAK;GACL,UAAU;GACV,SAAS;GACT,QAAQ;GACR,MAAM;GACN,OAAO;GACP,MAAM;GACP;EACF;;AAEH,SAAS,SAAS,MAAc;AAC9B,QAAO;EAAC;GACN,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAAE;GACD,MAAM;GACN,KAAK;GACL,UAAU,CACR;IACE,KAAK;IACL,MAAM;IACN,OAAO;IACP,SAAS;IACT,UAAU;IACX,CACF;GACD,SAAS;GACT,OAAO;GACP,OAAO;GACP,OAAO;GACR;EAAE;GACD,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAAC;;AAEJ,SAAgB,eAAe,QAA0C;CACvE,MAAM,cAAc,CAAC,GAAG,OAAO;AAC/B,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,MAAM,IAAI,OAAO,SAAS;CAC1B,MAAM,QAAQ,OAAO;AAErB,KAAI,MAAM,SAAS,UACjB;MAAI,qBAAqB,KAAK,MAAM,QAAS,EAAE;GAE7C,MAAM,OAAO,MAAM,SAAU,GAAG,QAAS,MAAM,EAAE,CAAC,MAAM,IAAI,CAAC,KAAI,QAAKC,IAAE,MAAM,CAAC,CAAC,OAAO,QAAQ,CAAC,SAAQ,QAAK,SAASA,IAAE,CAAC;GACzH,MAAM,SAAS;IACb,GAAG,aAAa;IAChB,GAAG;IACH,GAAG,WAAW;IACf;AACD,eAAY,OAAO,IAAI,GAAG,GAAG,GAAG,OAAO;aAEhC,2BAA2B,KAAK,MAAM,QAAS,EAAE;GAExD,MAAM,OAAO,MAAM,SAAU,GAAG,QAA
S,MAAM,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,KAAI,QAAKA,IAAE,MAAM,CAAC,CAAC,SAAQ,QAAK,SAASA,IAAE,CAAC;GAC7G,MAAM,SAAS;IACb,GAAG,aAAa;IAChB,GAAG;IACH,GAAG,WAAW;IACf;AACD,eAAY,OAAO,IAAI,GAAG,GAAG,GAAG,OAAO;aAEhC,4BAA4B,KAAK,MAAM,QAAS,EAAE;AACzD,SAAM,UAAU,MAAM,QAAS,MAAM,GAAG,GAAG;AAC3C,SAAM,SAAU,OAAO,GAAG,EAAE;;;AAIhC,QAAO;;;;;ACrIT,SAAgB,mBAAmB,OAAoC;AACrE,QAAO;EACL,MAAM;EACN,SAAS,MAAM,MAAM,YAAY;EACjC,KAAK,MAAM,MAAM,UAAU,QAAQ;EACpC;;AAGH,SAAgB,wBAAwB,OAA+B;AACrE,QAAO;EACL,MAAM;EACN,SAAS,MAAM,QAAQ,UAAU,KAAK,MAAM,MAAM,QAAQ,UAAU,KAAK;EACzE,KAAK,MAAM,QAAQ,UAAU,KAAK,MAAM,MAAM,QAAQ,UAAU,KAAK,SAAS,QAAQ;EACvF;;;;;ACbH,SAAgB,gBAAgB,OAAiC;AAC/D,QAAO;EACL,MAAM;EACN,MAAM,MAAM,WAAW;EACvB,QAAQ,MAAM,UAAU;EACxB,KAAK,IAAI,MAAM,WAAW,GAAG;EAC9B;;;;;ACLH,SAAgB,mBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,SAAS;CACb,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,YAAY;AACzD,YAAU,OAAO,GAAG,WAAW;AAC/B,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATkB;GACzB,MAAM;GACN;GACA,KAAK,IAAI,OAAO;GACjB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AChC5B,SAAS,iBAAiB,SAAiB;CACzC,MAAMC,OAAiB,EAAE;CACzB,MAAMC,UAAoB,EAAE;AAC5B,MAAK,MAAM,WAAW,QAAQ,MAAM,QAAQ,EAAE;EAC5C,MAAM,OAAO;AAEb,MAAI,qCAAqC,KAAK,KAAK,CACjD;AAEF,MAAI,KAAK,WAAW,KAAK,CACvB,MAAK,KAAK,IAAI,KAAK,MAAM,EAAE,GAAG;WAEvB,KAAK,WAAW,KAAK,CAC5B,SAAQ,KAAK,IAAI,KAAK,MAAM,EAAE,GAAG;OAE9B;AAEH,QAAK,KAAK,KAAK;AACf,WAAQ,KAAK,KAAK;;;AAGtB,QAAO;EACL,UAAU,KAAK,KAAK,KAAK;EACzB,SAAS,QAAQ,KAAK,KAAK;EAC5B;;AAGH,SAAgB,gBAAgB,OAAqC;CACnE,MAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,IAAI,MAAM,IAAI,WAAW;CAChE,MAAM,OAAQ,MAAc;CAC5B,MAAM,SAAS,OAAO,MAAM,WAAW,YAAY,KAAK,SAAS;CACjE,MAAM,OAAO,MAAM,MAAM,WAAW,OAAO,IAAI;CAC/C,MAAM,WAAW,OAAO,MAAM,MAAM,MAAM,IAAI,CAAC,MAAM,KAAK,MAAM,QAAQ;CAUxE,IAAI,UAAU,MAAM,WAAW;CAC/B,MAAM,oBAAoB;AAC1B,KAAI,kBAAkB,KAAK,QAAQ,CACjC,WAAU,QAAQ,QAAQ,mBAAmB,GAAG;AAElD,KAAI,MAAM;EACR,MAAM,EAAE,UAAU,YAAY,iBAAiB,QAAQ;AAEvD,SAAO;GACL,MAAM;GACN;GACA,MAAM,WAAW;GACjB,KAAK;GACL;GACA,SAAS,WAAW,OAAO,QAAQ,WAAW,QAAQ,OAAO,CAAC;GAC9D,cAAc;GACd,aAAa;GACd;;AAGH,QAAO;EACL,MAAM;EACN;EACA,MAAM,WAAW;EACjB,KAAK,WAAW;EAChB;EACA,SAAS,WAAW,OAAO,QAAQ,WAAW,QAAQ,OAAO,CAAC;EAC/D;;;;;ACrEH,SAAgB,aAAa,QAA0C;AACrE,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,MAAM,QAAQ,OAAO,OAAO,SAAS;AACrC,KAAI,MAAM,SAAS,UAAU,CAAC,MAAM,QAAS,SAAS,IAAI,CACxD,QAAO,eAAe,OAAO;AAE/B,KADe,OAAO,OAAO,SAAS,GAC3B,QAAQ,KACjB,QAAO,eAAe,OAAO;CAC/B,MAAM,OAAO,OAAO,OAAO,SAAS;AACpC,KAAI,CAAC,KAAK,QAAS,WAAW,IAAI,CAChC,QAAO,eAAe,OAAO;CAE/B,MAAM,QAAQ,OAAO,OAAO,SAAS;CACrC,MAAM,OAAO,KAAK,QAAS,QAAQ,UAAU,GAAG;CAChD,MAAM,UAAU,CAAC,KAAK,QAAS,SAAS,IAAI;AAC5C,OAAM,UAAU,MAAM,QAAS,QAAQ,OAAO,GAAG;AACjD,QAAO,OAAO,OAAO,SAAS,GAAG,GAAG;EAClC,MAAM;EACN;EACA,MAAM,MAAM;EACZ,UAAU,CACR;GACE,MAAM;GACN,SAAS,MAAM;GACf,KAAK,MAAM;GACZ,CACF;EACD;EACD,CAAQ;AACT,QAAO,OAAO,OAAO,SAAS,GAAG,EAAE;AACnC,QAAO;;AAGT,SAAgB,eAAe,QAA0C;AACvE,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,IAAI,SAAS,OAAO;CACpB,IAAI,OAAO,OAAO,SAAS;AAC3B,KAAI,KAAK,SAAS,cAAc;AAC9B;AACA,SAAO,OAAO,SAAS;AACvB,MAAI,KAAK,SAAS,aAChB,QAAO;;AAGX,KADe,OAAO,SAAS,GACpB,SAAS,UAClB,QAAO;CACT,MAAM,QAAQ,OAAO,SAAS;CAC9B,MAAM,QAAQ,OAAO,SAAS;CAE9B,IAAI,OAAO,OAAO,SAAS,GAAG;CAC9B,IAAI,QAAQ;AACZ,KAAI,WAAW,OAAO,QAAQ;AAE5B,UAAQ,KAAK,WAAW;AACxB;;AAEF,QAAO,OAAO,SAAS,GAAG,MAAM;CAChC,MAAM,UAAU,MAAM;AACtB,WAAU;AACV,OAAM,UAAU,MAAM,QAAS,QAAQ,OAAO,GAAG;AACjD,QAAO,OAAO,SAAS,GAAG,GAAG;EAC3B,MAAM;EACN;EACA,MAAM;EACN,UAAU,CACR;GACE,MAAM;GACN;GACA,KAAK;GACN,CACF;EACD,SAAS;EACV,CAAQ;AACT,QAAO;;;;;AC3ET,SAAgB,YAAY,QAA0C;CACpE,MAAM,OAAO,OAAO,OAAO,SAAS;AAEpC,KAAI,YAAY,KAAK,KAAK,WAAW,GAAG,IAAI,OAAO,OAAO,SAAS,IAAI,QAAQ,KAC7E
,QAAO,OAAO,OAAO,SAAS,GAAG,EAAE;AAErC,QAAO;;;;;ACNT,SAAgB,gBAAgB,QAA0C;CACxE,MAAM,cAAc,CAAC,GAAG,OAAO;AAC/B,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,MAAM,IAAI,OAAO,SAAS;CAC1B,MAAM,QAAQ,OAAO;CACrB,MAAM,YAAY,OAAO,IAAI;AAC7B,KAAI,MAAM,SAAS,UAAU,MAAM,SAAS,SAAS,IAAI,IAAI,UAAU,SAAS,WAAW;EAEzF,MAAM,aAAa,OAAO,IAAI;EAC9B,MAAM,QAAQ,YAAY,SAAS,SAAS,IAAI;EAChD,MAAM,SAAS;GACb;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACD;IACE,MAAM;IACN,SAAS,YAAY,SAAS,SAAS,WAAW,UAAU;IAC7D;GACD;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACF;EACD,MAAM,aAAa,MAAM,SAAS,MAAM,GAAG,GAAG;AAC9C,MAAI,WACF,QAAO,QAAQ;GACb,MAAM;GACN,SAAS;GACT,KAAK;GACN,CAAC;AAEJ,cAAY,OAAO,GAAG,OAAO,GAAG,OAAO;AACvC,SAAO;;AAGT,QAAO;;;;;ACnDT,SAAgB,sBACd,OACuB;AACvB,QAAO;EACL,MAAM;EACN,IAAI,MAAM,MAAM,SAAS;EACzB,KAAK,KAAK,MAAM,MAAM,SAAS,GAAG;EACnC;;;;;ACPH,SAAgB,sBAAqC;AACnD,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACHH,SAAgB,oBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,WAAW;CACf,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAAc;AAC3D,cAAY,OAAO,GAAG,WAAW;AACjC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATmB;GAC1B,MAAM;GACN;GACA,KAAK,KAAK,SAAS;GACpB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AChC5B,SAAgB,gBAAgB,OAAsB,UAAU,OAAkB;AAChF,QAAO;EACL,MAAM;EACN,KAAK,MAAM,OAAO,MAAK,SAAQ,KAAK,OAAO,MAAM,GAAG,MAAM;EAC1D,KAAK,MAAM,OAAO,MAAK,SAAQ,KAAK,OAAO,MAAM,GAAG,MAAM;EAC1D,OAAO,MAAM,OAAO,MAAK,SAAQ,KAAK,OAAO,QAAQ,GAAG,MAAM;EAC9D,KAAK,MAAM,WAAW;EACtB;EACD;;;;;ACRH,SAAgB,qBAAqB,OAAsC;AACzE,QAAO;EACL,MAAM;EACN,MAAM,MAAM,WAAW;EACvB,KAAK,MAAM,WAAW;EACvB;;;;;ACJH,SAAgB,iBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,GAAG,WAAW;AAChC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATgB;GACvB,MAAM;GACN;GACA,KAAK,KAAK,QAAQ;GACnB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC/B5B,SAAgB,eACd,QACA,YAIA;CACA,MAAM,YAAY,OAAO;CACzB,MAAM,OAAO,UAAU,OAAO,MAAK,SAAQ,KAAK,OAAO,OAAO,GAAG,MAAM;CACvE,MAAM,QACF,UAAU,OAAO,MAAK,SAAQ,KAAK,OAAO,QAAQ,GAAG,MAAM;CAE/D,IAAI,IAAI,aAAa;CACrB,MAAMC,aAA8B,EAAE;CACtC,MAAM,UAAU;AAGhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAAc;AAC3D,aAAW,KAAK,OAAO,GAAG;AAC1B;;CAIF,MAAM,WAAW,kBAAkB,WAAW;CAC9C,MAAM,WAAW,SACd,KAAK,SAAS;AACb,MAAI,aAAa,KACf,QAAO,KAAK;AACd,SAAO,KAAK;GACZ,CACD,KAAK,GAAG;AAeX,QAAO;EAAE,MAbc;GACrB,MAAM;GACN;GACA;GACA,MAAM;GACN;GACA,KAAK,IAAI,SAAS,IAAI,OAAO,QAAQ,KAAK,MAAM,KAAK,GAAG;GACxD;GACD;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC7C5B,SAAgB,qBAAqB,OAAsC;AACzE,QAAO;EACL,MAAM;EACN,SAAS,MAAM,WAAW;EAC1B,SAAS,CAAC,CAAC,MAAM;EACjB,KAAK,MAAM;EACZ;;;;;ACNH,SAAgB,oBAAoB,OAAqC;AACvE,QAAO;EACL,MAAM;EACN,IAAI,MAAM,WAAW;EACrB,KAAK,MAAM,UAAU,IAAI,MAAM,QAAQ;EACxC;;;;;ACDH,SAAgB,wBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,QAAQ;CACZ,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAAW;AACxD,WAAS,OAAO,GAAG,WAAW;AAC9B,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATuB;GAC9B,MAAM;GACN;GACA,KAAK,KAAK,MAAM;GACjB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;ACnC5B,SAAgB,iBACd,QACA,YACA,KAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,aAAa;CACjB,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,gBAAgB;AAC7D,gBAAc,OAAO,GAAG,WAAW;AACnC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,aAAa,IAAI,CAAC;AAWrD,QAAO;EAAE,MATgB;GACvB,MAAM;GACN;GACA,KAAK,KAAK,WAA
W;GACtB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AChC5B,SAAgB,oBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,GAAG,WAAW;AAChC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAqBhD,QAAO;EAAE,MAnBmB;GAC1B,MAAM;GACN,UACE,SAAS,SAAS,IACd,WACA,CACE;IACE,MAAM;IAEN,SAAS,WAAW,OAAO,YAAY,WAAW;IAClD,KAAK,WAAW,OAAO,YAAY,WAAW;IAC/C,CACF;GACP,KAAK,IAAI,WAAW,OAAO,YAAY,WAAW,GAAG;GACtD;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;ACzC5B,SAAgB,sBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,GAAG,WAAW;AAChC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAqBhD,QAAO;EAAE,MAnBqB;GAC5B,MAAM;GACN,UACE,SAAS,SAAS,IACd,WACA,CACE;IACE,MAAM;IAEN,SAAS,WAAW,OAAO,YAAY,WAAW;IAClD,KAAK,WAAW,OAAO,YAAY,WAAW;IAC/C,CACF;GACP,KAAK,IAAI,WAAW,OAAO,YAAY,WAAW,GAAG;GACtD;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC1C5B,SAAgB,eAAe,OAAgC;AAC7D,QAAO;EACL,MAAM;EACN,SAAS,MAAM,WAAW;EAC1B,KAAK,MAAM,WAAW;EACvB;;;;;ACiBH,SAAgB,kBAAkB,QAAyB,KAAc,WAAyC;AAChH,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B,QAAO,EAAE;CAEX,MAAMC,SAAuB,EAAE;CAC/B,IAAIC,kBAAmC;CAEvC,IAAI,IAAI;AACR,UAAS,gBAAgB,OAAO;AAChC,UAAS,YAAY,OAAO;AAC5B,UAAS,aAAa,OAAO;AAE7B,QAAO,IAAI,OAAO,QAAQ;EACxB,MAAM,QAAQ,OAAO;AACrB,UAAQ,MAAM,MAAd;GACE,KAAK,QAAQ;IACX,IAAI,UAAU,MAAM,QAAQ,QAAQ,OAAO,GAAG,IAAI;AAClD,QAAI,YAAY,OAAO,YAAY,OAAO,YAAY,OAAO,YAAY,OAAO,QAAQ,KAAK,QAAQ,IAAI,OAAO,KAAK,QAAQ,EAAE;AAC7H;AACA;;AAEF,QAAI,iBAAiB,KAAK,QAAQ,CAChC,WAAU,QAAQ,QAAQ,UAAU,GAAG;AAEzC,QAAI,KAAK,WAAW,IAAI,IAAI,WAAW,SAAS,kBAAkB;KAEhE,MAAM,IADW,QAAQ,MAAM,EAAE,CACd,MAAM,UAAU;AACnC,SAAI,MAAM,MAAM;AACd;AACA;;AAGF,SAAK,KAAK,KAAK,KAAK,EAAE,GAAG,IAAK,CAAC,GAAG;MAEhC,MAAM,UAAU,IAAK,EAAE,OAAO,OAAO,EAAE,OAAO,MAAO;AACrD,aAAO,KAAK;OACV,MAAM;OACN;OACA,KAAK,UAAU,QAAQ;OACxB,CAAC;AACF;AACA;;;AAGJ,QAAI,SAAS,KAAK,QAAQ,EAAE;AAE1B,uBAAkB;AAElB,YAAO,KAAK;MACV,MAAM;MACN,MAAM,QAAQ,QAAQ,MAAM,GAAG;MAC/B,KAAK,WAAW;MACjB,CAAC;AACF;AACA;;AAEF,QAAI,YAAY,KAAK;AACnB;AACA;;AAEF,QAAI,kBAAkB,KAAK,QAAQ,EAAE;KAEnC,MAAM,QAAQ,QAAQ,QAAQ,KAAK,IAAI;KACvC,MAAM,QAAQ,QAAQ,MAAM,GAAG,MAAM;AACrC,SAAI,MACF,KAAI,iBAAiB;AAEnB,sBAAgB,WAAW;AAC3B,sBAAgB,OAAO;YAEpB;AAEH,wBAAkB;OAChB,MAAM;OACN,SAAS,SAAS;OAClB,KAAK,MAAM,WAAW;OACvB;AACD,aAAO,KAAK,gBAAgB;;KAGhC,MAAM,uBAAuB,QAAQ,MAAM,MAAM;AAEjD,uBAAkB;KAGlB,MAAM,EAAE,SAAS,wBAAwB;MACvC;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS,qBAAqB,QAAQ,MAAM,GAAG;OAC/C,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACF,EAAE,EAAE;AACL,YAAO,KAAK,KAAK;AACjB;AACA;;AAEF,QAAI,iBAAiB,KAAK,QAAQ,EAAE;KAClC,MAAM,QAAQ,QAAQ,QAAQ,IAAI,IAAI;KACtC,MAAM,QAAQ,QAAQ,MAAM,GAAG,MAAM;AACrC,SAAI,MACF,KAAI,iBAAiB;AAEnB,sBAAgB,WAAW;AAC3B,sBAAgB,OAAO;YAEpB;AAEH,wBAAkB;OAChB,MAAM;OACN,SAAS,SAAS;OAClB,KAAK,MAAM,WAAW;OACvB;AACD,aAAO,KAAK,gBAAgB;;KAGhC,MAAM,gBAAgB,QAAQ,MAAM,MAAM;AAE1C,uBAAkB;KAGlB,MAAM,EAAE,SAAS,iBAAiB;MAChC;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS,cAAc,QAAQ,OAAO,GAAG;OACzC,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACF,EAAE,GAAG,IAAI;AACV,YAAO,KAAK,KAAK;AACjB;AACA;eAEO,eAAe,KAAK,QAAQ,EAAE;KACrC,MAAM,QAAQ,QAAQ,QAAQ,IAAI,IAAI;KACtC,MAAM,QAAQ,QAAQ,MAAM,GAAG,MAAM;AACrC,SAAI,MACF,KAAI,iBAAiB;AAEnB,sBAAgB,WAAW;AAC3B,sBAAgB,OAAO;YAEpB;AAEH,wBAAkB;OAChB,MAAM;OACN,SAAS,SA
AS;OAClB,KAAK,MAAM,WAAW;OACvB;AACD,aAAO,KAAK,gBAAgB;;KAGhC,MAAM,kBAAkB,QAAQ,MAAM,MAAM;AAE5C,uBAAkB;KAGlB,MAAM,EAAE,SAAS,mBAAmB;MAClC;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS,gBAAgB,QAAQ,OAAO,GAAG;OAC3C,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACF,EAAE,EAAE;AACL,YAAO,KAAK,KAAK;AACjB;AACA;;IAEF,MAAM,aAAa,QAAQ,QAAQ,KAAK;AACxC,QAAI,eAAe,IAAI;KACrB,MAAM,kBAAkB,QAAQ,MAAM,GAAG,WAAW;AACpD,SAAI,CAAC,gBACH,mBAAkB;MAChB,MAAM;MACN,SAAS;MACT,KAAK;MACN;SAGD,iBAAgB,WAAW;AAE7B,YAAO,KAAK,gBAAgB;AAC5B,uBAAkB;AAClB,YAAO,KAAK,gBAAgB,OAAO,KAAK,CAAC;AACzC;AACA;;IAEF,MAAM,YAAY,QAAQ,QAAQ,IAAI;AAEtC,QAAI,QAAQ,SAAS,YAAY,IAAI,CAAC,KAAK,SAAS,YAAY,CAC9D,WAAU,QAAQ,MAAM,GAAG,GAAG;IAEhC,MAAM,WAAW,eAAe;KAAE,GAAG;KAAO;KAAS,CAAC;AAEtD,QAAI,cAAc,IAAI;KACpB,MAAM,kBAAkB,QAAQ,MAAM,GAAG,UAAU;KACnD,MAAM,UAAU,QAAQ,QAAQ,MAAM,UAAU;AAChD,SAAI,YAAY,IAAI;MAClB,MAAM,OAAO,QAAQ,MAAM,YAAY,GAAG,QAAQ;AAElD,UAAI,CAAC,UAAU,KAAK,KAAK,EAAE;AACzB,cAAO,KAAK;QACV,MAAM;QACN,SAAS;QACT,KAAK;QACN,CAAC;AACF,cAAO,KAAK;QACV,MAAM;QACN,MAAM;QACN;QACA,UAAU,CACR;SACE,MAAM;SACN,SAAS;SACT,KAAK;SACN,CACF;QACD,SAAS;QACV,CAAQ;AACT;AACA;;;;IAIN,MAAM,WAAW,OAAO,IAAI;AAC5B,QAAI,iBAAiB;AAEnB,qBAAgB,WAAW,SAAS,QAAQ,QAAQ,gBAAgB,GAAG;AACvE,qBAAgB,OAAO,SAAS;WAE7B;KACH,MAAM,YAAY,UAAU,QAAQ,QAAQ,OAAO,IAAI,IAAI,YAAY;AAEvE,cAAS,UAAU,SAAS,QAAQ,QAAQ,gBAAgB,GAAG;AAC/D,uBAAkB;AAClB,qBAAgB,SAAS;AACzB,YAAO,KAAK,gBAAgB;;AAE9B;AACA;;GAGF,KAAK;AACH,QAAI,iBAAiB;AAEnB,qBAAgB,WAAW;AAC3B,qBAAgB,OAAO;;AAGzB;AACA;GAEF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,qBAAqB,MAAM,CAAC;AACxC;AACA;GAEF,KAAK,aAAa;AAChB,sBAAkB;IAClB,MAAM,OAAO,MAAM,OAAO,MAAM,SAAc,KAAK,OAAO,OAAO,GAAG;AACpE,QAAI,OAAO,MAAM;KACf,MAAM,8BAAc,IAAI,OAAO,UAAU,KAAK,SAAS;KACvD,MAAMC,MAAW,OAAO,SAAS,IAAI,OAAO,OAAO,SAAS,KAAK;KACjE,MAAM,UAAU,CAAC,YAAY,KAAK,IAAI;AACtC,SAAI,WAAW,KAEb;2BADoB,IAAI,OAAO,MAAM,IAAI,KAAK,YAAY,EAC1C,KAAK,IAAI,EAAE;OACzB,MAAM,OAAO,KAAK,QAAS,KAAa,SAAS,MAAM,GAAG,GAAG,IAAI;AACjE,cAAO,OAAO,OAAO,SAAS,GAAG,GAAG;QAClC,MAAM;QACN,MAAM;QACN;QACA;QACD,CAAQ;AACT,YAAK;AACL,WAAI,OAAO,IAAI,YAAY,IACzB;AACF;;;;IAIN,MAAM,EAAE,MAAM,cAAc,eAAe,QAAQ,EAAE;AACrD,QAAI;AAEJ,SAAK,UAAU;AACf,WAAO,KAAK,KAAK;AACjB;;GAGF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,gBAAgB,MAAM,CAAC;AACnC;AACA;GAEF,KAAK,eAAe;AAClB,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,iBAAiB,QAAQ,GAAG,MAAM,QAAQ;AACtE,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,WAAW;AACd,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,mBAAmB,QAAQ,EAAE;AACzD,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,UAAU;AACb,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,wBAAwB,QAAQ,EAAE;AAC9D,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,aAAa;AAChB,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,oBAAoB,QAAQ,EAAE;AAC1D,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,iBAAiB,QAAQ,EAAE;AACvD,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,oBAAoB,QAAQ,EAAE;AAC1D,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,sBAAsB,QAAQ,EAAE;AAC5D,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK;KACV,MAAM;KACN,UAAU,CACR;MACE,MAAM;MACN,SAAS,MAAM,WAAW;MAC1B,KAAK,MAAM,WAAW;MACvB,CACF;KACD,KAAK,IAAI,MAAM,WAAW,GAAG;KAC9B,CAAC;AACF;AACA;GAEF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK;KACV,MAAM;KACN,UAAU,CACR;MACE,MAAM;MACN,SAAS,MAAM,WAAW;MAC1B,KAAK,MAAM,WAAW;MACvB,CACF;KACD,KAAK,IAAI,MAAM,WAAW,GAAG;KAC9B,CAAC;AACF;AACA;GAEF,KAAK,SAAS;AACZ,sBAAkB;IAElB,MAAM,WAAW,OAAO,IAAI;AAC5B,QAAI,UAAU,SAAS,UAAU,QAAQ,KAAK,SAAS,WAAW,GAAG,CAEnE,QAAO,KAAK;KACV,MAAM;KACN,SAAS;KACT,KAAK;KACN,CAAC;QAG
F,QAAO,KAAK,gBAAgB,MAAM,CAAC;AAErC;AACA;;GAEF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,mBAAmB,MAAM,CAAC;AACtC;AACA;GACF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,wBAAwB,MAAM,CAAC;AAC3C;AACA;GACF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,sBAAsB,MAAM,CAAC;AACzC;AACA;GAEF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,qBAAqB,CAAC;AAClC;AACA;GAEF,KAAK;AACH,sBAAkB;AAElB,WAAO,KAAK,gBAAgB,OAAO,GAAG,CAAC;AACvC;AACA;GAGF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,qBAAqB,MAAM,CAAC;AACxC;AACA;GAGF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,oBAAoB,MAAM,CAAC;AACvC;AACA;GAGF;AAEE,WAAO,KAAK,MAAM;AAClB,sBAAkB;AAClB;AACA;;;AAIN,QAAO;;;;;AClhBT,SAAgB,gBACd,QACA,OAC0B;CAC1B,MAAMC,qBAAmC,EAAE;CAC3C,IAAI,IAAI,QAAQ;AAGhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,mBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,qBAAmB,KAAK;GACtB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,aAAa,WAAW;GAC9B,CAAC;AACF,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EAEA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,qBAAmB,KAAK,SAAS;AACjC,MAAI;OAGJ;AAUJ,QAAO,CANgC;EACrC,MAAM;EACN,UAAU;EACV,KAAK,mBAAmB,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,KAAK;EAC3D,EAEuB,IAAI,EAAE;;;;;ACvChC,SAAgB,eAAe,OAAqC;AAIlE,KAAI,MAAM,MAAM,WAAW,OAAO,CAChC,QAAO,gBAAgB,MAAM;CAG/B,MAAM,QAAQ,MAAM,QAAS,MAAM,yCAAyC;AAC5E,KAAI,QAAQ,GAEV,OAAM,UAAU,MAAM,QACnB,QAAQ,uBAAuB,GAAG,CAClC,QAAQ,oBAAoB,GAAG;CAEpC,MAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,IAAI,MAAM,IAAI,WAAW;AAChE,QAAO;EACL,MAAM;EACN,UAAU,QAAQ,MAAM,KAAM,MAAM,QAAQ;EAC5C,MAAM,MAAM,WAAW;EACvB,KAAK,MAAM,WAAW;EACtB,SAAS,CAAC;EACX;;;;;ACjBH,SAAgB,oBACd,QACA,OAC8B;CAC9B,MAAMC,QAA8B,EAAE;CACtC,IAAI,IAAI,QAAQ;CAChB,IAAIC,YAA0B,EAAE;CAChC,IAAIC,kBAAgC,EAAE;AAEtC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,WAAW;EAEhC,MAAM,YAAY,OAAO,IAAI;AAC7B,cAAY,kBAAkB,UAAU,YAAY,EAAE,CAAC;AACvD,OAAK;YAEE,OAAO,GAAG,SAAS,WAAW;EAErC,IAAI,IAAI,IAAI;AACZ,oBAAkB,EAAE;AAEpB,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;AAChC,mBAAgB,KAAK;IACnB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,aAAa,WAAW,GAAG;IACpF,KAAK,aAAa,WAAW;IAC9B,CAAC;AACF,QAAK;QAGL;AAKJ,MAAI,UAAU,SAAS,GAAG;AACxB,SAAM,KAAK;IACT,MAAM;IACN,MAAM;IACN,YAAY;IACZ,KAAK,GAAG,UAAU,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,GAAG,CAAC,IAAI,gBAClD,KAAI,QAAO,IAAI,IAAI,CACnB,KAAK,KAAK;IACd,CAAC;AAGF,eAAY,EAAE;;AAGhB,MAAI,IAAI;OAGR;AAUJ,QAAO,CANwC;EAC7C,MAAM;EACN;EACA,KAAK,MAAM,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAC5C,EAE2B,IAAI,EAAE;;;;;ACrEpC,SAAgB,cACd,QACA,OACwB;CAExB,MAAM,KADQ,OAAO,OACJ,MAAM,SAAS;CAChC,MAAMC,mBAAiC,EAAE;CACzC,IAAI,IAAI,QAAQ;AAEhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,iBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,mBAAiB,KAAK;GACpB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,aAAa,WAAW;GAC9B,CAAC;AACF,OAAK;OAGL;AAWJ,QAAO,CAP4B;EACjC,MAAM;EACN;EACA,UAAU;EACV,KAAK,KAAK,GAAG,KAAK,iBAAiB,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,KAAK;EACtE,EAEqB,IAAI,EAAE;;;;;AC/B9B,SAAgB,aACd,QACA,OACa;CACb,MAAM,QAAQ,OAAO;CACrB,MAAM,eAAe,OAAO,SAAS,MAAM,KAAK,UAAU,EAAE,IAAI,IAAI;CACpE,MAAM,sBAAsB,OAAO,QAAQ;CAC3C,MAAM,iBAAiB,oBAAoB,WAAW;AAEtD,QAAO;EACL,MAAM;EACN,OAAO;EACP,MAAM;EACN,UAAU,kBAAkB,oBAAoB,YAAY,EAAE,CAAC;EAC/D,KAAK;EACN;;;;;ACfH,SAAgB,eAAe,OAAqC;AAClE,QAAO;EACL,MAAM;EACN,SAAS,MAAM,WAAW;EAC1B,SAAS,CAAC,CAAC,MAAM;EACjB,KAAK,MAAM,OAAO;EACnB;;;;;ACDH,SAAgB,WACd,QACA,OACqB;CACrB,IAAI,IAAI,QAAQ;CAChB,IAAIC,YAAiC;CACrC,MAAMC,OAAuB,EAAE;CAC/B,IAAI,WAAW;AAEf,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAC7C,KAAI,OAAO,GAAG,SAAS,cAAc;AACnC,aAAW;AACX;YAEO,OAAO,GAAG,SAAS,eAAe;AACzC,aAAW;AACX;YAGA,OAAO,GAAG,SAAS,gBAChB,OAAO,GAAG,SAAS,cAEtB;UAEO,OAAO,GAAG,SAAS,WAAW;EACrC,MAAMC,QAAyB,EAAE;EACjC,IAAI,IAAI,IAAI;
AAEZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,aAAa,OAAO,GAAG,SAAS,WAAW;GAChE,MAAM,eAAe,OAAO,GAAG,SAAS;GACxC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,UAAU,aAAa,WAAW;AAExC,SAAM,KAAK;IACT,MAAM;IACN,QAAQ,gBAAgB;IACxB,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,QAAQ;IACjE,KAAK;IACN,CAAC;AAEF,QAAK;QAGL;EAIJ,MAAMC,UAAwB;GAC5B,MAAM;GACN;GACA,KAAK,MAAM,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,IAAI;GAC3C;AAED,MAAI,SACF,aAAY;MAGZ,MAAK,KAAK,QAAQ;AAGpB,MAAI,IAAI;OAGR;AAIJ,KAAI,CAAC,UAEH,aAAY;EACV,MAAM;EACN,OAAO,EAAE;EACT,KAAK;EACN;AAWH,QAAO,CARsB;EAC3B,MAAM;EACN,QAAQ;EACR;EACA,SAAS,OAAO,OAAO,WAAW;EAClC,KAAK,CAAC,WAAW,GAAG,KAAK,CAAC,KAAI,QAAO,IAAI,IAAI,CAAC,KAAK,KAAK;EACzD,EAEkB,IAAI,EAAE;;;;;AC3F3B,SAAgB,qBAAwC;AACtD,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACYH,SAAgB,UACd,QACA,OACoB;CACpB,MAAM,QAAQ,OAAO;CACrB,MAAMC,YAA4B,EAAE;CACpC,IAAI,IAAI,QAAQ;AAEhB,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,uBACnB,OAAO,GAAG,SAAS,qBAEtB,KAAI,OAAO,GAAG,SAAS,kBAAkB;AACvC,MAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;EAEF,MAAMC,eAA6B,EAAE;EACrC,IAAI,IAAI,IAAI;AACZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAE7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,WAAW,OAAO,IAAI;AAC5B,OAAI,SAAS,KAAK,aAAa,WAAW,GAAG,EAAE;AAC7C,iBAAa,UAAU,aAAa,SAAS,QAAQ,UAAU,GAAG;AAClE,iBAAa,UAAU,OAAO,IAAI,EAAE;;AAEtC,gBAAa,KAAK;IAChB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,aAAa,WAAW,IAAI,SAAS;IAC9F,KAAK,aAAa,WAAW;IAC9B,CAAC;AACF,QAAK;aAEE,OAAO,GAAG,SAAS,mBAAmB;GAE7C,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,gBAAa,KAAK,eAAe;AACjC,OAAI;aAGJ,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;AACA,OAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;GAGF,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,gBAAa,KAAK,eAAe;AACjC,OAAI;aAEG,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,SAAS;AAEnC,gBAAa,KAAK,gBAAgB,OAAO,GAAG,CAAC;AAC7C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;GAExC,MAAM,CAAC,WAAW,YAAY,WAAW,QAAQ,EAAE;AACnD,gBAAa,KAAK,UAAU;AAC5B,OAAI;aAEG,OAAO,GAAG,SAAS,WAAW;GAErC,MAAM,CAAC,aAAa,YAAY,oBAAoB,QAAQ,EAAE;AAC9D,gBAAa,KAAK,YAAY;AAC9B,OAAI;aAEG,OAAO,GAAG,SAAS,iBAAiB;GAE3C,MAAM,CAAC,cAAc,YAAY,cAAc,QAAQ,EAAE;AACzD,gBAAa,KAAK,aAAa;AAC/B,OAAI;aAEG,OAAO,GAAG,SAAS,gBAAgB;GAE1C,MAAM,cAAc,aAAa,QAAQ,EAAE;AAC3C,gBAAa,KAAK,YAAY;AAC9B,QAAK;aAEE,OAAO,GAAG,SAAS,MAAM;AAEhC,gBAAa,KAAK,oBAAoB,CAAC;AACvC,QAAK;aAEE,OAAO,GAAG,SAAS,kBAAkB;GAE5C,MAAM,QACF,sDAAsD,KACtD,OAAO,GAAG,QAAQ,GACnB;AACH,OAAI,OAAO;IACT,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,GAAG,MAAM;AACpE,iBAAa,KAAK,eAAe;AACjC,QAAI;SAGJ,MAAK;QAIP,MAAK;AAIT,YAAU,KAAK;GACb,MAAM;GACN,UAAU;GACV,KAAK,aAAa,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,GAAG;GACnD,CAAC;AAEF,MAAI,IAAI;OAGR,MAAK;AAoBT,QAAO,CAhBoB;EACzB,MAAM;EACN,SAAS,MAAM,SAAS;EAExB,cAAc;AACZ,OAAI,MAAM,SAAS,MAAM,MAAM,QAAQ;IACrC,MAAM,QAAQ,MAAM,MAAM,MAAK,MAAK,EAAE,OAAO,QAAQ;AACrD,QAAI,MACF,QAAO,OAAO,MAAM,GAAG,IAAI;;MAG7B;EACJ,OAAO;EACP,KAAK,UAAU,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAChD,EAEiB,IAAI,EAAE;;AAI1B,SAAS,gBACP,QACA,OACoB;CAGpB,MAAM,cAAc,OAAO;CAC3B,MAAMC,cAA8B,EAAE;CACtC,IAAI,IAAI,QAAQ;AAEhB,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,uBACnB,OAAO,GAAG,SAAS,qBAEtB,KAAI,OAAO,GAAG,SAAS,kBAAkB;AACvC,MAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;EAEF,MAAMD,eAA6B,EAAE;EACrC,IAAI,IAAI,IAAI;AAEZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAE7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,WAAW,OAAO,IAAI;AAC5B,gBAAa,KAAK;IAChB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,aAAa,WAAW,IAAI,SAAS;IAC9F,KAAK,aAAa,WAAW;IAC9B,CAAC;AACF,QAAK;aAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;AACA,OAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;GAIF,MAAM,CAA
C,sBAAsB,YAAY,gBAAgB,QAAQ,EAAE;AACnE,gBAAa,KAAK,qBAAqB;AACvC,OAAI;aAEG,OAAO,GAAG,SAAS,cAAc;AACxC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,SAAS;AACnC,gBAAa,KAAK,gBAAgB,OAAO,GAAG,CAAC;AAC7C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;QAIL,MAAK;AAIT,cAAY,KAAK;GACf,MAAM;GACN,UAAU;GACV,KAAK,aAAa,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,GAAG;GACnD,CAAC;AAEF,MAAI,IAAI;OAGR,MAAK;AAmBT,QAAO,CAf0B;EAC/B,MAAM;EACN,SAAS,YAAY,SAAS;EAC9B,cAAc;AACZ,OAAI,YAAY,SAAS,YAAY,MAAM,QAAQ;IACjD,MAAM,QAAQ,YAAY,MAAM,MAAK,MAAK,EAAE,OAAO,QAAQ;AAC3D,QAAI,MACF,QAAO,OAAO,MAAM,GAAG,IAAI;;MAG7B;EACJ,OAAO;EACP,KAAK,YAAY,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAClD,EAEuB,IAAI,EAAE;;;;;ACtQhC,SAAgB,gBACd,QACA,OACA,OAC0B;CAC1B,MAAM,OAAO,MAAM,MAAM;CACzB,MAAM,QAAQ,MAAM,MAAM,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE;CACtE,MAAME,qBAAmC,EAAE;CAC3C,IAAI,IAAI,QAAQ;AAEhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,MAAI,aACF,oBAAmB,KAAK;GACtB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,aAAa,WAAW;GAC9B,CAAC;AAEJ,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EAEA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,qBAAmB,KAAK,SAAS;AACjC,MAAI;OAGJ;AAcJ,QAAO,CAVgC;EACrC,MAAM;EACN;EACA;EACA,UAAU;EACV,KAAK,MAAM,KAAK,GAAG,MAAM,IAAI,mBAC1B,KAAI,UAAS,MAAM,IAAI,CACvB,KAAK,KAAK,CAAC;EACf,EAEuB,IAAI,EAAE;;;;;AC9ChC,SAAgB,eACd,QACA,OAC0B;CAC1B,MAAM,YAAY,OAAO;CAGzB,IAAI,OAAO;CACX,IAAI,QAAQ;CAEZ,MAAM,YAAY,UAAU,KAAK,MAAM,yBAAyB;AAChE,KAAI,WAAW;AACb,SAAO,UAAU;EAEjB,MAAM,QAAQ,UAAU,QAAQ,IAAI,MAAM;AAC1C,MAAI,QAAQ,CAAC,KAAK,WAAW,MAAM,EAAE;GAEnC,MAAM,QAAQ,KAAK,wBAAQ,IAAI,OAAO,IAAI,OAAO,EAAE,GAAG,CAAC,MAAM;AAC7D,OAAI,MACF,SAAQ;;QAGT;EAEH,MAAM,QAAQ,UAAU,QAAQ,IAAI,MAAM;EAE1C,MAAM,QAEF,4DAA4D,KAAK,KAAK;AAC1E,MAAI,OAAO;AACT,UAAO,MAAM;AACb,WAAQ,MAAM,MAAM;;;AAIxB,KAAI,CAAC,MACH,SAAQ,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE;CAEtD,MAAMC,WAAyB,EAAE;CACjC,IAAI,IAAI,QAAQ;CAGhB,MAAM,4BAAY,IAAI,OAAO,cAAc,KAAK,SAAS;AAEzD,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,qBACnB,CAAC,UAAU,KAAK,OAAO,GAAG,KAAK,CAElC,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,MAAI,cAAc;GAChB,MAAM,IAAK,aAAa,SAAiB,eAAe,MAAgB,EAAE,SAAS,UAAU,KAAK,KAAK,EAAE,QAAQ,CAAC;GAClH,MAAM,YAAY,MAAM,KACpB,aAAa,UAAU,MAAM,GAAG,EAAE,GAClC,aAAa;AACjB,YAAS,KAAK;IACZ,MAAM;IACN,UAAU,kBAAkB,aAAa,EAAE,CAAC;IAC5C,KAAK,aAAa,SAAS,QAAQ,SAAS,GAAG,CAAC,QAAQ,gBAAgB,GAAG,IAAI;IAChF,CAAC;;AAEJ,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EACA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,WAAS,KAAK,SAAS;AACvB,MAAI;OAGJ;AAcJ,QAAO,CAVgC;EACrC,MAAM;EACN;EACA;EACA;EACA,KAAK,MAAM,KAAK,GAAG,MAAM,IAAI,SAAS,KAAI,MAAK,EAAE,IAAI,CAAC,KAAK,KAAK,CAAC;EAClE,EAGoB,IACkB,EAAE;;;;;ACzF3C,SAAgB,iBAAgC;AAC9C,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACHH,SAAgB,eACd,QACA,OACe;CACf,MAAM,wBAAwB,OAAO,QAAQ;CAC7C,MAAM,mBAAmB,sBAAsB,WAAW;AAE1D,QAAO;EACL,MAAM;EACN,UAAU,kBAAkB,sBAAsB,YAAY,EAAE,EAAE,iBAAiB;EACnF,KAAK;EACN;;;;;ACKH,SAAgB,yBACd,UACA,IACA,UAAwB,EAAE,EACZ;CAEd,IAAI,gBAAgB,YAAY,IAAI,UAAU,CAAC,QAAQ,kBAAkB,YAAY;AACrF,KAAI,aAAa,SAAS,MAAM,CAE9B,gBAAe,aAAa,QAAQ,SAAS,QAAQ;AAEvD,KAAI,aAAa,KAAK,aAAa,CAEjC,gBAAe,aAAa,QAAQ,cAAc,KAAK;UAEhD,aAAa,KAAK,aAAa,CAEtC,gBAAe,aAAa,QAAQ,qBAAqB,KAAK;CAGhE,MAAM,SAAS,GAAG,MAAM,cAAc,EAAE,CAAC;AAEzC,KAAI,CAAC,UAAU,CAAC,MAAM,QAAQ,OAAO,CACnC,QAAO,EAAE;CAGX,MAAM,MAAM,QAAQ;CACpB,MAAM,OAAO,QAAQ;CAErB,IAAI,oBAAoB;AACxB,KAAI,OAAO,OAAO,QAAQ,WACxB,qBAAoB,IAAI,OAAO,IAAI;CAGrC,IAAI,SAAS,cAAc,kBAAkB;AAI7C,KAAI,QAAQ,OAAO,SAAS,WAC1B,UAAS,KAAK,kBAAkB,IAAI;AAEtC,QAAO;;AAIT,SAAgB,cAAc,QAAuC;AAEnE,KAAI,CAAC,UAAU,CAAC,MAAM,QAAQ,OAAO,CAC
nC,QAAO,EAAE;CAEX,MAAMC,SAAuB,EAAE;CAC/B,IAAI,IAAI;AACR,UAAS,eAAe,OAAO;AAC/B,QAAO,IAAI,OAAO,QAAQ;EACxB,MAAM,QAAQ,OAAO;AACrB,UAAQ,MAAM,MAAd;GACE,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK,wBAAwB;IAC3B,MAAM,CAAC,aAAa,YAAY,eAAe,QAAQ,EAAE;AACzD,WAAO,KAAK,YAAY;AACxB,QAAI;AACJ;;GAGF,KAAK;AACH,WAAO,KAAK,aAAa,QAAQ,EAAE,CAAC;AACpC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,eAAe,QAAQ,EAAE,CAAC;AACtC,SAAK;AACL;GAEF,KAAK;GACL,KAAK;AACH,WAAO,KAAK,eAAe,OAAO,GAAG,CAAC;AACtC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,gBAAgB,OAAO,GAAG,CAAC;AACvC,SAAK;AACL;GAEF,KAAK;GACL,KAAK,qBAAqB;IACxB,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,WAAO,KAAK,SAAS;AACrB,QAAI;AACJ;;GAGF,KAAK;AACH,WAAO,KAAK,oBAAoB,CAAC;AACjC,SAAK;AACL;GAEF,KAAK,mBAAmB;IACtB,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,WAAO,KAAK,eAAe;AAC3B,QAAI;AACJ;;GAGF,KAAK,cAAc;IACjB,MAAM,CAAC,WAAW,YAAY,WAAW,QAAQ,EAAE;AACnD,WAAO,KAAK,UAAU;AACtB,QAAI;AACJ;;GAGF,KAAK,WAAW;IACd,MAAM,CAAC,oBAAoB,YAAY,oBAAoB,QAAQ,EAAE;AACrE,WAAO,KAAK,mBAAmB;AAC/B,QAAI;AACJ;;GAGF,KAAK,iBAAiB;IACpB,MAAM,CAAC,cAAc,YAAY,cAAc,QAAQ,EAAE;AACzD,WAAO,KAAK,aAAa;AACzB,QAAI;AACJ;;GAGF,KAAK,kBAAkB;IACrB,MAAM,QACF,4DAA4D,KAC5D,MAAM,QAAQ,GACf;AACH,QAAI,OAAO;KACT,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,GAAG,MAAM;AACpE,YAAO,KAAK,eAAe;AAC3B,SAAI;UAGJ,MAAK;AAEP;;GAGF,KAAK;AACH,WAAO,KAAK,gBAAgB,CAAC;AAC7B;AACA;GAEF,KAAK;AACH,WAAO,KAAK,eAAe,OAAO,GAAG,CAAC;AACtC,SAAK;AACL;GAEF;AAEE,SAAK;AACL;;;AAIN,QAAO;;;;;ACzIT,SAAgB,YAAY,QAAgB,UAAU,KAAK,KAAK,IAAI,UAA8B,EAAE,EAAE;CAEpG,MAAM,KAAK,QAAQ,QAAQ;CAG3B,MAAMC,sBAA8C,EAClD,eAAe,QAChB;CAED,IAAIC;AACJ,KAAI,OAAO,QAAQ,SAAS,WAC1B,KAAI,QAAQ;UAEL,QAAQ,QAAQ,OAAO,QAAQ,SAAS,UAAU;EACzD,MAAM,UAAU,QAAQ;AACxB,OAAK,QAAgB,QAAQ,QAAQ,oBAAoB,QAAQ;OAGjE,MAAK,QAAgB,oBAAoB,QAAQ;AAInD,KAAI,MAAM,QAAQ,QAAQ,OAAO,CAC/B,MAAK,MAAM,KAAK,QAAQ,OAEtB,KAAI,MAAM,QAAQ,EAAE,CAClB,IAAG,IAAI,EAAE,IAAI,EAAE,GAAG;KAElB,IAAG,IAAI,EAAE;AAKf,KAAI,MAAM,QAAQ,QAAQ,MAAM,CAC9B,MAAK,MAAM,MAAM,QAAQ,MACvB,KAAI;AACF,KAAG,GAAG;UAED,GAAG;AAGR,UAAQ,MAAM,+CAA+C,EAAE;;AAMrE,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,eAAe;AACtB,IAAG,IAAIC,KAAgB;CACvB,MAAM,2BACD,mBAA2B,WAAW;AAC3C,IAAG,IAAI,yBAAyB;AAChC,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,mBAAmB;AAG1B,IAAG,KAAK,MAAM,MAAM,SAAS,sBAAsB,UAAe;EAEhE,MAAM,QADc,MAAM,IACR,MAAM,QAAQ;AAChC,OAAK,MAAM,SAAS,MAAM,QAAQ;AAChC,OAAI,MAAM,SAAS,WAAW,CAAC,MAAM,OAAO,CAAC,MAAM,OACjD;GACF,MAAMC,WAAmB,MAAM,IAAI;GACnC,MAAMC,UAAkB,MAAM,IAAI;GAClC,MAAMC,SAAiB,MAAM;GAC7B,MAAM,SAAS,OAAO;GACtB,MAAM,SAAS,OAAO;GAGtB,MAAM,OAAO,MADG,KAAK,IAAI,GAAG,UAAU,EAAE,KACT;GAC/B,IAAI,IAAI;AACR,UAAO,IAAI,KAAK,WAAW,KAAK,OAAO,OAAO,KAAK,OAAO,KAAO;GACjE,IAAI,QAAQ;AACZ,UAAO,IAAI,QAAQ,KAAK,UAAU,KAAK,IAAI,WAAW,OAAQ;GAC9D,IAAI,IAAI,IAAI;AACZ,UAAO,IAAI,KAAK,WAAW,KAAK,OAAO,OAAO,KAAK,OAAO,KAAO;GACjE,MAAM,SAAS,UAAU,WAAW,KAAK,SAAS,UAAU,MAAM,KAAK;AACvE,SAAM,OAAO,MAAM,QAAQ,EAAE;AAC7B,SAAM,KAAK,WAAW,CAAC;AAEvB,SAAM,KAAK,SAAS,CAAC,CAAC;;GAExB;CAGF,MAAM,YAAY,OAAY,WAAoB;EAChD,MAAM,QAAQ,MAAM;AACpB,MAAI,MAAM,IAAI,WAAW,IACvB,QAAO;EACT,MAAM,WAAW,MAAM,IAAI,QAAQ;EACnC,MAAM,WAAW,MAAM,IAAI,QAAQ;AACnC,MAAI,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,EAAE;AAC9C,OAAI,CAAC,QAAQ;IACX,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAI,EAAE;AACvC,UAAM,UAAU;;AAElB,SAAM,OAAO;AACb,UAAO;;AAET,SAAO;;AAGT,IAAG,OAAO,MAAM,OAAO,OAAO,QAAQ,SAAS;AAG/C,IAAG,SAAS,MAAM,SAAS,QAAa,QAAgB;EACtD,MAAM,QAAQ,OAAO;EACrB,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK,MAAM,GAAG;EAC9C,MAAM,MAAM,MAAM;EAClB,MAAM,cAAc,KAAK,SAAS,mBAAmB,IAAI,CAAC,CAAC;EAC3D,MAAM,WAAW,QAAQ;AAGzB,SAAO,sCAAsC,YAAY,eAAe,SAAS,QAFhE,UAAU,MAAM,GAAG,IAAI,GAAG,WAEuD;;kCAEpE,SAAS,aAAa,CAAC;iDACR,YAAY,IAAI,EACvD,cACD,CAAC;;;;;CAOR,MAAM,mBAAmB,OAAY,WAAoB
;AACvD,MAAI,MAAM,IAAI,MAAM,SAAS,IAC3B,QAAO;EACT,MAAM,QAAQ,aAAa,KAAK,MAAM,IAAI,MAAM,MAAM,IAAI,CAAC;AAC3D,MAAI,CAAC,MACH,QAAO;AACT,MAAI,CAAC,QAAQ;GACX,MAAM,KAAK,MAAM;GACjB,MAAM,QAAQ,MAAM,KAAK,aAAa,QAAQ,EAAE;AAChD,SAAM,UAAU;AAChB,SAAM,SAAS,MAAM;;AAEvB,QAAM,OAAO,MAAM,GAAG;AACtB,SAAO;;AAGT,IAAG,OAAO,MAAM,OAAO,UAAU,aAAa,gBAAgB;AAC9D,IAAG,SAAS,MAAM,aAAa,QAAa,QAAgB;EAC1D,MAAM,KAAK,OAAO,KAAK;AACvB,SAAO,mDAAmD,GAAG,+DAA+D,GAAG;;AAGjI,QAAO;;AAGT,SAAgB,oBAAoB;AAOlC,QANW,IAAI,WAAW;EACxB,MAAM;EACN,SAAS;EACT,aAAa;EACb,QAAQ;EACT,CAAC;;AAIJ,SAAgB,eAAe,IAAgB,SAAiB;AAE9D,QADa,GAAG,OAAO,QAAQ"}
1
+ {"version":3,"file":"index.js","names":["defaultMathOptions: MathOptions | undefined","contentLines: string[]","i","CONTROL_MAP: Record<string, string>","re: RegExp","delimiters: [string, string][]","t","findMatchingClose","content","token: any","i","children: ParsedNode[]","innerTokens: MarkdownToken[]","orig: string[]","updated: string[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","linkTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","result: ParsedNode[]","currentTextNode: TextNode | null","index","loading","pre: any","blockquoteChildren: ParsedNode[]","items: DefinitionItemNode[]","termNodes: ParsedNode[]","definitionNodes: ParsedNode[]","footnoteChildren: ParsedNode[]","headerRow: TableRowNode | null","rows: TableRowNode[]","cells: TableCellNode[]","rowNode: TableRowNode","listItems: ListItemNode[]","itemChildren: ParsedNode[]","nestedItems: ListItemNode[]","admonitionChildren: ParsedNode[]","children: ParsedNode[]","result: ParsedNode[]","defaultTranslations: Record<string, string>","t: (key: string) => string","markdownItEmoji","openLine: number","endLine: number","markup: string"],"sources":["../src/config.ts","../src/plugins/containers.ts","../src/findMatchingClose.ts","../src/plugins/isMathLike.ts","../src/plugins/math.ts","../src/renderers/index.ts","../src/factory.ts","../src/parser/fixTableTokens.ts","../src/parser/inline-parsers/checkbox-parser.ts","../src/parser/inline-parsers/emoji-parser.ts","../src/parser/inline-parsers/emphasis-parser.ts","../src/parser/inline-parsers/fence-parser.ts","../src/parser/inline-parsers/fixLinkToken.ts","../src/parser/inline-parsers/fixListItem.ts","../src/parser/inline-parsers/fixStrongTokens.ts","../src/parser/inline-parsers/footnote-ref-parser.ts","../src/parser/inline-parsers/hardbreak-parser.ts","../src/parser/inline-parsers/highlight-parser.ts","../src/parser/inline-parsers/image-parser.ts","../src/parser/inline-parsers/inline-code-parser.ts","../src/parser/inline-parsers/insert-parser.ts","../src/parser/inline-parsers/link-parser.ts","../src/parser/inline-parsers/math-inline-parser.ts","../src/parser/inline-parsers/reference-parser.ts","../src/parser/inline-parsers/strikethrough-parser.ts","../src/parser/inline-parsers/strong-parser.ts","../src/parser/inline-parsers/subscript-parser.ts","../src/parser/inline-parsers/superscript-parser.ts","../src/parser/inline-parsers/text-parser.ts","../src/parser/inline-parsers/index.ts","../src/parser/node-parsers/blockquote-parser.ts","../src/parser/node-parsers/code-block-parser.ts","../src/parser/node-parsers/definition-list-parser.ts","../src/parser/node-parsers/footnote-parser.ts","../src/parser/node-parsers/heading-parser.ts","../src/parser/node-parsers/math-block-parser.ts","../src/parser/node-parsers/table-parser.ts","../src/parser/node-parsers/thematic-break-parser.ts","../src/parser/node-parsers/list-parser.ts","../src/parser/node-parsers/admonition-parser.ts","../src/parser/node-parsers/container-parser.ts","../src/parser/node-parsers/hardbreak-parser.ts","../src/parser/node-parsers/paragraph-parser.ts","../src/parser/index.ts","../src/index.ts"],"sourcesContent":["/**\n * MathOptions control how the math plugin normalizes content before\n * handing it to KaTeX (or other math renderers).\n *\n * - commands: list of command 
words that should be auto-prefixed with a\n * backslash if not already escaped (e.g. 'infty' -> '\\\\infty'). Use a\n * conservative list to avoid false positives in prose.\n * - escapeExclamation: whether to escape standalone '!' to '\\\\!' (default true).\n */\nexport interface MathOptions {\n /** List of command words to auto-escape. */\n commands?: readonly string[]\n /** Whether to escape standalone '!' (default: true). */\n escapeExclamation?: boolean\n}\n\nlet defaultMathOptions: MathOptions | undefined\n\nexport function setDefaultMathOptions(opts: MathOptions | undefined) {\n defaultMathOptions = opts\n}\n\nexport function getDefaultMathOptions(): MathOptions | undefined {\n return defaultMathOptions\n}\n","import type MarkdownIt from 'markdown-it'\nimport markdownItContainer from 'markdown-it-container'\n\nexport function applyContainers(md: MarkdownIt) {\n ;[\n 'admonition',\n 'info',\n 'warning',\n 'error',\n 'tip',\n 'danger',\n 'note',\n 'caution',\n ].forEach((name) => {\n md.use(markdownItContainer, name, {\n render(tokens: any, idx: number) {\n const token = tokens[idx]\n if (token.nesting === 1) {\n return `<div class=\"vmr-container vmr-container-${name}\">`\n }\n else {\n return '</div>\\n'\n }\n },\n })\n })\n\n // fallback for simple ::: blocks (kept for backwards compat)\n md.block.ruler.before(\n 'fence',\n 'vmr_container_fallback',\n (state: any, startLine: number, endLine: number, silent: boolean) => {\n const startPos = state.bMarks[startLine] + state.tShift[startLine]\n const lineMax = state.eMarks[startLine]\n const markerMatch = state.src\n .slice(startPos, lineMax)\n .match(/^:::\\s*(\\w+)/)\n if (!markerMatch)\n return false\n if (silent)\n return true\n\n const name = markerMatch[1]\n let nextLine = startLine + 1\n let found = false\n while (nextLine <= endLine) {\n const sPos = state.bMarks[nextLine] + state.tShift[nextLine]\n const ePos = state.eMarks[nextLine]\n if (state.src.slice(sPos, ePos).trim() === ':::') {\n found = true\n break\n }\n nextLine++\n }\n if (!found)\n return false\n\n const tokenOpen = state.push('vmr_container_open', 'div', 1)\n tokenOpen.attrSet('class', `vmr-container vmr-container-${name}`)\n\n const contentLines: string[] = []\n for (let i = startLine + 1; i < nextLine; i++) {\n const sPos = state.bMarks[i] + state.tShift[i]\n const ePos = state.eMarks[i]\n contentLines.push(state.src.slice(sPos, ePos))\n }\n\n // Open a paragraph, push inline content and then close paragraph\n state.push('paragraph_open', 'p', 1)\n const inlineToken = state.push('inline', '', 0)\n inlineToken.content = contentLines.join('\\n')\n inlineToken.map = [startLine + 1, nextLine]\n // Ensure children exist and parse the inline content into them so the renderer\n // won't encounter a null children array (which causes .length read errors).\n inlineToken.children = []\n state.md.inline.parse(inlineToken.content, state.md, state.env, inlineToken.children)\n state.push('paragraph_close', 'p', -1)\n\n state.push('vmr_container_close', 'div', -1)\n\n state.line = nextLine + 1\n return true\n },\n )\n}\n","export function findMatchingClose(src: string, startIdx: number, open: string, close: string) {\n const len = src.length\n // Special-case $$ since it's a two-char delimiter that shouldn't\n // be interpreted as nested parentheses.\n if (open === '$$' && close === '$$') {\n let i = startIdx\n while (i < len - 1) {\n if (src[i] === '$' && src[i + 1] === '$') {\n // ensure not escaped\n let k = i - 1\n let backslashes = 0\n while (k >= 0 && src[k] === '\\\\') 
{\n backslashes++\n k--\n }\n if (backslashes % 2 === 0)\n return i\n }\n i++\n }\n return -1\n }\n\n const openChar = open[open.length - 1]\n const closeSeq = close\n let depth = 0\n let i = startIdx\n while (i < len) {\n // If there's an unescaped close sequence here\n if (src.slice(i, i + closeSeq.length) === closeSeq) {\n let k = i - 1\n let backslashes = 0\n while (k >= 0 && src[k] === '\\\\') {\n backslashes++\n k--\n }\n if (backslashes % 2 === 0) {\n if (depth === 0)\n return i\n depth--\n i += closeSeq.length\n continue\n }\n }\n\n const ch = src[i]\n // skip escaped characters\n if (ch === '\\\\') {\n i += 2\n continue\n }\n\n if (ch === openChar) {\n depth++\n }\n else if (ch === closeSeq[closeSeq.length - 1]) {\n if (depth > 0)\n depth--\n }\n i++\n }\n return -1\n}\n\nexport default findMatchingClose\n","export const TEX_BRACE_COMMANDS = [\n 'mathbf',\n 'boldsymbol',\n 'mathbb',\n 'mathcal',\n 'mathfrak',\n 'mathrm',\n 'mathit',\n 'mathsf',\n 'vec',\n 'hat',\n 'bar',\n 'tilde',\n 'overline',\n 'underline',\n 'mathscr',\n 'mathnormal',\n 'operatorname',\n 'mathbf*',\n]\n\nexport const ESCAPED_TEX_BRACE_COMMANDS = TEX_BRACE_COMMANDS.map(c => c.replace(/[.*+?^${}()|[\\\\]\"\\]/g, '\\\\$&')).join('|')\n\nconst TEX_CMD_RE = /\\\\[a-z]+/i\nconst PREFIX_CLASS = '(?:\\\\\\\\|\\\\u0008)'\nconst TEX_CMD_WITH_BRACES_RE = new RegExp(`${PREFIX_CLASS}(?:${ESCAPED_TEX_BRACE_COMMANDS})\\\\s*\\\\{[^}]+\\\\}`, 'i')\n// Detect brace-taking TeX commands even when the leading backslash or the\n// closing brace/content is missing (e.g. \"operatorname{\" or \"operatorname{span\").\n// This helps the heuristic treat incomplete but clearly TeX-like fragments\n// as math-like instead of plain text.\nconst TEX_BRACE_CMD_START_RE = new RegExp(`(?:${PREFIX_CLASS})?(?:${ESCAPED_TEX_BRACE_COMMANDS})\\s*\\{`, 'i')\nconst TEX_SPECIFIC_RE = /\\\\(?:text|frac|left|right|times)/\n// Match common math operator symbols or named commands.\n// Avoid treating the C/C++ increment operator (\"++\") as a math operator by\n// ensuring a lone '+' isn't matched when it's part of a '++' sequence.\n// Use a RegExp constructed from a string to avoid issues escaping '/' in a\n// regex literal on some platforms/linters.\n// eslint-disable-next-line prefer-regex-literals\nconst OPS_RE = new RegExp('(?<!\\\\+)\\\\+(?!\\\\+)|[=\\\\-*/^<>]|\\\\\\\\times|\\\\\\\\pm|\\\\\\\\cdot|\\\\\\\\le|\\\\\\\\ge|\\\\\\\\neq')\nconst FUNC_CALL_RE = /[A-Z]+\\s*\\([^)]+\\)/i\nconst WORDS_RE = /\\b(?:sin|cos|tan|log|ln|exp|sqrt|frac|sum|lim|int|prod)\\b/\n// Heuristic to detect common date/time patterns like 2025/9/30 21:37:24 and\n// avoid classifying them as math merely because they contain '/' or ':'\nconst DATE_TIME_RE = /\\b\\d{4}\\/\\d{1,2}\\/\\d{1,2}(?:[ T]\\d{1,2}:\\d{2}(?::\\d{2})?)?\\b/\nexport function isMathLike(s: string) {\n if (!s)\n return false\n\n // Normalize accidental control characters that may appear if a single\n // backslash sequence was interpreted in a JS string literal (for example\n // '\\\\b' becoming a backspace U+0008). 
Convert such control characters\n // back into their two-character escaped forms so our regexes can match\n // TeX commands reliably.\n // eslint-disable-next-line no-control-regex\n const norm = s.replace(/\\u0008/g, '\\\\b')\n const stripped = norm.trim()\n\n // quick bailouts\n // If the content looks like a timestamp or date, it's not math.\n if (DATE_TIME_RE.test(stripped))\n return false\n if (stripped.length > 2000)\n return true // very long blocks likely math\n\n if (/[./]\\s*\\D|\\D\\s*[./]/.test(s)) {\n return false\n }\n\n // TeX commands e.g. \\frac, \\alpha\n const texCmd = TEX_CMD_RE.test(norm)\n const texCmdWithBraces = TEX_CMD_WITH_BRACES_RE.test(norm)\n const texBraceStart = TEX_BRACE_CMD_START_RE.test(norm)\n\n // Explicit common TeX tokens (keeps compatibility with previous heuristic)\n const texSpecific = TEX_SPECIFIC_RE.test(norm)\n const subscriptPattern = /(?:^|[^\\w\\\\])(?:[A-Z]|\\\\[A-Z]+)_(?:\\{[^}]+\\}|[A-Z0-9\\\\])/i\n const superscriptPattern = /(?:^|[^\\w\\\\])(?:[A-Z]|\\\\[A-Z]+)\\^(?:\\{[^}]+\\}|[A-Z0-9\\\\])/i\n const superSub = subscriptPattern.test(norm) || superscriptPattern.test(norm)\n // common math operator symbols or named commands\n const ops = OPS_RE.test(norm)\n // function-like patterns: f(x), sin(x)\n const funcCall = FUNC_CALL_RE.test(norm)\n // common math words\n const words = WORDS_RE.test(norm)\n // 纯单个英文字命,也渲染成数学公式\n // e.g. (w) (x) (y) (z)\n // const pureWord = /^\\([a-zA-Z]\\)$/i.test(stripped)\n\n return texCmd || texCmdWithBraces || texBraceStart || texSpecific || superSub || ops || funcCall || words\n}\n","import type MarkdownIt from 'markdown-it'\nimport type { MathOptions } from '../config'\n\nimport findMatchingClose from '../findMatchingClose'\nimport { ESCAPED_TEX_BRACE_COMMANDS, isMathLike } from './isMathLike'\n\n// Heuristic to decide whether a piece of text is likely math.\n// Matches common TeX commands, math operators, function-call patterns like f(x),\n// superscripts/subscripts, and common math words.\n// Common TeX formatting commands that take a brace argument, e.g. \\boldsymbol{...}\n// Keep this list in a single constant so it's easy to extend/test.\n\n// Precompute an escaped, |-joined string of TEX brace commands so we don't\n// rebuild it on every call to `isMathLike`.\n\n// Common KaTeX/TeX command names that might lose their leading backslash.\n// Keep this list conservative to avoid false-positives in normal text.\nexport const KATEX_COMMANDS = [\n 'ldots',\n 'cdots',\n 'quad',\n 'in',\n 'infty',\n 'perp',\n 'mid',\n 'operatorname',\n 'to',\n 'rightarrow',\n 'leftarrow',\n 'math',\n 'mathrm',\n 'mathbf',\n 'mathit',\n 'mathbb',\n 'mathcal',\n 'mathfrak',\n 'alpha',\n 'beta',\n 'gamma',\n 'delta',\n 'epsilon',\n 'lambda',\n 'sum',\n 'prod',\n 'int',\n 'sqrt',\n 'fbox',\n 'boxed',\n 'color',\n 'rule',\n 'edef',\n 'fcolorbox',\n 'hline',\n 'hdashline',\n 'cdot',\n 'times',\n 'pm',\n 'le',\n 'ge',\n 'neq',\n 'sin',\n 'cos',\n 'tan',\n 'log',\n 'ln',\n 'exp',\n 'lim',\n 'frac',\n 'text',\n 'left',\n 'right',\n 'times',\n]\n\n// Precompute escaped KATEX commands and default regex used by\n// `normalizeStandaloneBackslashT` when no custom commands are provided.\n// Sort commands by length (desc) before joining so longer commands like\n// 'operatorname' are preferred over shorter substrings like 'to'. 
This\n// avoids accidental partial matches when building the regex.\nexport const ESCAPED_KATEX_COMMANDS = KATEX_COMMANDS\n .slice()\n .sort((a, b) => b.length - a.length)\n .map(c => c.replace(/[.*+?^${}()|[\\\\]\\\\\\]/g, '\\\\$&'))\n .join('|')\nconst CONTROL_CHARS_CLASS = '[\\t\\r\\b\\f\\v]'\n\n// Hoisted map of control characters -> escaped letter (e.g. '\\t' -> 't').\n// Kept at module scope to avoid recreating on every normalization call.\nconst CONTROL_MAP: Record<string, string> = {\n '\\t': 't',\n '\\r': 'r',\n '\\b': 'b',\n '\\f': 'f',\n '\\v': 'v',\n}\n\nfunction countUnescapedStrong(s: string) {\n const re = /(^|[^\\\\])(__|\\*\\*)/g\n let m: RegExpExecArray | null\n let c = 0\n // eslint-disable-next-line unused-imports/no-unused-vars\n while ((m = re.exec(s)) !== null) {\n c++\n }\n return c\n}\n\nexport function normalizeStandaloneBackslashT(s: string, opts?: MathOptions) {\n const commands = opts?.commands ?? KATEX_COMMANDS\n const escapeExclamation = opts?.escapeExclamation ?? true\n\n const useDefault = opts?.commands == null\n\n // Build or reuse regex: match control chars or unescaped command words.\n let re: RegExp\n if (useDefault) {\n re = new RegExp(`${CONTROL_CHARS_CLASS}|(?<!\\\\\\\\|\\\\w)(${ESCAPED_KATEX_COMMANDS})\\\\b`, 'g')\n }\n else {\n const commandPattern = `(?:${commands.slice().sort((a, b) => b.length - a.length).map(c => c.replace(/[.*+?^${}()|[\\\\]\\\\\"\\]/g, '\\\\$&')).join('|')})`\n re = new RegExp(`${CONTROL_CHARS_CLASS}|(?<!\\\\\\\\|\\\\w)(${commandPattern})\\\\b`, 'g')\n }\n\n let out = s.replace(re, (m: string, cmd?: string) => {\n if (CONTROL_MAP[m] !== undefined)\n return `\\\\${CONTROL_MAP[m]}`\n if (cmd && commands.includes(cmd))\n return `\\\\${cmd}`\n return m\n })\n\n // Escape standalone '!' but don't double-escape already escaped ones.\n if (escapeExclamation)\n out = out.replace(/(^|[^\\\\])!/g, '$1\\\\!')\n\n // Final pass: some TeX command names take a brace argument and may have\n // lost their leading backslash, e.g. \"operatorname{span}\". Ensure we\n // restore a backslash before known brace-taking commands when they are\n // followed by '{' and are not already escaped.\n // Use default escaped list when possible. Include TEX_BRACE_COMMANDS so\n // known brace-taking TeX commands (e.g. `text`, `boldsymbol`) are also\n // restored when their leading backslash was lost.\n const braceEscaped = useDefault\n ? 
[ESCAPED_TEX_BRACE_COMMANDS, ESCAPED_KATEX_COMMANDS].filter(Boolean).join('|')\n : [commands.map(c => c.replace(/[.*+?^${}()|[\\\\]\\\\\\]/g, '\\\\$&')).join('|'), ESCAPED_TEX_BRACE_COMMANDS].filter(Boolean).join('|')\n let result = out\n if (braceEscaped) {\n const braceCmdRe = new RegExp(`(^|[^\\\\\\\\])(${braceEscaped})\\\\s*\\\\{`, 'g')\n result = result.replace(braceCmdRe, (_m: string, p1: string, p2: string) => `${p1}\\\\${p2}{`)\n }\n result = result.replace(/span\\{([^}]+)\\}/, 'span\\\\{$1\\\\}')\n .replace(/\\\\operatorname\\{span\\}\\{((?:[^{}]|\\{[^}]*\\})+)\\}/, '\\\\operatorname{span}\\\\{$1\\\\}')\n return result\n}\nexport function applyMath(md: MarkdownIt, mathOpts?: MathOptions) {\n // Inline rule for \\(...\\) and $$...$$ and $...$\n const mathInline = (state: any, silent: boolean) => {\n if (/^\\*[^*]+/.test(state.src)) {\n return false\n }\n const delimiters: [string, string][] = [\n ['$$', '$$'],\n ['\\\\(', '\\\\)'],\n ['\\(', '\\)'],\n ]\n\n let searchPos = 0\n let preMathPos = 0\n // use findMatchingClose from util\n for (const [open, close] of delimiters) {\n // We'll scan the entire inline source and tokenize all occurrences\n const src = state.src\n let foundAny = false\n const pushText = (text: string) => {\n // sanitize unexpected values\n if (text === 'undefined' || text == null) {\n text = ''\n }\n if (text === '\\\\') {\n state.pos = state.pos + text.length\n searchPos = state.pos\n return\n }\n if (text === '\\\\)' || text === '\\\\(') {\n const t = state.push('text_special', '', 0)\n t.content = text === '\\\\)' ? ')' : '('\n t.markup = text\n state.pos = state.pos + text.length\n searchPos = state.pos\n return\n }\n\n if (!text)\n return\n // const strongMatch = text.match(/^(\\*+)([^*]+)(\\**)/)\n // if (strongMatch) {\n // const strongToken = state.push('strong_open', '', 0)\n // strongToken.markup = strongMatch[1]\n // const strongTextToken = state.push('text', '', 0)\n // // guard against unexpected undefined values\n // strongTextToken.content = strongMatch[2] == null ? '' : String(strongMatch[2])\n // const strongCloseToken = state.push('strong_close', '', 0)\n // strongCloseToken.markup = strongMatch[1]\n // if (!strongMatch[3])\n // return\n // text = text.slice(strongMatch[0].length)\n // if (text) {\n // const t = state.push('text', '', 0)\n // t.content = text\n // }\n // state.pos = state.src.length\n // searchPos = state.pos\n // return\n // }\n\n const t = state.push('text', '', 0)\n t.content = text\n state.pos = state.pos + text.length\n searchPos = state.pos\n }\n\n while (true) {\n if (searchPos >= src.length)\n break\n const index = src.indexOf(open, searchPos)\n if (index === -1)\n break\n\n // If the delimiter is immediately preceded by a ']' (possibly with\n // intervening spaces), it's likely part of a markdown link like\n // `[text](...)`, so we should not treat this '(' as the start of\n // an inline math span. 
Also guard the index to avoid OOB access.\n if (index > 0) {\n let i = index - 1\n // skip spaces between ']' and the delimiter\n while (i >= 0 && src[i] === ' ')\n i--\n if (i >= 0 && src[i] === ']')\n return false\n }\n // 有可能遇到 \\((\\operatorname{span}\\\\{\\boldsymbol{\\alpha}\\\\})^\\perp\\)\n // 这种情况,前面的 \\( 是数学公式的开始,后面的 ( 是普通括号\n // endIndex 需要找到与 open 对应的 close\n // 不能简单地用 indexOf 找到第一个 close — 需要处理嵌套与转义字符\n const endIdx = findMatchingClose(src, index + open.length, open, close)\n\n if (endIdx === -1) {\n // no matching close for this opener; skip forward\n const content = src.slice(index + open.length)\n if (isMathLike(content)) {\n searchPos = index + open.length\n foundAny = true\n if (!silent) {\n state.pending = ''\n const toPushBefore = preMathPos ? src.slice(preMathPos, searchPos) : src.slice(0, searchPos)\n const isStrongPrefix = countUnescapedStrong(toPushBefore) % 2 === 1\n\n if (preMathPos)\n pushText(src.slice(preMathPos, searchPos))\n else\n pushText(src.slice(0, searchPos))\n if (isStrongPrefix) {\n const strongToken = state.push('strong_open', '', 0)\n strongToken.markup = src.slice(0, index + 2)\n const token = state.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? '$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = true\n strongToken.content = content\n state.push('strong_close', '', 0)\n }\n else {\n const token = state.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? '$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = true\n }\n // consume the full inline source\n state.pos = src.length\n }\n searchPos = src.length\n preMathPos = searchPos\n }\n break\n }\n const content = src.slice(index + open.length, endIdx)\n if (!isMathLike(content)) {\n // push remaining text after last match\n // not math-like; skip this match and continue scanning\n searchPos = endIdx + close.length\n const text = src.slice(state.pos, searchPos)\n if (!state.pending)\n pushText(text)\n continue\n }\n foundAny = true\n\n if (!silent) {\n // push text before this math\n const before = src.slice(0, index)\n // If we already consumed some content, avoid duplicating the prefix\n // Only push the portion from previous search position\n const prevConsumed = src.slice(0, searchPos)\n // Determine whether there's an unclosed strong opener (**) or (__)\n // before this math delimiter. We only want to treat a prefix as a\n // strong-open when the number of unescaped strong markers in the\n // preceding segment is odd (i.e. there's an unmatched opener). This\n // avoids treating a fully paired `**bold**` as an open prefix.\n\n let toPushBefore = prevConsumed ? src.slice(preMathPos, index) : before\n const isStrongPrefix = countUnescapedStrong(toPushBefore) % 2 === 1\n if (index !== state.pos && isStrongPrefix) {\n toPushBefore = state.pending + src.slice(state.pos, index)\n }\n\n // strong prefix handling (preserve previous behavior)\n if (state.pending !== toPushBefore) {\n state.pending = ''\n if (isStrongPrefix) {\n const _match = toPushBefore.match(/(\\*+)/)\n const after = toPushBefore.slice(_match!.index! 
+ _match![0].length)\n pushText(toPushBefore.slice(0, _match!.index!))\n const strongToken = state.push('strong_open', '', 0)\n strongToken.markup = _match![0]\n const textToken = state.push('text', '', 0)\n textToken.content = after\n state.push('strong_close', '', 0)\n }\n else {\n pushText(toPushBefore)\n }\n }\n if (isStrongPrefix) {\n const strongToken = state.push('strong_open', '', 0)\n strongToken.markup = '**'\n const token = state.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? '$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = false\n const raw = src.slice(endIdx + close.length)\n const isBeforeClose = raw.startsWith('*')\n if (isBeforeClose) {\n state.push('strong_close', '', 0)\n }\n if (raw) {\n const textContentToken = state.push('text', '', 0)\n textContentToken.content = (raw == null ? '' : String(raw)).replace(/^\\*+/, '')\n }\n if (!isBeforeClose)\n state.push('strong_close', '', 0)\n state.pos = src.length\n searchPos = src.length\n preMathPos = searchPos\n continue\n }\n else {\n const token = state.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? '$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = false\n }\n }\n\n searchPos = endIdx + close.length\n preMathPos = searchPos\n state.pos = searchPos\n }\n\n if (foundAny) {\n if (!silent) {\n // push remaining text after last match\n if (searchPos < src.length)\n pushText(src.slice(searchPos))\n // consume the full inline source\n state.pos = src.length\n }\n else {\n // in silent mode, advance position past what we scanned\n state.pos = searchPos\n }\n\n return true\n }\n }\n\n return false\n }\n\n // Block math rule similar to previous implementation\n const mathBlock = (\n state: any,\n startLine: number,\n endLine: number,\n silent: boolean,\n ) => {\n const delimiters: [string, string][] = [\n ['\\\\[', '\\\\]'],\n ['\\[', '\\]'],\n ['$$', '$$'],\n ]\n\n const startPos = state.bMarks[startLine] + state.tShift[startLine]\n const lineText = state.src.slice(startPos, state.eMarks[startLine]).trim()\n let matched = false\n let openDelim = ''\n let closeDelim = ''\n for (const [open, close] of delimiters) {\n if (lineText === open || lineText.startsWith(open)) {\n if (open.includes('[')) {\n if (lineText.replace('\\\\', '') === '[') {\n if (startLine + 1 < endLine) {\n // const nextLineStart\n // = state.bMarks[startLine + 1] + state.tShift[startLine + 1]\n // const nextLineText = state.src.slice(\n // nextLineStart,\n // state.eMarks[startLine + 1],\n // )\n matched = true\n openDelim = open\n closeDelim = close\n break\n }\n continue\n }\n }\n else {\n matched = true\n openDelim = open\n closeDelim = close\n break\n }\n }\n }\n\n if (!matched)\n return false\n if (silent)\n return true\n\n if (\n lineText.includes(closeDelim)\n && lineText.indexOf(closeDelim) > openDelim.length\n ) {\n const startDelimIndex = lineText.indexOf(openDelim)\n const endDelimIndex = lineText.indexOf(\n closeDelim,\n startDelimIndex + openDelim.length,\n )\n const content = lineText.slice(\n startDelimIndex + openDelim.length,\n endDelimIndex,\n )\n\n const token: any = state.push('math_block', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content)\n token.markup\n = openDelim === '$$' ? '$$' : openDelim === '[' ? 
'[]' : '\\\\[\\\\]'\n token.map = [startLine, startLine + 1]\n token.raw = `${openDelim}${content}${closeDelim}`\n token.block = true\n token.loading = false\n state.line = startLine + 1\n return true\n }\n\n let nextLine = startLine\n let content = ''\n let found = false\n\n const firstLineContent\n = lineText === openDelim ? '' : lineText.slice(openDelim.length)\n\n if (firstLineContent.includes(closeDelim)) {\n const endIndex = firstLineContent.indexOf(closeDelim)\n content = firstLineContent.slice(0, endIndex)\n found = true\n nextLine = startLine\n }\n else {\n if (firstLineContent)\n content = firstLineContent\n\n for (nextLine = startLine + 1; nextLine < endLine; nextLine++) {\n const lineStart = state.bMarks[nextLine] + state.tShift[nextLine]\n const lineEnd = state.eMarks[nextLine]\n const currentLine = state.src.slice(lineStart - 1, lineEnd)\n if (currentLine.trim() === closeDelim) {\n found = true\n break\n }\n else if (currentLine.includes(closeDelim)) {\n found = true\n const endIndex = currentLine.indexOf(closeDelim)\n content += (content ? '\\n' : '') + currentLine.slice(0, endIndex)\n break\n }\n content += (content ? '\\n' : '') + currentLine\n }\n }\n\n const token: any = state.push('math_block', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content)\n token.markup\n = openDelim === '$$' ? '$$' : openDelim === '[' ? '[]' : '\\\\[\\\\]'\n token.raw = `${openDelim}${content}${content.startsWith('\\n') ? '\\n' : ''}${closeDelim}`\n token.map = [startLine, nextLine + 1]\n token.block = true\n token.loading = !found\n state.line = nextLine + 1\n return true\n }\n\n // Register math before the escape rule so inline math is tokenized\n // before markdown-it processes backslash escapes. This preserves\n // backslashes inside math content (e.g. \"\\\\{\") instead of having\n // the escape rule remove them from the token content.\n md.inline.ruler.before('escape', 'math', mathInline)\n md.block.ruler.before('paragraph', 'math_block', mathBlock, {\n alt: ['paragraph', 'reference', 'blockquote', 'list'],\n })\n}\n","import type MarkdownIt from 'markdown-it'\n\nexport function applyRenderRules(md: MarkdownIt) {\n const defaultImage\n = md.renderer.rules.image\n || function (tokens: any, idx: number, options: any, env: any, self: any) {\n return self.renderToken(tokens, idx, options)\n }\n\n md.renderer.rules.image = (\n tokens: any,\n idx: number,\n options: any,\n env: any,\n self: any,\n ) => {\n const token = tokens[idx]\n token.attrSet?.('loading', 'lazy')\n return defaultImage(tokens, idx, options, env, self)\n }\n\n md.renderer.rules.fence\n = md.renderer.rules.fence\n || ((tokens: any, idx: number) => {\n const token = tokens[idx]\n const info = token.info ? token.info.trim() : ''\n const langClass = info\n ? 
`language-${md.utils.escapeHtml(info.split(/\\s+/g)[0])}`\n : ''\n const code = md.utils.escapeHtml(token.content)\n return `<pre class=\"${langClass}\"><code>${code}</code></pre>`\n })\n}\n","import type { MathOptions } from './config'\nimport MarkdownIt from 'markdown-it'\nimport { getDefaultMathOptions } from './config'\nimport { applyContainers } from './plugins/containers'\nimport { applyMath } from './plugins/math'\nimport { applyRenderRules } from './renderers'\n\nexport interface FactoryOptions extends Record<string, any> {\n markdownItOptions?: Record<string, any>\n enableMath?: boolean\n enableContainers?: boolean\n mathOptions?: { commands?: string[], escapeExclamation?: boolean }\n}\n\nexport function factory(opts: FactoryOptions = {}): MarkdownIt {\n const md = new MarkdownIt({\n html: true,\n linkify: true,\n typographer: true,\n ...(opts.markdownItOptions ?? {}),\n })\n\n if (opts.enableMath ?? true) {\n const mergedMathOptions: MathOptions = { ...(getDefaultMathOptions() ?? {}), ...(opts.mathOptions ?? {}) }\n applyMath(md, mergedMathOptions)\n }\n if (opts.enableContainers ?? true)\n applyContainers(md)\n applyRenderRules(md)\n\n return md\n}\n","import type { MarkdownToken } from '../types'\n\nfunction createStart() {\n return [\n {\n type: 'table_open',\n tag: 'table',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '',\n info: '',\n level: 0,\n loading: true,\n meta: null,\n },\n {\n type: 'thead_open',\n tag: 'thead',\n attrs: null,\n block: true,\n level: 1,\n children: null,\n },\n {\n type: 'tr_open',\n tag: 'tr',\n attrs: null,\n block: true,\n level: 2,\n children: null,\n },\n\n ]\n}\nfunction createEnd() {\n return [\n {\n type: 'tr_close',\n tag: 'tr',\n attrs: null,\n block: true,\n level: 2,\n children: null,\n },\n {\n type: 'thead_close',\n tag: 'thead',\n attrs: null,\n block: true,\n level: 1,\n children: null,\n },\n {\n type: 'table_close',\n tag: 'table',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '',\n info: '',\n level: 0,\n meta: null,\n },\n ]\n}\nfunction createTh(text: string) {\n return [{\n type: 'th_open',\n tag: 'th',\n attrs: null,\n block: true,\n level: 3,\n children: null,\n }, {\n type: 'inline',\n tag: '',\n children: [\n {\n tag: '',\n type: 'text',\n block: false,\n content: text,\n children: null,\n },\n ],\n content: text,\n level: 4,\n attrs: null,\n block: true,\n }, {\n type: 'th_close',\n tag: 'th',\n attrs: null,\n block: true,\n level: 3,\n children: null,\n }]\n}\nexport function fixTableTokens(tokens: MarkdownToken[]): MarkdownToken[] {\n const fixedTokens = [...tokens]\n if (tokens.length < 3)\n return fixedTokens\n const i = tokens.length - 2\n const token = tokens[i]\n\n if (token.type === 'inline') {\n if (/^\\|(?:[^|\\n]+\\|?)+/.test(token.content!)) {\n // 解析 table\n const body = token.children![0].content!.slice(1).split('|').map(i => i.trim()).filter(Boolean).flatMap(i => createTh(i))\n const insert = [\n ...createStart(),\n ...body,\n ...createEnd(),\n ] as any\n fixedTokens.splice(i - 1, 3, ...insert)\n }\n else if (/^\\|(?:[^|\\n]+\\|)+\\n\\|:?-/.test(token.content!)) {\n // 解析 table\n const body = token.children![0].content!.slice(1, -1).split('|').map(i => i.trim()).flatMap(i => createTh(i))\n const insert = [\n ...createStart(),\n ...body,\n ...createEnd(),\n ] as any\n fixedTokens.splice(i - 1, 3, ...insert)\n }\n else if (/^\\|(?:[^|\\n:]+\\|)+\\n\\|:?$/.test(token.content!)) {\n token.content = token.content!.slice(0, -2)\n token.children!.splice(2, 1)\n }\n 
}\n\n return fixedTokens\n}\n","import type { CheckboxInputNode, CheckboxNode, MarkdownToken } from '../../types'\n\nexport function parseCheckboxToken(token: MarkdownToken): CheckboxNode {\n return {\n type: 'checkbox',\n checked: token.meta?.checked === true,\n raw: token.meta?.checked ? '[x]' : '[ ]',\n }\n}\n\nexport function parseCheckboxInputToken(token: any): CheckboxInputNode {\n return {\n type: 'checkbox_input',\n checked: token.attrGet('checked') === '' || token.attrGet('checked') === 'true',\n raw: token.attrGet('checked') === '' || token.attrGet('checked') === 'true' ? '[x]' : '[ ]',\n }\n}\n","import type { EmojiNode, MarkdownToken } from '../../types'\n\nexport function parseEmojiToken(token: MarkdownToken): EmojiNode {\n return {\n type: 'emoji',\n name: token.content || '',\n markup: token.markup || '',\n raw: `:${token.content || ''}:`,\n }\n}\n","import type { EmphasisNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseEmphasisToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: EmphasisNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let emText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between em_open and em_close\n while (i < tokens.length && tokens[i].type !== 'em_close') {\n emText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: EmphasisNode = {\n type: 'emphasis',\n children,\n raw: `*${emText}*`,\n }\n\n // Skip to after em_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { CodeBlockNode, MarkdownToken } from '../../types'\n\nfunction splitUnifiedDiff(content: string) {\n const orig: string[] = []\n const updated: string[] = []\n for (const rawLine of content.split(/\\r?\\n/)) {\n const line = rawLine\n // skip diff metadata lines\n if (/^(?:diff |index |--- |\\+\\+\\+ |@@ )/.test(line))\n continue\n\n if (line.startsWith('- ')) {\n orig.push(` ${line.slice(1)}`)\n }\n else if (line.startsWith('+ ')) {\n updated.push(` ${line.slice(1)}`)\n }\n else {\n // fallback: treat as context (no prefix)\n orig.push(line)\n updated.push(line)\n }\n }\n return {\n original: orig.join('\\n'),\n updated: updated.join('\\n'),\n }\n}\n\nexport function parseFenceToken(token: MarkdownToken): CodeBlockNode {\n const hasMap = Array.isArray(token.map) && token.map.length === 2\n const meta = (token as any).meta\n const closed = typeof meta?.closed === 'boolean' ? meta.closed : undefined\n const diff = token.info?.startsWith('diff') || false\n const language = diff ? token.info?.split(' ')[1] || '' : token.info || ''\n\n // Defensive sanitization: sometimes a closing fence line (e.g. ``` or ``)\n // can accidentally end up inside `token.content` (for example when\n // the parser/mapping is confused). Remove a trailing line that only\n // contains backticks and optional whitespace so we don't render stray\n // ` or `` characters at the end of the code output. 
This is a\n // conservative cleanup and only strips a final line that looks like a\n // fence marker (starts with optional spaces then one or more ` and\n // only whitespace until end-of-string).\n let content = token.content || ''\n const trailingFenceLine = /\\r?\\n[ \\t]*`+\\s*$/\n if (trailingFenceLine.test(content))\n content = content.replace(trailingFenceLine, '')\n\n if (diff) {\n const { original, updated } = splitUnifiedDiff(content)\n // 返回时保留原来的 code 字段为 updated(编辑后代码),并额外附加原始与更新的文本\n return {\n type: 'code_block',\n language,\n code: updated || '',\n raw: content,\n diff,\n loading: closed === true ? false : closed === false ? true : !hasMap,\n originalCode: original,\n updatedCode: updated,\n }\n }\n\n return {\n type: 'code_block',\n language,\n code: content || '',\n raw: content || '',\n diff,\n loading: closed === true ? false : closed === false ? true : !hasMap,\n }\n}\n","import type { MarkdownToken } from '../../types'\n\nexport function fixLinkToken(tokens: MarkdownToken[]): MarkdownToken[] {\n if (tokens.length < 5)\n return tokens\n const first = tokens[tokens.length - 5]\n if (first.type !== 'text' && !first.content!.endsWith('['))\n return fixLinkTokens2(tokens)\n const second = tokens[tokens.length - 4]\n if (second.tag !== 'em')\n return fixLinkTokens2(tokens)\n const last = tokens[tokens.length - 1]\n if (last!.type === 'text' && !last.content!.startsWith(']'))\n return fixLinkTokens2(tokens)\n\n const third = tokens[tokens.length - 3]\n const href = last.content!.replace(/^\\]\\(*/, '')\n const loading = !last.content!.includes(')')\n first.content = first.content!.replace(/\\[$/, '')\n tokens.splice(tokens.length - 3, 1, {\n type: 'link',\n href,\n text: third.content,\n children: [\n {\n type: 'text',\n content: third.content,\n raw: third.content,\n },\n ],\n loading,\n } as any)\n tokens.splice(tokens.length - 1, 1)\n return tokens\n}\n\nexport function fixLinkTokens2(tokens: MarkdownToken[]): MarkdownToken[] {\n if (tokens.length < 8)\n return tokens\n let length = tokens.length\n let last = tokens[length - 1]\n if (last.type !== 'link_close') {\n length--\n last = tokens[length - 1]\n if (last.type !== 'link_close')\n return tokens\n }\n const second = tokens[length - 7]\n if (second.type !== 'em_open')\n return tokens\n const third = tokens[length - 6]\n const first = tokens[length - 8]\n if (first.type !== 'text') {\n return tokens\n }\n\n let href = tokens[length - 2].content\n let count = 4\n if (length !== tokens.length) {\n // 合并 last 到 href\n href += last.content || ''\n count++\n }\n tokens.splice(length - 4, count)\n const content = third.content\n length -= 4\n first.content = first.content!.replace(/\\[$/, '')\n tokens.splice(length - 2, 1, {\n type: 'link',\n href,\n text: content,\n children: [\n {\n type: 'text',\n content,\n raw: content,\n },\n ],\n loading: true,\n } as any)\n return tokens\n}\n","import type { MarkdownToken } from '../../types'\n\nexport function fixListItem(tokens: MarkdownToken[]): MarkdownToken[] {\n const last = tokens[tokens.length - 1] as any\n\n if (last?.type === 'text' && (/\\d+\\.\\s*$/.test(last.content || '') && tokens[tokens.length - 2]?.tag === 'br')) {\n tokens.splice(tokens.length - 1, 1)\n }\n return tokens\n}\n","import type { MarkdownToken } from '../../types'\n\nexport function fixStrongTokens(tokens: MarkdownToken[]): MarkdownToken[] {\n const fixedTokens = [...tokens]\n if (tokens.length < 4)\n return fixedTokens\n const i = tokens.length - 4\n const token = tokens[i]\n const nextToken = tokens[i + 
1]\n if (token.type === 'text' && token.content?.endsWith('*') && nextToken.type === 'em_open') {\n // 解析有问题,要合并 emphasis 和 前面的 * 为 strong\n const _nextToken = tokens[i + 2]\n const count = _nextToken?.type === 'text' ? 4 : 3\n const insert = [\n {\n type: 'strong_open',\n tag: 'strong',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '**',\n info: '',\n meta: null,\n },\n {\n type: 'text',\n content: _nextToken?.type === 'text' ? _nextToken.content : '',\n },\n {\n type: 'strong_close',\n tag: 'strong',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '**',\n info: '',\n meta: null,\n },\n ] as any\n const beforeText = token.content?.slice(0, -1)\n if (beforeText) {\n insert.unshift({\n type: 'text',\n content: beforeText,\n raw: beforeText,\n })\n }\n fixedTokens.splice(i, count, ...insert)\n return fixedTokens\n }\n\n return fixedTokens\n}\n","import type { FootnoteReferenceNode, MarkdownToken } from '../../types'\n\nexport function parseFootnoteRefToken(\n token: MarkdownToken,\n): FootnoteReferenceNode {\n return {\n type: 'footnote_reference',\n id: token.meta?.label || '',\n raw: `[^${token.meta?.label || ''}]`,\n }\n}\n","import type { HardBreakNode } from '../../types'\n\nexport function parseHardbreakToken(): HardBreakNode {\n return {\n type: 'hardbreak',\n raw: '\\\\\\n',\n }\n}\n","import type { HighlightNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseHighlightToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: HighlightNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let markText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between mark_open and mark_close\n while (i < tokens.length && tokens[i].type !== 'mark_close') {\n markText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: HighlightNode = {\n type: 'highlight',\n children,\n raw: `==${markText}==`,\n }\n\n // Skip to after mark_close\n const nextIndex = i < tokens.length ? 
i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { ImageNode, MarkdownToken } from '../../types'\n\nexport function parseImageToken(token: MarkdownToken, loading = false): ImageNode {\n return {\n type: 'image',\n src: token.attrs?.find(attr => attr[0] === 'src')?.[1] || '',\n alt: token.attrs?.find(attr => attr[0] === 'alt')?.[1] || '',\n title: token.attrs?.find(attr => attr[0] === 'title')?.[1] || null,\n raw: token.content || '',\n loading,\n }\n}\n","import type { InlineCodeNode, MarkdownToken } from '../../types'\n\nexport function parseInlineCodeToken(token: MarkdownToken): InlineCodeNode {\n return {\n type: 'inline_code',\n code: token.content || '',\n raw: token.content || '',\n }\n}\n","import type { InsertNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseInsertToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: InsertNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let insText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between ins_open and ins_close\n while (i < tokens.length && tokens[i].type !== 'ins_close') {\n insText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: InsertNode = {\n type: 'insert',\n children,\n raw: `++${insText}++`,\n }\n\n // Skip to after ins_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { LinkNode, MarkdownToken } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseLinkToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: LinkNode\n nextIndex: number\n} {\n const openToken = tokens[startIndex]\n const href = openToken.attrs?.find(attr => attr[0] === 'href')?.[1] || ''\n const title\n = openToken.attrs?.find(attr => attr[0] === 'title')?.[1] || null\n\n let i = startIndex + 1\n const linkTokens: MarkdownToken[] = []\n const loading = true\n\n // Collect all tokens between link_open and link_close\n while (i < tokens.length && tokens[i].type !== 'link_close') {\n linkTokens.push(tokens[i])\n i++\n }\n\n // Parse the collected tokens as inline content\n const children = parseInlineTokens(linkTokens)\n const linkText = children\n .map((node) => {\n if ('content' in node)\n return node.content\n return node.raw\n })\n .join('')\n\n const node: LinkNode = {\n type: 'link',\n href,\n title,\n text: linkText,\n children,\n raw: `[${linkText}](${href}${title ? ` \"${title}\"` : ''})`,\n loading,\n }\n\n // Skip to after link_close\n const nextIndex = i < tokens.length ? 
i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, MathInlineNode } from '../../types'\n\n// Parse a math_inline token (inline math expressions)\nexport function parseMathInlineToken(token: MarkdownToken): MathInlineNode {\n return {\n type: 'math_inline',\n content: token.content || '',\n loading: !!token.loading,\n raw: token.raw!,\n }\n}\n","import type { MarkdownToken, ReferenceNode } from '../../types'\n\n// Parse a reference token from markdown-it\nexport function parseReferenceToken(token: MarkdownToken): ReferenceNode {\n return {\n type: 'reference',\n id: token.content || '',\n raw: token.markup || `[${token.content}]`,\n }\n}\n","import type {\n MarkdownToken,\n ParsedNode,\n StrikethroughNode,\n} from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseStrikethroughToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: StrikethroughNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let sText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between s_open and s_close\n while (i < tokens.length && tokens[i].type !== 's_close') {\n sText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: StrikethroughNode = {\n type: 'strikethrough',\n children,\n raw: `~~${sText}~~`,\n }\n\n // Skip to after s_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, StrongNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseStrongToken(\n tokens: MarkdownToken[],\n startIndex: number,\n raw?: string,\n): {\n node: StrongNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let strongText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between strong_open and strong_close\n while (i < tokens.length && tokens[i].type !== 'strong_close') {\n strongText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens, raw))\n\n const node: StrongNode = {\n type: 'strong',\n children,\n raw: `**${strongText}**`,\n }\n\n // Skip to after strong_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, SubscriptNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseSubscriptToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: SubscriptNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let subText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between sub_open and sub_close (if applicable)\n while (i < tokens.length && tokens[i].type !== 'sub_close') {\n subText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: SubscriptNode = {\n type: 'subscript',\n children:\n children.length > 0\n ? 
children\n : [\n {\n type: 'text',\n // Fallback to the collected inner text (e.g., \"2\" in H~2~O)\n content: subText || tokens[startIndex].content || '',\n raw: subText || tokens[startIndex].content || '',\n },\n ],\n raw: `~${subText || tokens[startIndex].content || ''}~`,\n }\n\n // Skip to after sub_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, SuperscriptNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseSuperscriptToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: SuperscriptNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let supText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between sup_open and sup_close (if applicable)\n while (i < tokens.length && tokens[i].type !== 'sup_close') {\n supText += tokens[i].content || ''\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: SuperscriptNode = {\n type: 'superscript',\n children:\n children.length > 0\n ? children\n : [\n {\n type: 'text',\n // Fallback to the collected inner text (e.g., \"2\" in x^2^)\n content: supText || tokens[startIndex].content || '',\n raw: supText || tokens[startIndex].content || '',\n },\n ],\n raw: `^${supText || tokens[startIndex].content || ''}^`,\n }\n\n // Skip to after sup_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, TextNode } from '../../types'\n\nexport function parseTextToken(token: MarkdownToken): TextNode {\n return {\n type: 'text',\n content: token.content || '',\n raw: token.content || '',\n }\n}\n","import type { MarkdownToken, ParsedNode, TextNode } from '../../types'\nimport { parseCheckboxInputToken, parseCheckboxToken } from './checkbox-parser'\nimport { parseEmojiToken } from './emoji-parser'\nimport { parseEmphasisToken } from './emphasis-parser'\nimport { parseFenceToken } from './fence-parser'\nimport { fixLinkToken } from './fixLinkToken'\nimport { fixListItem } from './fixListItem'\nimport { fixStrongTokens } from './fixStrongTokens'\nimport { parseFootnoteRefToken } from './footnote-ref-parser'\nimport { parseHardbreakToken } from './hardbreak-parser'\nimport { parseHighlightToken } from './highlight-parser'\nimport { parseImageToken } from './image-parser'\nimport { parseInlineCodeToken } from './inline-code-parser'\nimport { parseInsertToken } from './insert-parser'\nimport { parseLinkToken } from './link-parser'\nimport { parseMathInlineToken } from './math-inline-parser'\nimport { parseReferenceToken } from './reference-parser'\nimport { parseStrikethroughToken } from './strikethrough-parser'\nimport { parseStrongToken } from './strong-parser'\nimport { parseSubscriptToken } from './subscript-parser'\nimport { parseSuperscriptToken } from './superscript-parser'\nimport { parseTextToken } from './text-parser'\n\n// Process inline tokens (for text inside paragraphs, headings, etc.)\nexport function parseInlineTokens(tokens: MarkdownToken[], raw?: string, pPreToken?: MarkdownToken): ParsedNode[] {\n if (!tokens || tokens.length === 0)\n return []\n\n const result: ParsedNode[] = []\n let currentTextNode: TextNode | null = null\n\n let i = 0\n tokens = fixStrongTokens(tokens)\n tokens = fixListItem(tokens)\n tokens = fixLinkToken(tokens)\n\n while (i < 
tokens.length) {\n const token = tokens[i] as any\n handleToken(token)\n }\n\n function handleToken(token: any) {\n switch (token.type) {\n case 'text': {\n // 合并连续的 text 节点\n let index = result.length - 1\n let content = token.content.replace(/\\\\/g, '') || ''\n for (index; index >= 0; index--) {\n const item = result[index]\n if (item.type === 'text') {\n currentTextNode = null\n content = item.content + content\n continue\n }\n break\n }\n if (index < result.length - 1)\n result.splice(index + 1)\n\n const nextToken = tokens[i + 1]\n if (content === '`' || content === '|' || content === '$' || content === '1' || /^\\*+$/.test(content) || /^\\d$/.test(content)) {\n i++\n break\n }\n if (!nextToken && /[^\\]]\\s*\\(\\s*$/.test(content)) {\n content = content.replace(/\\(\\s*$/, '')\n }\n if (raw?.startsWith('[') && pPreToken?.type === 'list_item_open') {\n const _content = content.slice(1)\n const w = _content.match(/[^\\s\\]]/)\n if (w === null) {\n i++\n break\n }\n // 如果 里面不是 w, 应该不处理\n if ((w && /x/i.test(w[0])) || !w) {\n // 转换成 checkbox_input\n const checked = w ? (w[0] === 'x' || w[0] === 'X') : false\n result.push({\n type: 'checkbox_input',\n checked,\n raw: checked ? '[x]' : '[ ]',\n })\n i++\n break\n }\n }\n if (/`[^`]*/.test(content)) {\n currentTextNode = null // Reset current text node\n const index = content.indexOf('`')\n const _text = content.slice(0, index)\n const codeContent = content.slice(index)\n if (_text) {\n result.push({\n type: 'text',\n content: _text || '',\n raw: _text || '',\n })\n }\n\n // 包含了 `, 需要特殊处理 code\n\n result.push({\n type: 'inline_code',\n code: codeContent.replace(/`/g, ''),\n raw: codeContent || '',\n })\n i++\n break\n }\n if (content === '[') {\n i++\n break\n }\n if (/[^~]*~{2,}[^~]+/.test(content)) {\n // 处理成 parseStrikethroughToken\n const index = content.indexOf('~~') || 0\n const _text = content.slice(0, index)\n if (_text) {\n if (currentTextNode) {\n // Merge with the previous text node\n currentTextNode.content += _text\n currentTextNode.raw += _text\n }\n else {\n // Start a new text node\n currentTextNode = {\n type: 'text',\n content: _text || '',\n raw: token.content || '',\n }\n result.push(currentTextNode)\n }\n }\n const strikethroughContent = content.slice(index)\n // 处理成 strikethrough parseStrikethroughToken\n currentTextNode = null // Reset current text node\n // 如果 * 是一个用 parseStrikethroughToken, 否则应该用 parseStrongToken\n // 将 text 包装成 strikethrough token 进行处理\n const { node } = parseStrikethroughToken([\n {\n type: 's_open',\n tag: 's',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n {\n type: 'text',\n tag: '',\n content: strikethroughContent.replace(/~/g, ''),\n markup: '',\n info: '',\n meta: null,\n },\n {\n type: 's_close',\n tag: 's',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n ], 0)\n result.push(node)\n i++\n break\n }\n if (/[^*]*\\*\\*[^*]+/.test(content)) {\n const index = content.indexOf('*') || 0\n const _text = content.slice(0, index)\n if (_text) {\n if (currentTextNode) {\n // Merge with the previous text node\n currentTextNode.content += _text\n currentTextNode.raw += _text\n }\n else {\n // Start a new text node\n currentTextNode = {\n type: 'text',\n content: _text || '',\n raw: token.content || '',\n }\n result.push(currentTextNode)\n }\n }\n const strongContent = content.slice(index)\n // 处理成 em parseEmphasisToken\n currentTextNode = null // Reset current text node\n // 如果 * 是一个用 parseEmphasisToken, 否则应该用 parseStrongToken\n // 将 text 包装成 emphasis token 进行处理\n const { 
node } = parseStrongToken([\n {\n type: 'strong_open',\n tag: 'strong',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n {\n type: 'text',\n tag: '',\n content: strongContent.replace(/\\*/g, ''),\n markup: '',\n info: '',\n meta: null,\n },\n {\n type: 'strong_close',\n tag: 'strong',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n ], 0, raw)\n result.push(node)\n i++\n break\n }\n else if (/[^*]*\\*[^*]+/.test(content)) {\n const index = content.indexOf('*') || 0\n const _text = content.slice(0, index)\n if (_text) {\n if (currentTextNode) {\n // Merge with the previous text node\n currentTextNode.content += _text\n currentTextNode.raw += _text\n }\n else {\n // Start a new text node\n currentTextNode = {\n type: 'text',\n content: _text || '',\n raw: token.content || '',\n }\n result.push(currentTextNode)\n }\n }\n const emphasisContent = content.slice(index)\n // 处理成 em parseEmphasisToken\n currentTextNode = null // Reset current text node\n // 如果 * 是一个用 parseEmphasisToken, 否则应该用 parseStrongToken\n // 将 text 包装成 emphasis token 进行处理\n const { node } = parseEmphasisToken([\n {\n type: 'em_open',\n tag: 'em',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n {\n type: 'text',\n tag: '',\n content: emphasisContent.replace(/\\*/g, ''),\n markup: '',\n info: '',\n meta: null,\n },\n {\n type: 'em_close',\n tag: 'em',\n content: '',\n markup: '*',\n info: '',\n meta: null,\n },\n ], 0)\n result.push(node)\n i++\n break\n }\n const imageStart = content.indexOf('![')\n if (imageStart !== -1) {\n const textNodeContent = content.slice(0, imageStart)\n if (!currentTextNode) {\n currentTextNode = {\n type: 'text',\n content: textNodeContent,\n raw: textNodeContent,\n }\n }\n else {\n currentTextNode.content += textNodeContent\n }\n result.push(currentTextNode)\n currentTextNode = null // Reset current text node\n result.push(parseImageToken(token, true))\n i++\n break\n }\n const linkStart = content.indexOf('[')\n\n if (content.endsWith('undefined') && !raw?.endsWith('undefined')) {\n content = content.slice(0, -9)\n }\n const textNode = parseTextToken({ ...token, content })\n\n if (linkStart !== -1) {\n const textNodeContent = content.slice(0, linkStart)\n const linkEnd = content.indexOf('](', linkStart)\n if (linkEnd !== -1) {\n const textToken = tokens[i + 2]\n const text = content.slice(linkStart + 1, linkEnd)\n if (!/[[\\]]/.test(text)) {\n if (content.endsWith('](') && nextToken?.type === 'link_open' && textToken) {\n // 特殊处理,把当前内容塞到后面link_open 后的 text,并且跳过当前的 text 处理\n const last = tokens[i + 4]\n let index = 4\n let loading = true\n if (last?.type === 'text' && last.content === ')') {\n index++\n loading = false\n }\n else if (last?.type === 'text' && last.content === '.') {\n i++\n }\n result.push({\n type: 'link',\n href: textToken.content || '',\n text,\n children: [\n {\n type: 'text',\n content: text,\n raw: text,\n },\n ],\n loading,\n } as any)\n i += index\n break\n }\n const linkContentEnd = content.indexOf(')', linkEnd)\n const href = linkContentEnd !== -1 ? content.slice(linkEnd + 2, linkContentEnd) : ''\n const loading = linkContentEnd === -1\n // 过滤一些奇怪的情况\n\n if (textNodeContent) {\n result.push({\n type: 'text',\n content: textNodeContent,\n raw: textNodeContent,\n })\n }\n result.push({\n type: 'link',\n href,\n text,\n children: [\n {\n type: 'text',\n content: text,\n raw: text,\n },\n ],\n loading,\n } as any)\n\n const afterText = linkContentEnd !== -1 ? 
content.slice(linkContentEnd + 1) : ''\n if (afterText) {\n handleToken({\n type: 'text',\n content: afterText,\n raw: afterText,\n })\n i--\n }\n i++\n break\n }\n }\n }\n const preToken = tokens[i - 1]\n if (currentTextNode) {\n // Merge with the previous text node\n currentTextNode.content += textNode.content.replace(/(\\*+|\\(|\\\\)$/, '')\n currentTextNode.raw += textNode.raw\n }\n else {\n const maybeMath = preToken?.tag === 'br' && tokens[i - 2]?.content === '['\n // Start a new text node\n const nextToken = tokens[i + 1]\n if (!nextToken)\n textNode.content = textNode.content.replace(/(\\*+|\\(|\\\\)$/, '')\n\n currentTextNode = textNode\n currentTextNode.center = maybeMath\n result.push(currentTextNode)\n }\n i++\n break\n }\n\n case 'softbreak':\n if (currentTextNode) {\n // Append newline to the current text node\n currentTextNode.content += '\\n'\n currentTextNode.raw += '\\n' // Assuming raw should also reflect the newline\n }\n // Don't create a node for softbreak itself, just modify text\n i++\n break\n\n case 'code_inline':\n currentTextNode = null // Reset current text node\n result.push(parseInlineCodeToken(token))\n i++\n break\n\n case 'link_open': {\n currentTextNode = null // Reset current text node\n const href = token.attrs?.find((attr: any) => attr[0] === 'href')?.[1]\n // 如果 text 不在[]里说明,它不是一个link, 当 text 处理\n if (raw && tokens[i + 1].type === 'text') {\n const text = tokens[i + 1]?.content || ''\n const reg = new RegExp(`\\\\[${text}\\\\s*\\\\]`)\n if (!reg.test(raw)) {\n result.push({\n type: 'text',\n content: text,\n raw: text,\n })\n i += 3\n break\n }\n }\n if (raw && href) {\n const loadingMath = new RegExp(`\\\\(\\\\s*${href}\\\\s*\\\\)`)\n const pre: any = result.length > 0 ? result[result.length - 1] : null\n const loading = !loadingMath.test(raw)\n if (loading && pre) {\n const isLinkMatch = new RegExp(`\\\\[${pre.text}\\\\s*\\\\]\\\\(`)\n if (isLinkMatch.test(raw)) {\n const text = pre?.text || (pre as any)?.content?.slice(1, -1) || ''\n result.splice(result.length - 1, 1, {\n type: 'link',\n href: '',\n text,\n loading,\n } as any) // remove the pre node\n i += 3\n if (tokens[i]?.content === '.')\n i++\n break\n }\n }\n }\n const { node, nextIndex } = parseLinkToken(tokens, i)\n i = nextIndex\n\n node.loading = false\n result.push(node)\n break\n }\n\n case 'image':\n currentTextNode = null // Reset current text node\n result.push(parseImageToken(token))\n i++\n break\n\n case 'strong_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseStrongToken(tokens, i, token.content)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'em_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseEmphasisToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 's_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseStrikethroughToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'mark_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseHighlightToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'ins_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseInsertToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'sub_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseSubscriptToken(tokens, i)\n result.push(node)\n i = 
nextIndex\n break\n }\n\n case 'sup_open': {\n currentTextNode = null // Reset current text node\n const { node, nextIndex } = parseSuperscriptToken(tokens, i)\n result.push(node)\n i = nextIndex\n break\n }\n\n case 'sub':\n currentTextNode = null // Reset current text node\n result.push({\n type: 'subscript',\n children: [\n {\n type: 'text',\n content: token.content || '',\n raw: token.content || '',\n },\n ],\n raw: `~${token.content || ''}~`,\n })\n i++\n break\n\n case 'sup':\n currentTextNode = null // Reset current text node\n result.push({\n type: 'superscript',\n children: [\n {\n type: 'text',\n content: token.content || '',\n raw: token.content || '',\n },\n ],\n raw: `^${token.content || ''}^`,\n })\n i++\n break\n\n case 'emoji': {\n currentTextNode = null // Reset current text node\n\n const preToken = tokens[i - 1]\n if (preToken?.type === 'text' && /\\|:-+/.test(preToken.content || '')) {\n // 处理表格中的 emoji,跳过\n result.push({\n type: 'text',\n content: '',\n raw: '',\n })\n }\n else {\n result.push(parseEmojiToken(token))\n }\n i++\n break\n }\n case 'checkbox':\n currentTextNode = null // Reset current text node\n result.push(parseCheckboxToken(token))\n i++\n break\n case 'checkbox_input':\n currentTextNode = null // Reset current text node\n result.push(parseCheckboxInputToken(token))\n i++\n break\n case 'footnote_ref':\n currentTextNode = null // Reset current text node\n result.push(parseFootnoteRefToken(token))\n i++\n break\n\n case 'hardbreak':\n currentTextNode = null // Reset current text node\n result.push(parseHardbreakToken())\n i++\n break\n\n case 'fence': {\n currentTextNode = null // Reset current text node\n // Handle fenced code blocks with language specifications\n result.push(parseFenceToken(tokens[i]))\n i++\n break\n }\n\n case 'math_inline': {\n currentTextNode = null // Reset current text node\n result.push(parseMathInlineToken(token))\n i++\n break\n }\n\n case 'reference': {\n currentTextNode = null // Reset current text node\n const nextToken = tokens[i + 1]\n if (!nextToken?.content?.startsWith('(')) {\n result.push(parseReferenceToken(token))\n }\n i++\n break\n }\n\n default:\n // Skip unknown token types, ensure text merging stops\n result.push(token)\n currentTextNode = null // Reset current text node\n i++\n break\n }\n }\n\n return result\n}\n","import type { BlockquoteNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseBlockquote(\n tokens: MarkdownToken[],\n index: number,\n): [BlockquoteNode, number] {\n const blockquoteChildren: ParsedNode[] = []\n let j = index + 1\n\n // Process blockquote content until closing tag is found\n while (j < tokens.length && tokens[j].type !== 'blockquote_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n blockquoteChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: contentToken.content || '',\n })\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n // Handle nested lists - use parseList directly for proper nested list support\n const [listNode, newIndex] = parseList(tokens, j)\n blockquoteChildren.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const blockquoteNode: BlockquoteNode = {\n type: 'blockquote',\n children: blockquoteChildren,\n raw: 
blockquoteChildren.map(child => child.raw).join('\\n'),\n }\n\n return [blockquoteNode, j + 1] // Skip blockquote_close\n}\n","import type { CodeBlockNode, MarkdownToken } from '../../types'\nimport { parseFenceToken } from '../inline-parsers/fence-parser'\n\nexport function parseCodeBlock(token: MarkdownToken): CodeBlockNode {\n // If this code block is actually a diff (some markdown-it backends\n // classify fences vs code_block differently), delegate to the\n // fence parser to preserve original/updated fields.\n if (token.info?.startsWith('diff')) {\n return parseFenceToken(token)\n }\n\n const match = token.content!.match(/ type=\"application\\/vnd\\.ant\\.([^\"]+)\"/)\n if (match?.[1]) {\n // 需要把 <antArtifact> 标签去掉\n token.content = token.content!\n .replace(/<antArtifact[^>]*>/g, '')\n .replace(/<\\/antArtifact>/g, '')\n }\n const hasMap = Array.isArray(token.map) && token.map.length === 2\n return {\n type: 'code_block',\n language: match ? match[1] : (token.info || ''),\n code: token.content || '',\n raw: token.content || '',\n loading: !hasMap,\n }\n}\n","import type {\n DefinitionItemNode,\n DefinitionListNode,\n MarkdownToken,\n ParsedNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseDefinitionList(\n tokens: MarkdownToken[],\n index: number,\n): [DefinitionListNode, number] {\n const items: DefinitionItemNode[] = []\n let j = index + 1\n let termNodes: ParsedNode[] = []\n let definitionNodes: ParsedNode[] = []\n\n while (j < tokens.length && tokens[j].type !== 'dl_close') {\n if (tokens[j].type === 'dt_open') {\n // Process term\n const termToken = tokens[j + 1]\n termNodes = parseInlineTokens(termToken.children || [])\n j += 3 // Skip dt_open, inline, dt_close\n }\n else if (tokens[j].type === 'dd_open') {\n // Process definition\n let k = j + 1\n definitionNodes = []\n\n while (k < tokens.length && tokens[k].type !== 'dd_close') {\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n definitionNodes.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], contentToken.content || ''),\n raw: contentToken.content || '',\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else {\n k++\n }\n }\n\n // Add definition item\n if (termNodes.length > 0) {\n items.push({\n type: 'definition_item',\n term: termNodes,\n definition: definitionNodes,\n raw: `${termNodes.map(term => term.raw).join('')}: ${definitionNodes\n .map(def => def.raw)\n .join('\\n')}`,\n })\n\n // Reset term nodes\n termNodes = []\n }\n\n j = k + 1 // Skip dd_close\n }\n else {\n j++\n }\n }\n\n const definitionListNode: DefinitionListNode = {\n type: 'definition_list',\n items,\n raw: items.map(item => item.raw).join('\\n'),\n }\n\n return [definitionListNode, j + 1] // Skip dl_close\n}\n","import type { FootnoteNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseFootnote(\n tokens: MarkdownToken[],\n index: number,\n): [FootnoteNode, number] {\n const token = tokens[index]\n const id = token.meta?.label ?? 
0\n const footnoteChildren: ParsedNode[] = []\n let j = index + 1\n\n while (j < tokens.length && tokens[j].type !== 'footnote_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n footnoteChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: contentToken.content || '',\n })\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else {\n j++\n }\n }\n\n const footnoteNode: FootnoteNode = {\n type: 'footnote',\n id,\n children: footnoteChildren,\n raw: `[^${id}]: ${footnoteChildren.map(child => child.raw).join('\\n')}`,\n }\n\n return [footnoteNode, j + 1] // Skip footnote_close\n}\n","import type { HeadingNode, MarkdownToken } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseHeading(\n tokens: MarkdownToken[],\n index: number,\n): HeadingNode {\n const token = tokens[index]\n const headingLevel = Number.parseInt(token.tag?.substring(1) || '1')\n const headingContentToken = tokens[index + 1]\n const headingContent = headingContentToken.content || ''\n\n return {\n type: 'heading',\n level: headingLevel,\n text: headingContent,\n children: parseInlineTokens(headingContentToken.children || []),\n raw: headingContent,\n }\n}\n","import type { MarkdownToken, MathBlockNode } from '../../types'\n\n// Parse a math_block token (block/display math expressions)\nexport function parseMathBlock(token: MarkdownToken): MathBlockNode {\n return {\n type: 'math_block',\n content: token.content || '',\n loading: !!token.loading,\n raw: token.raw || '',\n }\n}\n","import type {\n MarkdownToken,\n TableCellNode,\n TableNode,\n TableRowNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseTable(\n tokens: MarkdownToken[],\n index: number,\n): [TableNode, number] {\n let j = index + 1\n let headerRow: TableRowNode | null = null\n const rows: TableRowNode[] = []\n let isHeader = false\n\n while (j < tokens.length && tokens[j].type !== 'table_close') {\n if (tokens[j].type === 'thead_open') {\n isHeader = true\n j++\n }\n else if (tokens[j].type === 'thead_close') {\n isHeader = false\n j++\n }\n else if (\n tokens[j].type === 'tbody_open'\n || tokens[j].type === 'tbody_close'\n ) {\n j++\n }\n else if (tokens[j].type === 'tr_open') {\n const cells: TableCellNode[] = []\n let k = j + 1\n\n while (k < tokens.length && tokens[k].type !== 'tr_close') {\n if (tokens[k].type === 'th_open' || tokens[k].type === 'td_open') {\n const isHeaderCell = tokens[k].type === 'th_open'\n const contentToken = tokens[k + 1]\n const content = contentToken.content || ''\n\n cells.push({\n type: 'table_cell',\n header: isHeaderCell || isHeader,\n children: parseInlineTokens(contentToken.children || [], content),\n raw: content,\n })\n\n k += 3 // Skip th_open/td_open, inline, th_close/td_close\n }\n else {\n k++\n }\n }\n\n const rowNode: TableRowNode = {\n type: 'table_row',\n cells,\n raw: cells.map(cell => cell.raw).join('|'),\n }\n\n if (isHeader) {\n headerRow = rowNode\n }\n else {\n rows.push(rowNode)\n }\n\n j = k + 1 // Skip tr_close\n }\n else {\n j++\n }\n }\n\n if (!headerRow) {\n // Default empty header if none found\n headerRow = {\n type: 'table_row',\n cells: [],\n raw: '',\n }\n }\n\n const tableNode: TableNode = {\n type: 'table',\n header: headerRow,\n rows,\n loading: tokens[index].loading || false,\n raw: [headerRow, ...rows].map(row => row.raw).join('\\n'),\n }\n\n return [tableNode, j + 1] // Skip 
table_close\n}\n","import type { ThematicBreakNode } from '../../types'\n\nexport function parseThematicBreak(): ThematicBreakNode {\n return {\n type: 'thematic_break',\n raw: '---',\n }\n}\n","import type {\n ListItemNode,\n ListNode,\n MarkdownToken,\n ParsedNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseFenceToken } from '../inline-parsers/fence-parser'\nimport { parseAdmonition } from './admonition-parser'\nimport { parseBlockquote } from './blockquote-parser'\nimport { parseCodeBlock } from './code-block-parser'\nimport { parseDefinitionList } from './definition-list-parser'\nimport { parseFootnote } from './footnote-parser'\nimport { parseHeading } from './heading-parser'\nimport { parseMathBlock } from './math-block-parser'\nimport { parseTable } from './table-parser'\nimport { parseThematicBreak } from './thematic-break-parser'\n\nexport function parseList(\n tokens: MarkdownToken[],\n index: number,\n): [ListNode, number] {\n const token = tokens[index]\n const listItems: ListItemNode[] = []\n let j = index + 1\n\n while (\n j < tokens.length\n && tokens[j].type !== 'bullet_list_close'\n && tokens[j].type !== 'ordered_list_close'\n ) {\n if (tokens[j].type === 'list_item_open') {\n if (tokens[j].markup === '*') {\n j++\n continue\n }\n const itemChildren: ParsedNode[] = []\n let k = j + 1\n while (k < tokens.length && tokens[k].type !== 'list_item_close') {\n // Handle different block types inside list items\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n const preToken = tokens[k - 1]\n if (/\\n\\d+$/.test(contentToken.content || '')) {\n contentToken.content = contentToken.content?.replace(/\\n\\d+$/, '')\n contentToken.children?.splice(-1, 1)\n }\n itemChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], contentToken.content || '', preToken),\n raw: contentToken.content || '',\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (tokens[k].type === 'blockquote_open') {\n // Parse blockquote within list item\n const [blockquoteNode, newIndex] = parseBlockquote(tokens, k)\n itemChildren.push(blockquoteNode)\n k = newIndex\n }\n else if (\n tokens[k].type === 'bullet_list_open'\n || tokens[k].type === 'ordered_list_open'\n ) {\n if (tokens[k].markup === '*') {\n k++\n continue\n }\n // Parse nested list\n const [nestedListNode, newIndex] = parseNestedList(tokens, k)\n itemChildren.push(nestedListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'code_block') {\n // Parse code block\n itemChildren.push(parseCodeBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'fence') {\n // Parse fenced code block\n itemChildren.push(parseFenceToken(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'math_block') {\n // Parse math block\n itemChildren.push(parseMathBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'table_open') {\n // Parse table\n const [tableNode, newIndex] = parseTable(tokens, k)\n itemChildren.push(tableNode)\n k = newIndex\n }\n else if (tokens[k].type === 'dl_open') {\n // Parse definition list\n const [defListNode, newIndex] = parseDefinitionList(tokens, k)\n itemChildren.push(defListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'footnote_open') {\n // Parse footnote\n const [footnoteNode, newIndex] = parseFootnote(tokens, k)\n itemChildren.push(footnoteNode)\n k = newIndex\n }\n else if (tokens[k].type === 'heading_open') {\n // Parse heading (though headings in lists are 
unusual)\n const headingNode = parseHeading(tokens, k)\n itemChildren.push(headingNode)\n k += 3 // Skip heading_open, inline, heading_close\n }\n else if (tokens[k].type === 'hr') {\n // Parse thematic break\n itemChildren.push(parseThematicBreak())\n k += 1\n }\n else if (tokens[k].type === 'container_open') {\n // Handle admonition containers (warning, info, note, tip, danger, caution)\n const match\n = /^::: ?(warning|info|note|tip|danger|caution) ?(.*)$/.exec(\n tokens[k].info || '',\n )\n if (match) {\n const [admonitionNode, newIndex] = parseAdmonition(tokens, k, match)\n itemChildren.push(admonitionNode)\n k = newIndex\n }\n else {\n k += 1 // Skip unknown container types\n }\n }\n else {\n k += 1\n }\n }\n\n listItems.push({\n type: 'list_item',\n children: itemChildren,\n raw: itemChildren.map(child => child.raw).join(''),\n })\n\n j = k + 1 // Move past list_item_close\n }\n else {\n j += 1\n }\n }\n\n const listNode: ListNode = {\n type: 'list',\n ordered: token.type === 'ordered_list_open',\n // markdown-it may include attrs like [['start','2']] on ordered_list_open\n start: (() => {\n if (token.attrs && token.attrs.length) {\n const found = token.attrs.find(a => a[0] === 'start')\n if (found)\n return Number(found[1]) || 1\n }\n return undefined\n })(),\n items: listItems,\n raw: listItems.map(item => item.raw).join('\\n'),\n }\n\n return [listNode, j + 1] // Move past list_close\n}\n\n// Enhanced function to handle nested lists properly\nfunction parseNestedList(\n tokens: MarkdownToken[],\n index: number,\n): [ListNode, number] {\n // We can directly use parseList since we're in the same file\n // This avoids circular dependency issues\n const nestedToken = tokens[index]\n const nestedItems: ListItemNode[] = []\n let j = index + 1\n\n while (\n j < tokens.length\n && tokens[j].type !== 'bullet_list_close'\n && tokens[j].type !== 'ordered_list_close'\n ) {\n if (tokens[j].type === 'list_item_open') {\n if (tokens[j].markup === '*') {\n j++\n continue\n }\n const itemChildren: ParsedNode[] = []\n let k = j + 1\n\n while (k < tokens.length && tokens[k].type !== 'list_item_close') {\n // Handle different block types inside list items\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n const preToken = tokens[k - 1]\n itemChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], contentToken.content || '', preToken),\n raw: contentToken.content || '',\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[k].type === 'bullet_list_open'\n || tokens[k].type === 'ordered_list_open'\n ) {\n if (tokens[k].markup === '*') {\n k++\n continue\n }\n\n // Handle deeper nested lists\n const [deeperNestedListNode, newIndex] = parseNestedList(tokens, k)\n itemChildren.push(deeperNestedListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'code_block') {\n itemChildren.push(parseCodeBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'fence') {\n itemChildren.push(parseFenceToken(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'math_block') {\n // Parse math block in nested lists\n itemChildren.push(parseMathBlock(tokens[k]))\n k += 1\n }\n else {\n // Skip other token types in nested lists for simplicity\n k += 1\n }\n }\n\n nestedItems.push({\n type: 'list_item',\n children: itemChildren,\n raw: itemChildren.map(child => child.raw).join(''),\n })\n\n j = k + 1 // Move past list_item_close\n }\n else {\n j += 1\n }\n }\n\n const nestedListNode: ListNode = {\n type: 
'list',\n ordered: nestedToken.type === 'ordered_list_open',\n start: (() => {\n if (nestedToken.attrs && nestedToken.attrs.length) {\n const found = nestedToken.attrs.find(a => a[0] === 'start')\n if (found)\n return Number(found[1]) || 1\n }\n return undefined\n })(),\n items: nestedItems,\n raw: nestedItems.map(item => item.raw).join('\\n'),\n }\n\n return [nestedListNode, j + 1] // Move past list_close\n}\n","import type { AdmonitionNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseAdmonition(\n tokens: MarkdownToken[],\n index: number,\n match: RegExpExecArray,\n): [AdmonitionNode, number] {\n const kind = match[1] || 'note'\n const title = match[2] || kind.charAt(0).toUpperCase() + kind.slice(1)\n const admonitionChildren: ParsedNode[] = []\n let j = index + 1\n\n while (j < tokens.length && tokens[j].type !== 'container_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n if (contentToken) {\n admonitionChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: contentToken.content || '',\n })\n }\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n // Handle nested lists - use parseList directly for proper nested list support\n const [listNode, newIndex] = parseList(tokens, j)\n admonitionChildren.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const admonitionNode: AdmonitionNode = {\n type: 'admonition',\n kind,\n title,\n children: admonitionChildren,\n raw: `:::${kind} ${title}\\n${admonitionChildren\n .map(child => child.raw)\n .join('\\n')}\\n:::`,\n }\n\n return [admonitionNode, j + 1] // Skip container_close\n}\n","import type { AdmonitionNode, MarkdownToken, ParsedNode, TextNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseContainer(\n tokens: MarkdownToken[],\n index: number,\n): [AdmonitionNode, number] {\n const openToken = tokens[index]\n\n // Determine kind and optional title\n let kind = 'note'\n let title = ''\n\n const typeMatch = openToken.type.match(/^container_(\\w+)_open$/)\n if (typeMatch) {\n kind = typeMatch[1]\n // some implementations set info to remaining title text\n const info = (openToken.info || '').trim()\n if (info && !info.startsWith(':::')) {\n // if info looks like 'warning title', drop leading kind token\n const maybe = info.replace(new RegExp(`^${kind}`), '').trim()\n if (maybe)\n title = maybe\n }\n }\n else {\n // container_open: info usually contains the marker like ' warning Title'\n const info = (openToken.info || '').trim()\n\n const match\n // eslint-disable-next-line regexp/no-super-linear-backtracking\n = /^:{1,3}\\s*(warning|info|note|tip|danger|caution)\\s*(.*)$/i.exec(info)\n if (match) {\n kind = match[1]\n title = match[2] || ''\n }\n }\n\n if (!title)\n title = kind.charAt(0).toUpperCase() + kind.slice(1)\n\n const children: ParsedNode[] = []\n let j = index + 1\n\n // Accept closing tokens: 'container_close' or 'container_<kind>_close'\n const closeType = new RegExp(`^container_${kind}_close$`)\n\n while (\n j < tokens.length\n && tokens[j].type !== 'container_close'\n && !closeType.test(tokens[j].type)\n ) {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n if 
(contentToken) {\n const i = (contentToken.children as any).findLastIndex((t: TextNode) => t.type === 'text' && /:+/.test(t.content))\n const _children = i !== -1\n ? contentToken.children?.slice(0, i)\n : contentToken.children\n children.push({\n type: 'paragraph',\n children: parseInlineTokens(_children || []),\n raw: contentToken.content?.replace(/\\n:+$/, '').replace(/\\n\\s*:::\\s*$/, '') || '',\n })\n }\n j += 3\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n const [listNode, newIndex] = parseList(tokens, j)\n children.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const admonitionNode: AdmonitionNode = {\n type: 'admonition',\n kind,\n title,\n children,\n raw: `:::${kind} ${title}\\n${children.map(c => c.raw).join('\\n')}\\n:::`,\n }\n\n // Skip the closing token\n const closingIndex = j\n return [admonitionNode, closingIndex + 1]\n}\n","import type { HardBreakNode } from '../../types'\n\nexport function parseHardBreak(): HardBreakNode {\n return {\n type: 'hardbreak',\n raw: '\\\\\\n',\n }\n}\n","import type { MarkdownToken, ParagraphNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseParagraph(\n tokens: MarkdownToken[],\n index: number,\n): ParagraphNode {\n const paragraphContentToken = tokens[index + 1]\n const paragraphContent = paragraphContentToken.content || ''\n\n return {\n type: 'paragraph',\n children: parseInlineTokens(paragraphContentToken.children || [], paragraphContent),\n raw: paragraphContent,\n }\n}\n","import type MarkdownIt from 'markdown-it'\nimport type { MarkdownToken, ParsedNode, ParseOptions } from '../types'\nimport { fixTableTokens } from './fixTableTokens'\nimport { parseInlineTokens } from './inline-parsers'\nimport { parseFenceToken } from './inline-parsers/fence-parser'\nimport { parseAdmonition } from './node-parsers/admonition-parser'\nimport { parseBlockquote } from './node-parsers/blockquote-parser'\nimport { parseCodeBlock } from './node-parsers/code-block-parser'\nimport { parseContainer } from './node-parsers/container-parser'\nimport { parseDefinitionList } from './node-parsers/definition-list-parser'\nimport { parseFootnote } from './node-parsers/footnote-parser'\nimport { parseHardBreak } from './node-parsers/hardbreak-parser'\nimport { parseHeading } from './node-parsers/heading-parser'\nimport { parseList } from './node-parsers/list-parser'\nimport { parseMathBlock } from './node-parsers/math-block-parser'\nimport { parseParagraph } from './node-parsers/paragraph-parser'\nimport { parseTable } from './node-parsers/table-parser'\nimport { parseThematicBreak } from './node-parsers/thematic-break-parser'\n\nexport function parseMarkdownToStructure(\n markdown: string,\n md: MarkdownIt,\n options: ParseOptions = {},\n): ParsedNode[] {\n // Ensure markdown is a string — guard against null/undefined inputs from callers\n let safeMarkdown = (markdown ?? 
'').toString().replace(/([^\\\\])\\right/g, '$1\\\\right')\n if (safeMarkdown.endsWith('- *')) {\n // 放置markdown 解析 - * 会被处理成多个 ul >li 嵌套列表\n safeMarkdown = safeMarkdown.replace(/- \\*$/, '- \\\\*')\n }\n if (/\\n\\s*-\\s*$/.test(safeMarkdown)) {\n // 此时 markdown 解析会出错要跳过\n safeMarkdown = safeMarkdown.replace(/\\n\\s*-\\s*$/, '\\n')\n }\n else if (/\\n[[(]\\n*$/.test(safeMarkdown)) {\n // 此时 markdown 解析会出错要跳过\n safeMarkdown = safeMarkdown.replace(/(\\n\\[|\\n\\()+\\n*$/g, '\\n')\n }\n // Get tokens from markdown-it\n const tokens = md.parse(safeMarkdown, {}) as MarkdownToken[]\n // Defensive: ensure tokens is an array\n if (!tokens || !Array.isArray(tokens))\n return []\n\n // Allow consumers to transform tokens before processing\n const pre = options.preTransformTokens\n const post = options.postTransformTokens\n let transformedTokens = tokens\n if (pre && typeof pre === 'function') {\n transformedTokens = pre(tokens) || tokens\n }\n // Process the tokens into our structured format\n let result = processTokens(transformedTokens)\n\n // Backwards compatible token-level post hook: if provided and returns\n // a modified token array, re-process tokens and override node-level result.\n if (post && typeof post === 'function') {\n result = post(transformedTokens) || transformedTokens\n }\n return result\n}\n\n// Process markdown-it tokens into our structured format\nexport function processTokens(tokens: MarkdownToken[]): ParsedNode[] {\n // Defensive: ensure tokens is an array\n if (!tokens || !Array.isArray(tokens))\n return []\n\n const result: ParsedNode[] = []\n let i = 0\n tokens = fixTableTokens(tokens)\n while (i < tokens.length) {\n const token = tokens[i]\n switch (token.type) {\n case 'container_warning_open':\n case 'container_info_open':\n case 'container_note_open':\n case 'container_tip_open':\n case 'container_danger_open':\n case 'container_caution_open':\n case 'container_error_open': {\n const [warningNode, newIndex] = parseContainer(tokens, i)\n result.push(warningNode)\n i = newIndex\n break\n }\n\n case 'heading_open':\n result.push(parseHeading(tokens, i))\n i += 3 // Skip heading_open, inline, heading_close\n break\n\n case 'paragraph_open':\n result.push(parseParagraph(tokens, i))\n i += 3 // Skip paragraph_open, inline, paragraph_close\n break\n\n case 'html_block':\n case 'code_block':\n result.push(parseCodeBlock(tokens[i]))\n i += 1\n break\n\n case 'fence':\n result.push(parseFenceToken(tokens[i]))\n i += 1\n break\n\n case 'bullet_list_open':\n case 'ordered_list_open': {\n const [listNode, newIndex] = parseList(tokens, i)\n result.push(listNode)\n i = newIndex\n break\n }\n\n case 'hr':\n result.push(parseThematicBreak())\n i += 1\n break\n\n case 'blockquote_open': {\n const [blockquoteNode, newIndex] = parseBlockquote(tokens, i)\n result.push(blockquoteNode)\n i = newIndex\n break\n }\n\n case 'table_open': {\n const [tableNode, newIndex] = parseTable(tokens, i)\n result.push(tableNode)\n i = newIndex\n break\n }\n\n case 'dl_open': {\n const [definitionListNode, newIndex] = parseDefinitionList(tokens, i)\n result.push(definitionListNode)\n i = newIndex\n break\n }\n\n case 'footnote_open': {\n const [footnoteNode, newIndex] = parseFootnote(tokens, i)\n result.push(footnoteNode)\n i = newIndex\n break\n }\n\n case 'container_open': {\n const match\n = /^::: ?(warning|info|note|tip|danger|caution|error) ?(.*)$/.exec(\n token.info || '',\n )\n if (match) {\n const [admonitionNode, newIndex] = parseAdmonition(tokens, i, match)\n result.push(admonitionNode)\n i = 
newIndex\n }\n else {\n i += 1 // Not a container type we handle, skip\n }\n break\n }\n\n case 'hardbreak':\n result.push(parseHardBreak())\n i++\n break\n\n case 'math_block':\n result.push(parseMathBlock(tokens[i]))\n i += 1\n break\n\n default:\n // Handle other token types or skip them\n i += 1\n break\n }\n }\n\n return result\n}\n\nexport { parseInlineTokens }\n","import type { FactoryOptions } from './factory'\nimport MarkdownIt from 'markdown-it'\nimport { full as markdownItEmoji } from 'markdown-it-emoji'\nimport markdownItFootnote from 'markdown-it-footnote'\nimport markdownItIns from 'markdown-it-ins'\nimport markdownItMark from 'markdown-it-mark'\nimport markdownItSub from 'markdown-it-sub'\nimport markdownItSup from 'markdown-it-sup'\n\nimport * as markdownItCheckbox from 'markdown-it-task-checkbox'\nimport { factory } from './factory'\nimport {\n parseInlineTokens,\n parseMarkdownToStructure,\n processTokens,\n} from './parser'\n\n// Re-export config\nexport { setDefaultMathOptions } from './config'\n\n// Re-export parser functions\nexport { parseInlineTokens, parseMarkdownToStructure, processTokens }\nexport type { MathOptions } from './config'\n\n// Re-export utilities\nexport { findMatchingClose } from './findMatchingClose'\n\nexport { parseFenceToken } from './parser/inline-parsers/fence-parser'\n// Re-export plugins\nexport { applyContainers } from './plugins/containers'\n\nexport { ESCAPED_TEX_BRACE_COMMANDS, isMathLike, TEX_BRACE_COMMANDS } from './plugins/isMathLike'\nexport { applyMath, KATEX_COMMANDS, normalizeStandaloneBackslashT } from './plugins/math'\n// Re-export the node types for backward compatibility\nexport * from './types'\n\nexport interface GetMarkdownOptions extends FactoryOptions {\n plugin?: Array<any>\n apply?: Array<(md: MarkdownIt) => void>\n /**\n * Custom translation function or translation map for UI texts\n * @default { 'common.copy': 'Copy' }\n */\n i18n?: ((key: string) => string) | Record<string, string>\n}\n\nexport function getMarkdown(msgId: string = `editor-${Date.now()}`, options: GetMarkdownOptions = {}) {\n // keep legacy behaviour but delegate to new factory and reapply project-specific rules\n const md = factory(options)\n\n // Setup i18n translator function\n const defaultTranslations: Record<string, string> = {\n 'common.copy': 'Copy',\n }\n\n let t: (key: string) => string\n if (typeof options.i18n === 'function') {\n t = options.i18n\n }\n else if (options.i18n && typeof options.i18n === 'object') {\n const i18nMap = options.i18n as Record<string, string>\n t = (key: string) => i18nMap[key] ?? defaultTranslations[key] ?? key\n }\n else {\n t = (key: string) => defaultTranslations[key] ?? key\n }\n\n // apply user supplied plugins (md.use)\n if (Array.isArray(options.plugin)) {\n for (const p of options.plugin) {\n // allow both [plugin, opts] tuple or plugin function\n if (Array.isArray(p))\n md.use(p[0], p[1])\n else\n md.use(p)\n }\n }\n\n // apply user supplied apply functions to mutate the md instance (e.g. 
md.block.ruler.before(...))\n if (Array.isArray(options.apply)) {\n for (const fn of options.apply) {\n try {\n fn(md)\n }\n catch (e) {\n // swallow errors to preserve legacy behaviour; developers can see stack in console\n\n console.error('[getMarkdown] apply function threw an error', e)\n }\n }\n }\n\n // Re-apply a few project specific plugins that were previously always enabled\n md.use(markdownItSub)\n md.use(markdownItSup)\n md.use(markdownItMark)\n md.use(markdownItEmoji)\n const markdownItCheckboxPlugin\n = (markdownItCheckbox as any).default ?? markdownItCheckbox\n md.use(markdownItCheckboxPlugin)\n md.use(markdownItIns)\n md.use(markdownItFootnote)\n\n // Annotate fence tokens with unclosed meta using a lightweight line check\n md.core.ruler.after('block', 'mark_fence_closed', (state: any) => {\n const src: string = state.src as string\n const lines = src.split(/\\r?\\n/)\n for (const token of state.tokens) {\n if (token.type !== 'fence' || !token.map || !token.markup)\n continue\n const openLine: number = token.map[0]\n const endLine: number = token.map[1]\n const markup: string = token.markup\n const marker = markup[0]\n const minLen = markup.length\n // The closing line, if exists, should be the last line consumed by the block\n const lineIdx = Math.max(0, endLine - 1)\n const line = lines[lineIdx] ?? ''\n let i = 0\n while (i < line.length && (line[i] === ' ' || line[i] === '\\t')) i++\n let count = 0\n while (i + count < line.length && line[i + count] === marker) count++\n let j = i + count\n while (j < line.length && (line[j] === ' ' || line[j] === '\\t')) j++\n const closed = endLine > openLine + 1 && count >= minLen && j === line.length\n token.meta = token.meta || {}\n token.meta.unclosed = !closed\n // also set a explicit `closed` boolean for compatibility with plugins/tests\n token.meta.closed = !!closed\n }\n })\n\n // wave rule (legacy)\n const waveRule = (state: any, silent: boolean) => {\n const start = state.pos\n if (state.src[start] !== '~')\n return false\n const prevChar = state.src[start - 1]\n const nextChar = state.src[start + 1]\n if (/\\d/.test(prevChar) && /\\d/.test(nextChar)) {\n if (!silent) {\n const token = state.push('text', '', 0)\n token.content = '~'\n }\n state.pos += 1\n return true\n }\n return false\n }\n\n md.inline.ruler.before('sub', 'wave', waveRule)\n\n // custom fence that uses msgId for unique ids\n md.renderer.rules.fence = (tokens: any, idx: number) => {\n const token = tokens[idx]\n const info = token.info ? 
token.info.trim() : ''\n const str = token.content\n const encodedCode = btoa(unescape(encodeURIComponent(str)))\n const language = info || 'text'\n const uniqueId = `editor-${msgId}-${idx}-${language}`\n\n return `<div class=\"code-block\" data-code=\"${encodedCode}\" data-lang=\"${language}\" id=\"${uniqueId}\">\n <div class=\"code-header\">\n <span class=\"code-lang\">${language.toUpperCase()}</span>\n <button class=\"copy-button\" data-code=\"${encodedCode}\">${t(\n 'common.copy',\n )}</button>\n </div>\n <div class=\"code-editor\"></div>\n </div>`\n }\n\n // reference rule (legacy)\n const referenceInline = (state: any, silent: boolean) => {\n if (state.src[state.pos] !== '[')\n return false\n const match = /^\\[(\\d+)\\]/.exec(state.src.slice(state.pos))\n if (!match)\n return false\n if (!silent) {\n const id = match[1]\n const token = state.push('reference', 'span', 0)\n token.content = id\n token.markup = match[0]\n }\n state.pos += match[0].length\n return true\n }\n\n md.inline.ruler.before('escape', 'reference', referenceInline)\n md.renderer.rules.reference = (tokens: any, idx: number) => {\n const id = tokens[idx].content\n return `<span class=\"reference-link\" data-reference-id=\"${id}\" role=\"button\" tabindex=\"0\" title=\"Click to view reference\">${id}</span>`\n }\n\n return md\n}\n\nexport function getCommonMarkdown() {\n const md = new MarkdownIt({\n html: true,\n linkify: true,\n typographer: true,\n breaks: false,\n })\n return md\n}\n\nexport function renderMarkdown(md: MarkdownIt, content: string) {\n const html = md.render(content)\n return html\n}\n"],"mappings":";;;;;;;;;;;AAgBA,IAAIA;AAEJ,SAAgB,sBAAsB,MAA+B;AACnE,sBAAqB;;AAGvB,SAAgB,wBAAiD;AAC/D,QAAO;;;;;ACpBT,SAAgB,gBAAgB,IAAgB;AAC7C;EACC;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD,CAAC,SAAS,SAAS;AAClB,KAAG,IAAI,qBAAqB,MAAM,EAChC,OAAO,QAAa,KAAa;AAE/B,OADc,OAAO,KACX,YAAY,EACpB,QAAO,2CAA2C,KAAK;OAGvD,QAAO;KAGZ,CAAC;GACF;AAGF,IAAG,MAAM,MAAM,OACb,SACA,2BACC,OAAY,WAAmB,SAAiB,WAAoB;EACnE,MAAM,WAAW,MAAM,OAAO,aAAa,MAAM,OAAO;EACxD,MAAM,UAAU,MAAM,OAAO;EAC7B,MAAM,cAAc,MAAM,IACvB,MAAM,UAAU,QAAQ,CACxB,MAAM,eAAe;AACxB,MAAI,CAAC,YACH,QAAO;AACT,MAAI,OACF,QAAO;EAET,MAAM,OAAO,YAAY;EACzB,IAAI,WAAW,YAAY;EAC3B,IAAI,QAAQ;AACZ,SAAO,YAAY,SAAS;GAC1B,MAAM,OAAO,MAAM,OAAO,YAAY,MAAM,OAAO;GACnD,MAAM,OAAO,MAAM,OAAO;AAC1B,OAAI,MAAM,IAAI,MAAM,MAAM,KAAK,CAAC,MAAM,KAAK,OAAO;AAChD,YAAQ;AACR;;AAEF;;AAEF,MAAI,CAAC,MACH,QAAO;AAGT,EADkB,MAAM,KAAK,sBAAsB,OAAO,EAAE,CAClD,QAAQ,SAAS,+BAA+B,OAAO;EAEjE,MAAMC,eAAyB,EAAE;AACjC,OAAK,IAAI,IAAI,YAAY,GAAG,IAAI,UAAU,KAAK;GAC7C,MAAM,OAAO,MAAM,OAAO,KAAK,MAAM,OAAO;GAC5C,MAAM,OAAO,MAAM,OAAO;AAC1B,gBAAa,KAAK,MAAM,IAAI,MAAM,MAAM,KAAK,CAAC;;AAIhD,QAAM,KAAK,kBAAkB,KAAK,EAAE;EACpC,MAAM,cAAc,MAAM,KAAK,UAAU,IAAI,EAAE;AAC/C,cAAY,UAAU,aAAa,KAAK,KAAK;AAC7C,cAAY,MAAM,CAAC,YAAY,GAAG,SAAS;AAG3C,cAAY,WAAW,EAAE;AACzB,QAAM,GAAG,OAAO,MAAM,YAAY,SAAS,MAAM,IAAI,MAAM,KAAK,YAAY,SAAS;AACrF,QAAM,KAAK,mBAAmB,KAAK,GAAG;AAEtC,QAAM,KAAK,uBAAuB,OAAO,GAAG;AAE5C,QAAM,OAAO,WAAW;AACxB,SAAO;GAEV;;;;;ACnFH,SAAgB,kBAAkB,KAAa,UAAkB,MAAc,OAAe;CAC5F,MAAM,MAAM,IAAI;AAGhB,KAAI,SAAS,QAAQ,UAAU,MAAM;EACnC,IAAIC,MAAI;AACR,SAAOA,MAAI,MAAM,GAAG;AAClB,OAAI,IAAIA,SAAO,OAAO,IAAIA,MAAI,OAAO,KAAK;IAExC,IAAI,IAAIA,MAAI;IACZ,IAAI,cAAc;AAClB,WAAO,KAAK,KAAK,IAAI,OAAO,MAAM;AAChC;AACA;;AAEF,QAAI,cAAc,MAAM,EACtB,QAAOA;;AAEX;;AAEF,SAAO;;CAGT,MAAM,WAAW,KAAK,KAAK,SAAS;CACpC,MAAM,WAAW;CACjB,IAAI,QAAQ;CACZ,IAAI,IAAI;AACR,QAAO,IAAI,KAAK;AAEd,MAAI,IAAI,MAAM,GAAG,IAAI,SAAS,OAAO,KAAK,UAAU;GAClD,IAAI,IAAI,IAAI;GACZ,IAAI,cAAc;AAClB,UAAO,KAAK,KAAK,IAAI,OAAO,MAAM;AAChC;AACA;;AAEF,OAAI,cAAc
,MAAM,GAAG;AACzB,QAAI,UAAU,EACZ,QAAO;AACT;AACA,SAAK,SAAS;AACd;;;EAIJ,MAAM,KAAK,IAAI;AAEf,MAAI,OAAO,MAAM;AACf,QAAK;AACL;;AAGF,MAAI,OAAO,SACT;WAEO,OAAO,SAAS,SAAS,SAAS,IACzC;OAAI,QAAQ,EACV;;AAEJ;;AAEF,QAAO;;AAGT,gCAAe;;;;AChEf,MAAa,qBAAqB;CAChC;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAa,6BAA6B,mBAAmB,KAAI,MAAK,EAAE,QAAQ,wBAAwB,OAAO,CAAC,CAAC,KAAK,IAAI;AAE1H,MAAM,aAAa;AACnB,MAAM,eAAe;AACrB,MAAM,yBAAyB,IAAI,OAAO,GAAG,aAAa,KAAK,2BAA2B,mBAAmB,IAAI;AAKjH,MAAM,yBAAyB,IAAI,OAAO,MAAM,aAAa,OAAO,2BAA2B,SAAS,IAAI;AAC5G,MAAM,kBAAkB;AAOxB,MAAM,yBAAS,IAAI,OAAO,iFAAiF;AAC3G,MAAM,eAAe;AACrB,MAAM,WAAW;AAGjB,MAAM,eAAe;AACrB,SAAgB,WAAW,GAAW;AACpC,KAAI,CAAC,EACH,QAAO;CAQT,MAAM,OAAO,EAAE,QAAQ,WAAW,MAAM;CACxC,MAAM,WAAW,KAAK,MAAM;AAI5B,KAAI,aAAa,KAAK,SAAS,CAC7B,QAAO;AACT,KAAI,SAAS,SAAS,IACpB,QAAO;AAET,KAAI,sBAAsB,KAAK,EAAE,CAC/B,QAAO;CAIT,MAAM,SAAS,WAAW,KAAK,KAAK;CACpC,MAAM,mBAAmB,uBAAuB,KAAK,KAAK;CAC1D,MAAM,gBAAgB,uBAAuB,KAAK,KAAK;CAGvD,MAAM,cAAc,gBAAgB,KAAK,KAAK;CAG9C,MAAM,WAFmB,4DAES,KAAK,KAAK,IADjB,6DACwC,KAAK,KAAK;CAE7E,MAAM,MAAM,OAAO,KAAK,KAAK;CAE7B,MAAM,WAAW,aAAa,KAAK,KAAK;CAExC,MAAM,QAAQ,SAAS,KAAK,KAAK;AAKjC,QAAO,UAAU,oBAAoB,iBAAiB,eAAe,YAAY,OAAO,YAAY;;;;;ACvEtG,MAAa,iBAAiB;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAOD,MAAa,yBAAyB,eACnC,OAAO,CACP,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO,CACnC,KAAI,MAAK,EAAE,QAAQ,yBAAyB,OAAO,CAAC,CACpD,KAAK,IAAI;AACZ,MAAM,sBAAsB;AAI5B,MAAMC,cAAsC;CAC1C,KAAM;CACN,MAAM;CACN,MAAM;CACN,MAAM;CACN,MAAM;CACP;AAED,SAAS,qBAAqB,GAAW;CACvC,MAAM,KAAK;CAEX,IAAI,IAAI;AAER,QAAY,GAAG,KAAK,EAAE,KAAM,KAC1B;AAEF,QAAO;;AAGT,SAAgB,8BAA8B,GAAW,MAAoB;CAC3E,MAAM,WAAW,MAAM,YAAY;CACnC,MAAM,oBAAoB,MAAM,qBAAqB;CAErD,MAAM,aAAa,MAAM,YAAY;CAGrC,IAAIC;AACJ,KAAI,WACF,MAAK,IAAI,OAAO,GAAG,oBAAoB,iBAAiB,uBAAuB,OAAO,IAAI;MAEvF;EACH,MAAM,iBAAiB,MAAM,SAAS,OAAO,CAAC,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,KAAI,MAAK,EAAE,QAAQ,0BAA0B,OAAO,CAAC,CAAC,KAAK,IAAI,CAAC;AAClJ,OAAK,IAAI,OAAO,GAAG,oBAAoB,iBAAiB,eAAe,OAAO,IAAI;;CAGpF,IAAI,MAAM,EAAE,QAAQ,KAAK,GAAW,QAAiB;AACnD,MAAI,YAAY,OAAO,OACrB,QAAO,KAAK,YAAY;AAC1B,MAAI,OAAO,SAAS,SAAS,IAAI,CAC/B,QAAO,KAAK;AACd,SAAO;GACP;AAGF,KAAI,kBACF,OAAM,IAAI,QAAQ,eAAe,QAAQ;CAS3C,MAAM,eAAe,aACjB,CAAC,4BAA4B,uBAAuB,CAAC,OAAO,QAAQ,CAAC,KAAK,IAAI,GAC9E,CAAC,SAAS,KAAI,MAAK,EAAE,QAAQ,yBAAyB,OAAO,CAAC,CAAC,KAAK,IAAI,EAAE,2BAA2B,CAAC,OAAO,QAAQ,CAAC,KAAK,IAAI;CACnI,IAAI,SAAS;AACb,KAAI,cAAc;EAChB,MAAM,aAAa,IAAI,OAAO,eAAe,aAAa,WAAW,IAAI;AACzE,WAAS,OAAO,QAAQ,aAAa,IAAY,IAAY,OAAe,GAAG,GAAG,IAAI,GAAG,GAAG;;AAE9F,UAAS,OAAO,QAAQ,mBAAmB,eAAe,CACvD,QAAQ,oDAAoD,+BAA+B;AAC9F,QAAO;;AAET,SAAgB,UAAU,IAAgB,UAAwB;CAEhE,MAAM,cAAc,OAAY,WAAoB;AAClD,MAAI,WAAW,KAAK,MAAM,IAAI,CAC5B,QAAO;EAET,MAAMC,aAAiC;GACrC,CAAC,MAAM,KAAK;GACZ,CAAC,OAAO,MAAM;GACd,CAAC,KAAM,IAAK;GACb;EAED,IAAI,YAAY;EAChB,IAAI,aAAa;AAEjB,OAAK,MAAM,CAAC,MAAM,UAAU,YAAY;GAEtC,MAAM,MAAM,MAAM;GAClB,IAAI,WAAW;GACf,MAAM,YAAY,SAAiB;AAEjC,QAAI,SAAS,eAAe,QAAQ,KAClC,QAAO;AAET,QAAI,SAAS,MAAM;AACjB,WAAM,MAAM,MAAM,MAAM,KAAK;AAC7B,iBAAY,MAAM;AAClB;;AAEF,QAAI,SAAS,SAAS,SAAS,OAAO;KACpC,MAAMC,MAAI,MAAM,KAAK,gBAAgB,IAAI,EAAE;AAC3C,SAAE,UAAU,SAAS,QAAQ,MAAM;AACnC,SAAE,SAAS;AACX,WAAM,MAAM,MAAM,MAAM,KAAK;AAC7B,iBAAY,MAAM;AAClB;;AAGF,QAAI,CAAC,KACH;IAsBF,MAAM,IAAI,MAAM,KAAK,QAAQ,IAAI,EAAE;AACnC,MAAE,UAAU;AACZ,UAAM,MAAM,MAAM,MAAM,KAAK;AAC7B,gBAAY,MAAM;;AAGpB,UAAO,MAAM;AACX,QAAI,aAAa,IAAI,OACnB;IAC
F,MAAM,QAAQ,IAAI,QAAQ,MAAM,UAAU;AAC1C,QAAI,UAAU,GACZ;AAMF,QAAI,QAAQ,GAAG;KACb,IAAI,IAAI,QAAQ;AAEhB,YAAO,KAAK,KAAK,IAAI,OAAO,IAC1B;AACF,SAAI,KAAK,KAAK,IAAI,OAAO,IACvB,QAAO;;IAMX,MAAM,SAASC,0BAAkB,KAAK,QAAQ,KAAK,QAAQ,MAAM,MAAM;AAEvE,QAAI,WAAW,IAAI;KAEjB,MAAMC,YAAU,IAAI,MAAM,QAAQ,KAAK,OAAO;AAC9C,SAAI,WAAWA,UAAQ,EAAE;AACvB,kBAAY,QAAQ,KAAK;AACzB,iBAAW;AACX,UAAI,CAAC,QAAQ;AACX,aAAM,UAAU;OAEhB,MAAM,iBAAiB,qBADF,aAAa,IAAI,MAAM,YAAY,UAAU,GAAG,IAAI,MAAM,GAAG,UAAU,CACnC,GAAG,MAAM;AAElE,WAAI,WACF,UAAS,IAAI,MAAM,YAAY,UAAU,CAAC;WAE1C,UAAS,IAAI,MAAM,GAAG,UAAU,CAAC;AACnC,WAAI,gBAAgB;QAClB,MAAM,cAAc,MAAM,KAAK,eAAe,IAAI,EAAE;AACpD,oBAAY,SAAS,IAAI,MAAM,GAAG,QAAQ,EAAE;QAC5C,MAAM,QAAQ,MAAM,KAAK,eAAe,QAAQ,EAAE;AAClD,cAAM,UAAU,8BAA8BA,WAAS,SAAS;AAChE,cAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,cAAM,MAAM,GAAG,OAAOA,YAAU;AAChC,cAAM,UAAU;AAChB,oBAAY,UAAUA;AACtB,cAAM,KAAK,gBAAgB,IAAI,EAAE;cAE9B;QACH,MAAM,QAAQ,MAAM,KAAK,eAAe,QAAQ,EAAE;AAClD,cAAM,UAAU,8BAA8BA,WAAS,SAAS;AAChE,cAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,cAAM,MAAM,GAAG,OAAOA,YAAU;AAChC,cAAM,UAAU;;AAGlB,aAAM,MAAM,IAAI;;AAElB,kBAAY,IAAI;AAChB,mBAAa;;AAEf;;IAEF,MAAM,UAAU,IAAI,MAAM,QAAQ,KAAK,QAAQ,OAAO;AACtD,QAAI,CAAC,WAAW,QAAQ,EAAE;AAGxB,iBAAY,SAAS,MAAM;KAC3B,MAAM,OAAO,IAAI,MAAM,MAAM,KAAK,UAAU;AAC5C,SAAI,CAAC,MAAM,QACT,UAAS,KAAK;AAChB;;AAEF,eAAW;AAEX,QAAI,CAAC,QAAQ;KAEX,MAAM,SAAS,IAAI,MAAM,GAAG,MAAM;KAUlC,IAAI,eAPiB,IAAI,MAAM,GAAG,UAAU,GAOV,IAAI,MAAM,YAAY,MAAM,GAAG;KACjE,MAAM,iBAAiB,qBAAqB,aAAa,GAAG,MAAM;AAClE,SAAI,UAAU,MAAM,OAAO,eACzB,gBAAe,MAAM,UAAU,IAAI,MAAM,MAAM,KAAK,MAAM;AAI5D,SAAI,MAAM,YAAY,cAAc;AAClC,YAAM,UAAU;AAChB,UAAI,gBAAgB;OAClB,MAAM,SAAS,aAAa,MAAM,QAAQ;OAC1C,MAAM,QAAQ,aAAa,MAAM,OAAQ,QAAS,OAAQ,GAAG,OAAO;AACpE,gBAAS,aAAa,MAAM,GAAG,OAAQ,MAAO,CAAC;OAC/C,MAAM,cAAc,MAAM,KAAK,eAAe,IAAI,EAAE;AACpD,mBAAY,SAAS,OAAQ;OAC7B,MAAM,YAAY,MAAM,KAAK,QAAQ,IAAI,EAAE;AAC3C,iBAAU,UAAU;AACpB,aAAM,KAAK,gBAAgB,IAAI,EAAE;YAGjC,UAAS,aAAa;;AAG1B,SAAI,gBAAgB;MAClB,MAAM,cAAc,MAAM,KAAK,eAAe,IAAI,EAAE;AACpD,kBAAY,SAAS;MACrB,MAAM,QAAQ,MAAM,KAAK,eAAe,QAAQ,EAAE;AAClD,YAAM,UAAU,8BAA8B,SAAS,SAAS;AAChE,YAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,YAAM,MAAM,GAAG,OAAO,UAAU;AAChC,YAAM,UAAU;MAChB,MAAM,MAAM,IAAI,MAAM,SAAS,MAAM,OAAO;MAC5C,MAAM,gBAAgB,IAAI,WAAW,IAAI;AACzC,UAAI,cACF,OAAM,KAAK,gBAAgB,IAAI,EAAE;AAEnC,UAAI,KAAK;OACP,MAAM,mBAAmB,MAAM,KAAK,QAAQ,IAAI,EAAE;AAClD,wBAAiB,WAAW,OAAO,OAAO,KAAK,OAAO,IAAI,EAAE,QAAQ,QAAQ,GAAG;;AAEjF,UAAI,CAAC,cACH,OAAM,KAAK,gBAAgB,IAAI,EAAE;AACnC,YAAM,MAAM,IAAI;AAChB,kBAAY,IAAI;AAChB,mBAAa;AACb;YAEG;MACH,MAAM,QAAQ,MAAM,KAAK,eAAe,QAAQ,EAAE;AAClD,YAAM,UAAU,8BAA8B,SAAS,SAAS;AAChE,YAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,YAAM,MAAM,GAAG,OAAO,UAAU;AAChC,YAAM,UAAU;;;AAIpB,gBAAY,SAAS,MAAM;AAC3B,iBAAa;AACb,UAAM,MAAM;;AAGd,OAAI,UAAU;AACZ,QAAI,CAAC,QAAQ;AAEX,SAAI,YAAY,IAAI,OAClB,UAAS,IAAI,MAAM,UAAU,CAAC;AAEhC,WAAM,MAAM,IAAI;UAIhB,OAAM,MAAM;AAGd,WAAO;;;AAIX,SAAO;;CAIT,MAAM,aACJ,OACA,WACA,SACA,WACG;EACH,MAAMH,aAAiC;GACrC,CAAC,OAAO,MAAM;GACd,CAAC,KAAM,IAAK;GACZ,CAAC,MAAM,KAAK;GACb;EAED,MAAM,WAAW,MAAM,OAAO,aAAa,MAAM,OAAO;EACxD,MAAM,WAAW,MAAM,IAAI,MAAM,UAAU,MAAM,OAAO,WAAW,CAAC,MAAM;EAC1E,IAAI,UAAU;EACd,IAAI,YAAY;EAChB,IAAI,aAAa;AACjB,OAAK,MAAM,CAAC,MAAM,UAAU,WAC1B,KAAI,aAAa,QAAQ,SAAS,WAAW,KAAK,CAChD,KAAI,KAAK,SAAS,IAAI,EACpB;OAAI,SAAS,QAAQ,MAAM,GAAG,KAAK,KAAK;AACtC,QAAI,YAAY,IAAI,SAAS;AAO3B,eAAU;AACV,iBAAY;AACZ,kBAAa;AACb;;AAEF;;SAGC;AACH,aAAU;AACV,eAAY;AACZ,gBAAa;AACb;;AAKN,MAAI,CAAC,QACH,QAAO;AACT,MAAI,OACF,QAAO;AAET,MACE,SAAS,SAAS,WAAW,IAC1B,SAAS,QAAQ,WAAW,GAAG,UAAU,QAC5C;GACA,MAAM,kBAAkB,SAAS,QAAQ,UA
AU;GACnD,MAAM,gBAAgB,SAAS,QAC7B,YACA,kBAAkB,UAAU,OAC7B;GACD,MAAMG,YAAU,SAAS,MACvB,kBAAkB,UAAU,QAC5B,cACD;GAED,MAAMC,UAAa,MAAM,KAAK,cAAc,QAAQ,EAAE;AACtD,WAAM,UAAU,8BAA8BD,UAAQ;AACtD,WAAM,SACF,cAAc,OAAO,OAAO,cAAc,MAAM,OAAO;AAC3D,WAAM,MAAM,CAAC,WAAW,YAAY,EAAE;AACtC,WAAM,MAAM,GAAG,YAAYA,YAAU;AACrC,WAAM,QAAQ;AACd,WAAM,UAAU;AAChB,SAAM,OAAO,YAAY;AACzB,UAAO;;EAGT,IAAI,WAAW;EACf,IAAI,UAAU;EACd,IAAI,QAAQ;EAEZ,MAAM,mBACF,aAAa,YAAY,KAAK,SAAS,MAAM,UAAU,OAAO;AAElE,MAAI,iBAAiB,SAAS,WAAW,EAAE;GACzC,MAAM,WAAW,iBAAiB,QAAQ,WAAW;AACrD,aAAU,iBAAiB,MAAM,GAAG,SAAS;AAC7C,WAAQ;AACR,cAAW;SAER;AACH,OAAI,iBACF,WAAU;AAEZ,QAAK,WAAW,YAAY,GAAG,WAAW,SAAS,YAAY;IAC7D,MAAM,YAAY,MAAM,OAAO,YAAY,MAAM,OAAO;IACxD,MAAM,UAAU,MAAM,OAAO;IAC7B,MAAM,cAAc,MAAM,IAAI,MAAM,YAAY,GAAG,QAAQ;AAC3D,QAAI,YAAY,MAAM,KAAK,YAAY;AACrC,aAAQ;AACR;eAEO,YAAY,SAAS,WAAW,EAAE;AACzC,aAAQ;KACR,MAAM,WAAW,YAAY,QAAQ,WAAW;AAChD,iBAAY,UAAU,OAAO,MAAM,YAAY,MAAM,GAAG,SAAS;AACjE;;AAEF,gBAAY,UAAU,OAAO,MAAM;;;EAIvC,MAAMC,QAAa,MAAM,KAAK,cAAc,QAAQ,EAAE;AACtD,QAAM,UAAU,8BAA8B,QAAQ;AACtD,QAAM,SACF,cAAc,OAAO,OAAO,cAAc,MAAM,OAAO;AAC3D,QAAM,MAAM,GAAG,YAAY,UAAU,QAAQ,WAAW,KAAK,GAAG,OAAO,KAAK;AAC5E,QAAM,MAAM,CAAC,WAAW,WAAW,EAAE;AACrC,QAAM,QAAQ;AACd,QAAM,UAAU,CAAC;AACjB,QAAM,OAAO,WAAW;AACxB,SAAO;;AAOT,IAAG,OAAO,MAAM,OAAO,UAAU,QAAQ,WAAW;AACpD,IAAG,MAAM,MAAM,OAAO,aAAa,cAAc,WAAW,EAC1D,KAAK;EAAC;EAAa;EAAa;EAAc;EAAO,EACtD,CAAC;;;;;ACzgBJ,SAAgB,iBAAiB,IAAgB;CAC/C,MAAM,eACF,GAAG,SAAS,MAAM,SACf,SAAU,QAAa,KAAa,SAAc,KAAU,MAAW;AACxE,SAAO,KAAK,YAAY,QAAQ,KAAK,QAAQ;;AAGnD,IAAG,SAAS,MAAM,SAChB,QACA,KACA,SACA,KACA,SACG;AAEH,EADc,OAAO,KACf,UAAU,WAAW,OAAO;AAClC,SAAO,aAAa,QAAQ,KAAK,SAAS,KAAK,KAAK;;AAGtD,IAAG,SAAS,MAAM,QACd,GAAG,SAAS,MAAM,WACb,QAAa,QAAgB;EAChC,MAAM,QAAQ,OAAO;EACrB,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK,MAAM,GAAG;AAK9C,SAAO,eAJW,OACd,YAAY,GAAG,MAAM,WAAW,KAAK,MAAM,OAAO,CAAC,GAAG,KACtD,GAE4B,UADnB,GAAG,MAAM,WAAW,MAAM,QAAQ,CACA;;;;;;AChBvD,SAAgB,QAAQ,OAAuB,EAAE,EAAc;CAC7D,MAAM,KAAK,IAAI,WAAW;EACxB,MAAM;EACN,SAAS;EACT,aAAa;EACb,GAAI,KAAK,qBAAqB,EAAE;EACjC,CAAC;AAEF,KAAI,KAAK,cAAc,KAErB,WAAU,IAD6B;EAAE,GAAI,uBAAuB,IAAI,EAAE;EAAG,GAAI,KAAK,eAAe,EAAE;EAAG,CAC1E;AAElC,KAAI,KAAK,oBAAoB,KAC3B,iBAAgB,GAAG;AACrB,kBAAiB,GAAG;AAEpB,QAAO;;;;;AC5BT,SAAS,cAAc;AACrB,QAAO;EACL;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,KAAK;GACL,UAAU;GACV,SAAS;GACT,QAAQ;GACR,MAAM;GACN,OAAO;GACP,SAAS;GACT,MAAM;GACP;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAEF;;AAEH,SAAS,YAAY;AACnB,QAAO;EACL;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,KAAK;GACL,UAAU;GACV,SAAS;GACT,QAAQ;GACR,MAAM;GACN,OAAO;GACP,MAAM;GACP;EACF;;AAEH,SAAS,SAAS,MAAc;AAC9B,QAAO;EAAC;GACN,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAAE;GACD,MAAM;GACN,KAAK;GACL,UAAU,CACR;IACE,KAAK;IACL,MAAM;IACN,OAAO;IACP,SAAS;IACT,UAAU;IACX,CACF;GACD,SAAS;GACT,OAAO;GACP,OAAO;GACP,OAAO;GACR;EAAE;GACD,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAAC;;AAEJ,SAAgB,eAAe,QAA0C;CACvE,MAAM,cAAc,CAAC,GAAG,OAAO;AAC/B,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,MAAM,IAAI,OAAO,SAAS;CAC1B,MAAM,QAAQ,OAAO;AAErB,KAAI,MAAM,SAAS,UACjB;MAAI,qBAAqB,KAAK,MAAM,QAAS,EAAE;GAE7C,MAAM,OAAO,MAAM,SAAU,GAAG,QAAS,MAAM,EAAE,CAAC,MAAM,IAAI,CAAC,KAAI,QAAKC,IAAE,MAAM,CAAC,CAAC,OAAO,QAAQ,CAAC,SAAQ,QAAK,SAASA,IAAE,CAAC;GACzH,MAAM,SAAS;IACb,GAAG,aAAa;IAChB,GAAG;IACH,GAAG,WAAW;IACf;AACD,eAAY,OAAO,IAAI,GAAG,GAAG,GAAG,OAAO;aAEhC,2BAA2B,KAAK,MAAM,QAAS,EAAE;GAExD,MAAM,OAAO,MAAM,SAAU,GAAG,QAA
S,MAAM,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,KAAI,QAAKA,IAAE,MAAM,CAAC,CAAC,SAAQ,QAAK,SAASA,IAAE,CAAC;GAC7G,MAAM,SAAS;IACb,GAAG,aAAa;IAChB,GAAG;IACH,GAAG,WAAW;IACf;AACD,eAAY,OAAO,IAAI,GAAG,GAAG,GAAG,OAAO;aAEhC,4BAA4B,KAAK,MAAM,QAAS,EAAE;AACzD,SAAM,UAAU,MAAM,QAAS,MAAM,GAAG,GAAG;AAC3C,SAAM,SAAU,OAAO,GAAG,EAAE;;;AAIhC,QAAO;;;;;ACrIT,SAAgB,mBAAmB,OAAoC;AACrE,QAAO;EACL,MAAM;EACN,SAAS,MAAM,MAAM,YAAY;EACjC,KAAK,MAAM,MAAM,UAAU,QAAQ;EACpC;;AAGH,SAAgB,wBAAwB,OAA+B;AACrE,QAAO;EACL,MAAM;EACN,SAAS,MAAM,QAAQ,UAAU,KAAK,MAAM,MAAM,QAAQ,UAAU,KAAK;EACzE,KAAK,MAAM,QAAQ,UAAU,KAAK,MAAM,MAAM,QAAQ,UAAU,KAAK,SAAS,QAAQ;EACvF;;;;;ACbH,SAAgB,gBAAgB,OAAiC;AAC/D,QAAO;EACL,MAAM;EACN,MAAM,MAAM,WAAW;EACvB,QAAQ,MAAM,UAAU;EACxB,KAAK,IAAI,MAAM,WAAW,GAAG;EAC9B;;;;;ACLH,SAAgB,mBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,SAAS;CACb,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,YAAY;AACzD,YAAU,OAAO,GAAG,WAAW;AAC/B,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATkB;GACzB,MAAM;GACN;GACA,KAAK,IAAI,OAAO;GACjB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AChC5B,SAAS,iBAAiB,SAAiB;CACzC,MAAMC,OAAiB,EAAE;CACzB,MAAMC,UAAoB,EAAE;AAC5B,MAAK,MAAM,WAAW,QAAQ,MAAM,QAAQ,EAAE;EAC5C,MAAM,OAAO;AAEb,MAAI,qCAAqC,KAAK,KAAK,CACjD;AAEF,MAAI,KAAK,WAAW,KAAK,CACvB,MAAK,KAAK,IAAI,KAAK,MAAM,EAAE,GAAG;WAEvB,KAAK,WAAW,KAAK,CAC5B,SAAQ,KAAK,IAAI,KAAK,MAAM,EAAE,GAAG;OAE9B;AAEH,QAAK,KAAK,KAAK;AACf,WAAQ,KAAK,KAAK;;;AAGtB,QAAO;EACL,UAAU,KAAK,KAAK,KAAK;EACzB,SAAS,QAAQ,KAAK,KAAK;EAC5B;;AAGH,SAAgB,gBAAgB,OAAqC;CACnE,MAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,IAAI,MAAM,IAAI,WAAW;CAChE,MAAM,OAAQ,MAAc;CAC5B,MAAM,SAAS,OAAO,MAAM,WAAW,YAAY,KAAK,SAAS;CACjE,MAAM,OAAO,MAAM,MAAM,WAAW,OAAO,IAAI;CAC/C,MAAM,WAAW,OAAO,MAAM,MAAM,MAAM,IAAI,CAAC,MAAM,KAAK,MAAM,QAAQ;CAUxE,IAAI,UAAU,MAAM,WAAW;CAC/B,MAAM,oBAAoB;AAC1B,KAAI,kBAAkB,KAAK,QAAQ,CACjC,WAAU,QAAQ,QAAQ,mBAAmB,GAAG;AAElD,KAAI,MAAM;EACR,MAAM,EAAE,UAAU,YAAY,iBAAiB,QAAQ;AAEvD,SAAO;GACL,MAAM;GACN;GACA,MAAM,WAAW;GACjB,KAAK;GACL;GACA,SAAS,WAAW,OAAO,QAAQ,WAAW,QAAQ,OAAO,CAAC;GAC9D,cAAc;GACd,aAAa;GACd;;AAGH,QAAO;EACL,MAAM;EACN;EACA,MAAM,WAAW;EACjB,KAAK,WAAW;EAChB;EACA,SAAS,WAAW,OAAO,QAAQ,WAAW,QAAQ,OAAO,CAAC;EAC/D;;;;;ACrEH,SAAgB,aAAa,QAA0C;AACrE,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,MAAM,QAAQ,OAAO,OAAO,SAAS;AACrC,KAAI,MAAM,SAAS,UAAU,CAAC,MAAM,QAAS,SAAS,IAAI,CACxD,QAAO,eAAe,OAAO;AAE/B,KADe,OAAO,OAAO,SAAS,GAC3B,QAAQ,KACjB,QAAO,eAAe,OAAO;CAC/B,MAAM,OAAO,OAAO,OAAO,SAAS;AACpC,KAAI,KAAM,SAAS,UAAU,CAAC,KAAK,QAAS,WAAW,IAAI,CACzD,QAAO,eAAe,OAAO;CAE/B,MAAM,QAAQ,OAAO,OAAO,SAAS;CACrC,MAAM,OAAO,KAAK,QAAS,QAAQ,UAAU,GAAG;CAChD,MAAM,UAAU,CAAC,KAAK,QAAS,SAAS,IAAI;AAC5C,OAAM,UAAU,MAAM,QAAS,QAAQ,OAAO,GAAG;AACjD,QAAO,OAAO,OAAO,SAAS,GAAG,GAAG;EAClC,MAAM;EACN;EACA,MAAM,MAAM;EACZ,UAAU,CACR;GACE,MAAM;GACN,SAAS,MAAM;GACf,KAAK,MAAM;GACZ,CACF;EACD;EACD,CAAQ;AACT,QAAO,OAAO,OAAO,SAAS,GAAG,EAAE;AACnC,QAAO;;AAGT,SAAgB,eAAe,QAA0C;AACvE,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,IAAI,SAAS,OAAO;CACpB,IAAI,OAAO,OAAO,SAAS;AAC3B,KAAI,KAAK,SAAS,cAAc;AAC9B;AACA,SAAO,OAAO,SAAS;AACvB,MAAI,KAAK,SAAS,aAChB,QAAO;;AAGX,KADe,OAAO,SAAS,GACpB,SAAS,UAClB,QAAO;CACT,MAAM,QAAQ,OAAO,SAAS;CAC9B,MAAM,QAAQ,OAAO,SAAS;AAC9B,KAAI,MAAM,SAAS,OACjB,QAAO;CAGT,IAAI,OAAO,OAAO,SAAS,GAAG;CAC9B,IAAI,QAAQ;AACZ,KAAI,WAAW,OAAO,QAAQ;AAE5B,UAAQ,KAAK,WAAW;AACxB;;AAEF,QAAO,OAAO,SAAS,GAAG,MAAM;CAChC,MAAM,UAAU,MAAM;AACtB,WAAU;AACV,OAAM,UAAU,MAAM,QAAS,QAAQ,OAAO,GAAG;AACjD,QAAO,OAAO,SAAS,GAAG,GAAG;EAC3B,MAAM;EACN;EACA,MAAM;EACN,UAAU,CACR;GACE,MAAM;GACN;GACA,KAAK;GACN,CACF;EACD,SAAS;EACV,CAAQ;AACT,QAAO;;;;;AC9ET,SAAgB,YAAY,QAA0C;CACpE,MAAM,OAAO,OAAO,OAAO,SAAS;AAEpC,KAAI,MAAM,SAAS,UAAW
,YAAY,KAAK,KAAK,WAAW,GAAG,IAAI,OAAO,OAAO,SAAS,IAAI,QAAQ,KACvG,QAAO,OAAO,OAAO,SAAS,GAAG,EAAE;AAErC,QAAO;;;;;ACNT,SAAgB,gBAAgB,QAA0C;CACxE,MAAM,cAAc,CAAC,GAAG,OAAO;AAC/B,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,MAAM,IAAI,OAAO,SAAS;CAC1B,MAAM,QAAQ,OAAO;CACrB,MAAM,YAAY,OAAO,IAAI;AAC7B,KAAI,MAAM,SAAS,UAAU,MAAM,SAAS,SAAS,IAAI,IAAI,UAAU,SAAS,WAAW;EAEzF,MAAM,aAAa,OAAO,IAAI;EAC9B,MAAM,QAAQ,YAAY,SAAS,SAAS,IAAI;EAChD,MAAM,SAAS;GACb;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACD;IACE,MAAM;IACN,SAAS,YAAY,SAAS,SAAS,WAAW,UAAU;IAC7D;GACD;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACF;EACD,MAAM,aAAa,MAAM,SAAS,MAAM,GAAG,GAAG;AAC9C,MAAI,WACF,QAAO,QAAQ;GACb,MAAM;GACN,SAAS;GACT,KAAK;GACN,CAAC;AAEJ,cAAY,OAAO,GAAG,OAAO,GAAG,OAAO;AACvC,SAAO;;AAGT,QAAO;;;;;ACnDT,SAAgB,sBACd,OACuB;AACvB,QAAO;EACL,MAAM;EACN,IAAI,MAAM,MAAM,SAAS;EACzB,KAAK,KAAK,MAAM,MAAM,SAAS,GAAG;EACnC;;;;;ACPH,SAAgB,sBAAqC;AACnD,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACHH,SAAgB,oBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,WAAW;CACf,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAAc;AAC3D,cAAY,OAAO,GAAG,WAAW;AACjC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATmB;GAC1B,MAAM;GACN;GACA,KAAK,KAAK,SAAS;GACpB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AChC5B,SAAgB,gBAAgB,OAAsB,UAAU,OAAkB;AAChF,QAAO;EACL,MAAM;EACN,KAAK,MAAM,OAAO,MAAK,SAAQ,KAAK,OAAO,MAAM,GAAG,MAAM;EAC1D,KAAK,MAAM,OAAO,MAAK,SAAQ,KAAK,OAAO,MAAM,GAAG,MAAM;EAC1D,OAAO,MAAM,OAAO,MAAK,SAAQ,KAAK,OAAO,QAAQ,GAAG,MAAM;EAC9D,KAAK,MAAM,WAAW;EACtB;EACD;;;;;ACRH,SAAgB,qBAAqB,OAAsC;AACzE,QAAO;EACL,MAAM;EACN,MAAM,MAAM,WAAW;EACvB,KAAK,MAAM,WAAW;EACvB;;;;;ACJH,SAAgB,iBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,GAAG,WAAW;AAChC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATgB;GACvB,MAAM;GACN;GACA,KAAK,KAAK,QAAQ;GACnB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC/B5B,SAAgB,eACd,QACA,YAIA;CACA,MAAM,YAAY,OAAO;CACzB,MAAM,OAAO,UAAU,OAAO,MAAK,SAAQ,KAAK,OAAO,OAAO,GAAG,MAAM;CACvE,MAAM,QACF,UAAU,OAAO,MAAK,SAAQ,KAAK,OAAO,QAAQ,GAAG,MAAM;CAE/D,IAAI,IAAI,aAAa;CACrB,MAAMC,aAA8B,EAAE;CACtC,MAAM,UAAU;AAGhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAAc;AAC3D,aAAW,KAAK,OAAO,GAAG;AAC1B;;CAIF,MAAM,WAAW,kBAAkB,WAAW;CAC9C,MAAM,WAAW,SACd,KAAK,SAAS;AACb,MAAI,aAAa,KACf,QAAO,KAAK;AACd,SAAO,KAAK;GACZ,CACD,KAAK,GAAG;AAeX,QAAO;EAAE,MAbc;GACrB,MAAM;GACN;GACA;GACA,MAAM;GACN;GACA,KAAK,IAAI,SAAS,IAAI,OAAO,QAAQ,KAAK,MAAM,KAAK,GAAG;GACxD;GACD;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC7C5B,SAAgB,qBAAqB,OAAsC;AACzE,QAAO;EACL,MAAM;EACN,SAAS,MAAM,WAAW;EAC1B,SAAS,CAAC,CAAC,MAAM;EACjB,KAAK,MAAM;EACZ;;;;;ACNH,SAAgB,oBAAoB,OAAqC;AACvE,QAAO;EACL,MAAM;EACN,IAAI,MAAM,WAAW;EACrB,KAAK,MAAM,UAAU,IAAI,MAAM,QAAQ;EACxC;;;;;ACDH,SAAgB,wBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,QAAQ;CACZ,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAAW;AACxD,WAAS,OAAO,GAAG,WAAW;AAC9B,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATuB;GAC9B,MAAM;GACN;GACA,KAAK,KAAK,MAAM;GACjB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;ACnC5B,SAAgB,iBACd,QACA,YACA,KAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,aAAa;CACjB,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,gBAAgB;AAC7D,gBAAc,OAAO,GAAG,WAAW;AACnC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,aAAa,IAAI,
CAAC;AAWrD,QAAO;EAAE,MATgB;GACvB,MAAM;GACN;GACA,KAAK,KAAK,WAAW;GACtB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AChC5B,SAAgB,oBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,GAAG,WAAW;AAChC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAqBhD,QAAO;EAAE,MAnBmB;GAC1B,MAAM;GACN,UACE,SAAS,SAAS,IACd,WACA,CACE;IACE,MAAM;IAEN,SAAS,WAAW,OAAO,YAAY,WAAW;IAClD,KAAK,WAAW,OAAO,YAAY,WAAW;IAC/C,CACF;GACP,KAAK,IAAI,WAAW,OAAO,YAAY,WAAW,GAAG;GACtD;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;ACzC5B,SAAgB,sBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,GAAG,WAAW;AAChC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAqBhD,QAAO;EAAE,MAnBqB;GAC5B,MAAM;GACN,UACE,SAAS,SAAS,IACd,WACA,CACE;IACE,MAAM;IAEN,SAAS,WAAW,OAAO,YAAY,WAAW;IAClD,KAAK,WAAW,OAAO,YAAY,WAAW;IAC/C,CACF;GACP,KAAK,IAAI,WAAW,OAAO,YAAY,WAAW,GAAG;GACtD;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC1C5B,SAAgB,eAAe,OAAgC;AAC7D,QAAO;EACL,MAAM;EACN,SAAS,MAAM,WAAW;EAC1B,KAAK,MAAM,WAAW;EACvB;;;;;ACiBH,SAAgB,kBAAkB,QAAyB,KAAc,WAAyC;AAChH,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B,QAAO,EAAE;CAEX,MAAMC,SAAuB,EAAE;CAC/B,IAAIC,kBAAmC;CAEvC,IAAI,IAAI;AACR,UAAS,gBAAgB,OAAO;AAChC,UAAS,YAAY,OAAO;AAC5B,UAAS,aAAa,OAAO;AAE7B,QAAO,IAAI,OAAO,QAAQ;EACxB,MAAM,QAAQ,OAAO;AACrB,cAAY,MAAM;;CAGpB,SAAS,YAAY,OAAY;AAC/B,UAAQ,MAAM,MAAd;GACE,KAAK,QAAQ;IAEX,IAAI,QAAQ,OAAO,SAAS;IAC5B,IAAI,UAAU,MAAM,QAAQ,QAAQ,OAAO,GAAG,IAAI;AAClD,WAAY,SAAS,GAAG,SAAS;KAC/B,MAAM,OAAO,OAAO;AACpB,SAAI,KAAK,SAAS,QAAQ;AACxB,wBAAkB;AAClB,gBAAU,KAAK,UAAU;AACzB;;AAEF;;AAEF,QAAI,QAAQ,OAAO,SAAS,EAC1B,QAAO,OAAO,QAAQ,EAAE;IAE1B,MAAM,YAAY,OAAO,IAAI;AAC7B,QAAI,YAAY,OAAO,YAAY,OAAO,YAAY,OAAO,YAAY,OAAO,QAAQ,KAAK,QAAQ,IAAI,OAAO,KAAK,QAAQ,EAAE;AAC7H;AACA;;AAEF,QAAI,CAAC,aAAa,iBAAiB,KAAK,QAAQ,CAC9C,WAAU,QAAQ,QAAQ,UAAU,GAAG;AAEzC,QAAI,KAAK,WAAW,IAAI,IAAI,WAAW,SAAS,kBAAkB;KAEhE,MAAM,IADW,QAAQ,MAAM,EAAE,CACd,MAAM,UAAU;AACnC,SAAI,MAAM,MAAM;AACd;AACA;;AAGF,SAAK,KAAK,KAAK,KAAK,EAAE,GAAG,IAAK,CAAC,GAAG;MAEhC,MAAM,UAAU,IAAK,EAAE,OAAO,OAAO,EAAE,OAAO,MAAO;AACrD,aAAO,KAAK;OACV,MAAM;OACN;OACA,KAAK,UAAU,QAAQ;OACxB,CAAC;AACF;AACA;;;AAGJ,QAAI,SAAS,KAAK,QAAQ,EAAE;AAC1B,uBAAkB;KAClB,MAAMC,UAAQ,QAAQ,QAAQ,IAAI;KAClC,MAAM,QAAQ,QAAQ,MAAM,GAAGA,QAAM;KACrC,MAAM,cAAc,QAAQ,MAAMA,QAAM;AACxC,SAAI,MACF,QAAO,KAAK;MACV,MAAM;MACN,SAAS,SAAS;MAClB,KAAK,SAAS;MACf,CAAC;AAKJ,YAAO,KAAK;MACV,MAAM;MACN,MAAM,YAAY,QAAQ,MAAM,GAAG;MACnC,KAAK,eAAe;MACrB,CAAC;AACF;AACA;;AAEF,QAAI,YAAY,KAAK;AACnB;AACA;;AAEF,QAAI,kBAAkB,KAAK,QAAQ,EAAE;KAEnC,MAAMA,UAAQ,QAAQ,QAAQ,KAAK,IAAI;KACvC,MAAM,QAAQ,QAAQ,MAAM,GAAGA,QAAM;AACrC,SAAI,MACF,KAAI,iBAAiB;AAEnB,sBAAgB,WAAW;AAC3B,sBAAgB,OAAO;YAEpB;AAEH,wBAAkB;OAChB,MAAM;OACN,SAAS,SAAS;OAClB,KAAK,MAAM,WAAW;OACvB;AACD,aAAO,KAAK,gBAAgB;;KAGhC,MAAM,uBAAuB,QAAQ,MAAMA,QAAM;AAEjD,uBAAkB;KAGlB,MAAM,EAAE,SAAS,wBAAwB;MACvC;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS,qBAAqB,QAAQ,MAAM,GAAG;OAC/C,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACF,EAAE,EAAE;AACL,YAAO,KAAK,KAAK;AACjB;AACA;;AAEF,QAAI,iBAAiB,KAAK,QAAQ,EAAE;KAClC,MAAMA,UAAQ,QAAQ,QAAQ,IAAI,IAAI;KACtC,MAAM,QAAQ,QAAQ,MAAM,GAAGA,QAAM;AACrC,SAAI,MACF,KAAI,iBAAiB;AAEnB,sBAAgB,WAAW;AAC3B,sBAAgB,OAAO;YAEpB;AAEH,wBAAkB;OAChB,MAAM;OACN,SAAS,SAAS;OAClB,KAAK,MAAM,WAAW;OACvB;AACD,aAAO,KAAK,gBAAgB;;KAGhC,MAAM,gBAAgB,QAAQ,MAAMA,QAAM;AAE1C,uBAA
kB;KAGlB,MAAM,EAAE,SAAS,iBAAiB;MAChC;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS,cAAc,QAAQ,OAAO,GAAG;OACzC,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACF,EAAE,GAAG,IAAI;AACV,YAAO,KAAK,KAAK;AACjB;AACA;eAEO,eAAe,KAAK,QAAQ,EAAE;KACrC,MAAMA,UAAQ,QAAQ,QAAQ,IAAI,IAAI;KACtC,MAAM,QAAQ,QAAQ,MAAM,GAAGA,QAAM;AACrC,SAAI,MACF,KAAI,iBAAiB;AAEnB,sBAAgB,WAAW;AAC3B,sBAAgB,OAAO;YAEpB;AAEH,wBAAkB;OAChB,MAAM;OACN,SAAS,SAAS;OAClB,KAAK,MAAM,WAAW;OACvB;AACD,aAAO,KAAK,gBAAgB;;KAGhC,MAAM,kBAAkB,QAAQ,MAAMA,QAAM;AAE5C,uBAAkB;KAGlB,MAAM,EAAE,SAAS,mBAAmB;MAClC;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS,gBAAgB,QAAQ,OAAO,GAAG;OAC3C,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACD;OACE,MAAM;OACN,KAAK;OACL,SAAS;OACT,QAAQ;OACR,MAAM;OACN,MAAM;OACP;MACF,EAAE,EAAE;AACL,YAAO,KAAK,KAAK;AACjB;AACA;;IAEF,MAAM,aAAa,QAAQ,QAAQ,KAAK;AACxC,QAAI,eAAe,IAAI;KACrB,MAAM,kBAAkB,QAAQ,MAAM,GAAG,WAAW;AACpD,SAAI,CAAC,gBACH,mBAAkB;MAChB,MAAM;MACN,SAAS;MACT,KAAK;MACN;SAGD,iBAAgB,WAAW;AAE7B,YAAO,KAAK,gBAAgB;AAC5B,uBAAkB;AAClB,YAAO,KAAK,gBAAgB,OAAO,KAAK,CAAC;AACzC;AACA;;IAEF,MAAM,YAAY,QAAQ,QAAQ,IAAI;AAEtC,QAAI,QAAQ,SAAS,YAAY,IAAI,CAAC,KAAK,SAAS,YAAY,CAC9D,WAAU,QAAQ,MAAM,GAAG,GAAG;IAEhC,MAAM,WAAW,eAAe;KAAE,GAAG;KAAO;KAAS,CAAC;AAEtD,QAAI,cAAc,IAAI;KACpB,MAAM,kBAAkB,QAAQ,MAAM,GAAG,UAAU;KACnD,MAAM,UAAU,QAAQ,QAAQ,MAAM,UAAU;AAChD,SAAI,YAAY,IAAI;MAClB,MAAM,YAAY,OAAO,IAAI;MAC7B,MAAM,OAAO,QAAQ,MAAM,YAAY,GAAG,QAAQ;AAClD,UAAI,CAAC,QAAQ,KAAK,KAAK,EAAE;AACvB,WAAI,QAAQ,SAAS,KAAK,IAAI,WAAW,SAAS,eAAe,WAAW;QAE1E,MAAM,OAAO,OAAO,IAAI;QACxB,IAAIA,UAAQ;QACZ,IAAIC,YAAU;AACd,YAAI,MAAM,SAAS,UAAU,KAAK,YAAY,KAAK;AACjD;AACA,qBAAU;mBAEH,MAAM,SAAS,UAAU,KAAK,YAAY,IACjD;AAEF,eAAO,KAAK;SACV,MAAM;SACN,MAAM,UAAU,WAAW;SAC3B;SACA,UAAU,CACR;UACE,MAAM;UACN,SAAS;UACT,KAAK;UACN,CACF;SACD;SACD,CAAQ;AACT,aAAKD;AACL;;OAEF,MAAM,iBAAiB,QAAQ,QAAQ,KAAK,QAAQ;OACpD,MAAM,OAAO,mBAAmB,KAAK,QAAQ,MAAM,UAAU,GAAG,eAAe,GAAG;OAClF,MAAM,UAAU,mBAAmB;AAGnC,WAAI,gBACF,QAAO,KAAK;QACV,MAAM;QACN,SAAS;QACT,KAAK;QACN,CAAC;AAEJ,cAAO,KAAK;QACV,MAAM;QACN;QACA;QACA,UAAU,CACR;SACE,MAAM;SACN,SAAS;SACT,KAAK;SACN,CACF;QACD;QACD,CAAQ;OAET,MAAM,YAAY,mBAAmB,KAAK,QAAQ,MAAM,iBAAiB,EAAE,GAAG;AAC9E,WAAI,WAAW;AACb,oBAAY;SACV,MAAM;SACN,SAAS;SACT,KAAK;SACN,CAAC;AACF;;AAEF;AACA;;;;IAIN,MAAM,WAAW,OAAO,IAAI;AAC5B,QAAI,iBAAiB;AAEnB,qBAAgB,WAAW,SAAS,QAAQ,QAAQ,gBAAgB,GAAG;AACvE,qBAAgB,OAAO,SAAS;WAE7B;KACH,MAAM,YAAY,UAAU,QAAQ,QAAQ,OAAO,IAAI,IAAI,YAAY;AAGvE,SAAI,CADc,OAAO,IAAI,GAE3B,UAAS,UAAU,SAAS,QAAQ,QAAQ,gBAAgB,GAAG;AAEjE,uBAAkB;AAClB,qBAAgB,SAAS;AACzB,YAAO,KAAK,gBAAgB;;AAE9B;AACA;;GAGF,KAAK;AACH,QAAI,iBAAiB;AAEnB,qBAAgB,WAAW;AAC3B,qBAAgB,OAAO;;AAGzB;AACA;GAEF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,qBAAqB,MAAM,CAAC;AACxC;AACA;GAEF,KAAK,aAAa;AAChB,sBAAkB;IAClB,MAAM,OAAO,MAAM,OAAO,MAAM,SAAc,KAAK,OAAO,OAAO,GAAG;AAEpE,QAAI,OAAO,OAAO,IAAI,GAAG,SAAS,QAAQ;KACxC,MAAM,OAAO,OAAO,IAAI,IAAI,WAAW;AAEvC,SAAI,kBADQ,IAAI,OAAO,MAAM,KAAK,SAAS,EAClC,KAAK,IAAI,EAAE;AAClB,aAAO,KAAK;OACV,MAAM;OACN,SAAS;OACT,KAAK;OACN,CAAC;AACF,WAAK;AACL;;;AAGJ,QAAI,OAAO,MAAM;KACf,MAAM,8BAAc,IAAI,OAAO,UAAU,KAAK,SAAS;KACvD,MAAME,MAAW,OAAO,SAAS,IAAI,OAAO,OAAO,SAAS,KAAK;KACjE,MAAM,UAAU,CAAC,YAAY,KAAK,IAAI;AACtC,SAAI,WAAW,KAEb;2BADoB,IAAI,OAAO,MAAM,IAAI,KAAK,YAAY,EAC1C,KAAK,IAAI,EAAE;OACzB,MAAM,OAAO,KAAK,QAAS,KAAa,SAAS,MAAM,GAAG,GAAG,IAAI;AACjE,cAAO,OAAO,OAAO,SAAS,GAAG,GAAG;QAClC,MAAM;QACN,MAAM;QACN;QACA;QACD,CAAQ;AACT,YAAK;AACL,WAAI,OAAO,IAAI,YAAY,IACzB;AACF;;;;IAIN,MAAM,EAAE,MAAM,cAAc,eAAe,QAAQ,EAAE;AACrD,QAAI;AAEJ,SAAK,UAAU;AACf,
WAAO,KAAK,KAAK;AACjB;;GAGF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,gBAAgB,MAAM,CAAC;AACnC;AACA;GAEF,KAAK,eAAe;AAClB,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,iBAAiB,QAAQ,GAAG,MAAM,QAAQ;AACtE,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,WAAW;AACd,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,mBAAmB,QAAQ,EAAE;AACzD,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,UAAU;AACb,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,wBAAwB,QAAQ,EAAE;AAC9D,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,aAAa;AAChB,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,oBAAoB,QAAQ,EAAE;AAC1D,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,iBAAiB,QAAQ,EAAE;AACvD,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,oBAAoB,QAAQ,EAAE;AAC1D,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,sBAAkB;IAClB,MAAM,EAAE,MAAM,cAAc,sBAAsB,QAAQ,EAAE;AAC5D,WAAO,KAAK,KAAK;AACjB,QAAI;AACJ;;GAGF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK;KACV,MAAM;KACN,UAAU,CACR;MACE,MAAM;MACN,SAAS,MAAM,WAAW;MAC1B,KAAK,MAAM,WAAW;MACvB,CACF;KACD,KAAK,IAAI,MAAM,WAAW,GAAG;KAC9B,CAAC;AACF;AACA;GAEF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK;KACV,MAAM;KACN,UAAU,CACR;MACE,MAAM;MACN,SAAS,MAAM,WAAW;MAC1B,KAAK,MAAM,WAAW;MACvB,CACF;KACD,KAAK,IAAI,MAAM,WAAW,GAAG;KAC9B,CAAC;AACF;AACA;GAEF,KAAK,SAAS;AACZ,sBAAkB;IAElB,MAAM,WAAW,OAAO,IAAI;AAC5B,QAAI,UAAU,SAAS,UAAU,QAAQ,KAAK,SAAS,WAAW,GAAG,CAEnE,QAAO,KAAK;KACV,MAAM;KACN,SAAS;KACT,KAAK;KACN,CAAC;QAGF,QAAO,KAAK,gBAAgB,MAAM,CAAC;AAErC;AACA;;GAEF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,mBAAmB,MAAM,CAAC;AACtC;AACA;GACF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,wBAAwB,MAAM,CAAC;AAC3C;AACA;GACF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,sBAAsB,MAAM,CAAC;AACzC;AACA;GAEF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,qBAAqB,CAAC;AAClC;AACA;GAEF,KAAK;AACH,sBAAkB;AAElB,WAAO,KAAK,gBAAgB,OAAO,GAAG,CAAC;AACvC;AACA;GAGF,KAAK;AACH,sBAAkB;AAClB,WAAO,KAAK,qBAAqB,MAAM,CAAC;AACxC;AACA;GAGF,KAAK;AACH,sBAAkB;AAElB,QAAI,CADc,OAAO,IAAI,IACb,SAAS,WAAW,IAAI,CACtC,QAAO,KAAK,oBAAoB,MAAM,CAAC;AAEzC;AACA;GAGF;AAEE,WAAO,KAAK,MAAM;AAClB,sBAAkB;AAClB;AACA;;;AAIN,QAAO;;;;;ACjnBT,SAAgB,gBACd,QACA,OAC0B;CAC1B,MAAMC,qBAAmC,EAAE;CAC3C,IAAI,IAAI,QAAQ;AAGhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,mBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,qBAAmB,KAAK;GACtB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,aAAa,WAAW;GAC9B,CAAC;AACF,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EAEA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,qBAAmB,KAAK,SAAS;AACjC,MAAI;OAGJ;AAUJ,QAAO,CANgC;EACrC,MAAM;EACN,UAAU;EACV,KAAK,mBAAmB,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,KAAK;EAC3D,EAEuB,IAAI,EAAE;;;;;ACvChC,SAAgB,eAAe,OAAqC;AAIlE,KAAI,MAAM,MAAM,WAAW,OAAO,CAChC,QAAO,gBAAgB,MAAM;CAG/B,MAAM,QAAQ,MAAM,QAAS,MAAM,yCAAyC;AAC5E,KAAI,QAAQ,GAEV,OAAM,UAAU,MAAM,QACnB,QAAQ,uBAAuB,GAAG,CAClC,QAAQ,oBAAoB,GAAG;CAEpC,MAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,IAAI,MAAM,IAAI,WAAW;AAChE,QAAO;EACL,MAAM;EACN,UAAU,QAAQ,MAAM,KAAM,MAAM,QAAQ;EAC5C,MAAM,MAAM,WAAW;EACvB,KAAK,MAAM,WAAW;EACtB,SAAS,CAAC;EACX;;;;;ACjBH,SAAgB,oBACd,QACA,OAC8B;CAC9B,MAAMC,QAA8B,EAAE;CACtC,IAAI,IAAI,QAAQ;CAChB,IAAIC,YAA0B,EAAE;CAChC,IAAIC,kBAAgC,EAAE;AAEtC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,WAAW;EAEhC,MAAM,YAAY,OAAO,IAAI;AAC7B,cAAY,kBAAkB,UAAU,YAAY,EAAE,CAAC;AACvD,OAAK;YAEE,OAAO,GAAG,SAAS,WAAW;EAErC,IAAI,IAAI,IAAI;AACZ,oBAAkB,EAAE;AAEpB,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;AAChC,mBAAgB,KAAK;IACnB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,aAAa,WAAW,GAAG;IACpF,KAAK,aAAa,WAAW;IAC9B,CAAC;AACF,QAAK;QAGL;AAKJ,MAAI,UAAU,SAAS,GAAG;AACxB,SAAM,KAAK;IACT,MAAM;IACN,MAAM;IACN,YAAY;IACZ,KAAK,GAAG,UAAU,KAAI,SAAQ,KAAK,IA
AI,CAAC,KAAK,GAAG,CAAC,IAAI,gBAClD,KAAI,QAAO,IAAI,IAAI,CACnB,KAAK,KAAK;IACd,CAAC;AAGF,eAAY,EAAE;;AAGhB,MAAI,IAAI;OAGR;AAUJ,QAAO,CANwC;EAC7C,MAAM;EACN;EACA,KAAK,MAAM,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAC5C,EAE2B,IAAI,EAAE;;;;;ACrEpC,SAAgB,cACd,QACA,OACwB;CAExB,MAAM,KADQ,OAAO,OACJ,MAAM,SAAS;CAChC,MAAMC,mBAAiC,EAAE;CACzC,IAAI,IAAI,QAAQ;AAEhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,iBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,mBAAiB,KAAK;GACpB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,aAAa,WAAW;GAC9B,CAAC;AACF,OAAK;OAGL;AAWJ,QAAO,CAP4B;EACjC,MAAM;EACN;EACA,UAAU;EACV,KAAK,KAAK,GAAG,KAAK,iBAAiB,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,KAAK;EACtE,EAEqB,IAAI,EAAE;;;;;AC/B9B,SAAgB,aACd,QACA,OACa;CACb,MAAM,QAAQ,OAAO;CACrB,MAAM,eAAe,OAAO,SAAS,MAAM,KAAK,UAAU,EAAE,IAAI,IAAI;CACpE,MAAM,sBAAsB,OAAO,QAAQ;CAC3C,MAAM,iBAAiB,oBAAoB,WAAW;AAEtD,QAAO;EACL,MAAM;EACN,OAAO;EACP,MAAM;EACN,UAAU,kBAAkB,oBAAoB,YAAY,EAAE,CAAC;EAC/D,KAAK;EACN;;;;;ACfH,SAAgB,eAAe,OAAqC;AAClE,QAAO;EACL,MAAM;EACN,SAAS,MAAM,WAAW;EAC1B,SAAS,CAAC,CAAC,MAAM;EACjB,KAAK,MAAM,OAAO;EACnB;;;;;ACDH,SAAgB,WACd,QACA,OACqB;CACrB,IAAI,IAAI,QAAQ;CAChB,IAAIC,YAAiC;CACrC,MAAMC,OAAuB,EAAE;CAC/B,IAAI,WAAW;AAEf,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAC7C,KAAI,OAAO,GAAG,SAAS,cAAc;AACnC,aAAW;AACX;YAEO,OAAO,GAAG,SAAS,eAAe;AACzC,aAAW;AACX;YAGA,OAAO,GAAG,SAAS,gBAChB,OAAO,GAAG,SAAS,cAEtB;UAEO,OAAO,GAAG,SAAS,WAAW;EACrC,MAAMC,QAAyB,EAAE;EACjC,IAAI,IAAI,IAAI;AAEZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,aAAa,OAAO,GAAG,SAAS,WAAW;GAChE,MAAM,eAAe,OAAO,GAAG,SAAS;GACxC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,UAAU,aAAa,WAAW;AAExC,SAAM,KAAK;IACT,MAAM;IACN,QAAQ,gBAAgB;IACxB,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,QAAQ;IACjE,KAAK;IACN,CAAC;AAEF,QAAK;QAGL;EAIJ,MAAMC,UAAwB;GAC5B,MAAM;GACN;GACA,KAAK,MAAM,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,IAAI;GAC3C;AAED,MAAI,SACF,aAAY;MAGZ,MAAK,KAAK,QAAQ;AAGpB,MAAI,IAAI;OAGR;AAIJ,KAAI,CAAC,UAEH,aAAY;EACV,MAAM;EACN,OAAO,EAAE;EACT,KAAK;EACN;AAWH,QAAO,CARsB;EAC3B,MAAM;EACN,QAAQ;EACR;EACA,SAAS,OAAO,OAAO,WAAW;EAClC,KAAK,CAAC,WAAW,GAAG,KAAK,CAAC,KAAI,QAAO,IAAI,IAAI,CAAC,KAAK,KAAK;EACzD,EAEkB,IAAI,EAAE;;;;;AC3F3B,SAAgB,qBAAwC;AACtD,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACYH,SAAgB,UACd,QACA,OACoB;CACpB,MAAM,QAAQ,OAAO;CACrB,MAAMC,YAA4B,EAAE;CACpC,IAAI,IAAI,QAAQ;AAEhB,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,uBACnB,OAAO,GAAG,SAAS,qBAEtB,KAAI,OAAO,GAAG,SAAS,kBAAkB;AACvC,MAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;EAEF,MAAMC,eAA6B,EAAE;EACrC,IAAI,IAAI,IAAI;AACZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAE7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,WAAW,OAAO,IAAI;AAC5B,OAAI,SAAS,KAAK,aAAa,WAAW,GAAG,EAAE;AAC7C,iBAAa,UAAU,aAAa,SAAS,QAAQ,UAAU,GAAG;AAClE,iBAAa,UAAU,OAAO,IAAI,EAAE;;AAEtC,gBAAa,KAAK;IAChB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,aAAa,WAAW,IAAI,SAAS;IAC9F,KAAK,aAAa,WAAW;IAC9B,CAAC;AACF,QAAK;aAEE,OAAO,GAAG,SAAS,mBAAmB;GAE7C,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,gBAAa,KAAK,eAAe;AACjC,OAAI;aAGJ,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;AACA,OAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;GAGF,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,gBAAa,KAAK,eAAe;AACjC,OAAI;aAEG,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,SAAS;AAEnC,gBAAa,KAAK,gBAAgB,OAAO,GAAG,CAAC;AAC7C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;GAExC,MAAM,CAAC,WAAW,YAAY,WAAW,QAAQ,EAAE;AACnD,gBAAa,KAAK,UAAU;AAC5B,OAAI;aAEG,OAAO,GAAG,SAAS,WAAW;GAErC,MAAM,CAAC,aAAa,YAAY,oBAAoB,QAAQ,EAAE;AAC9D,gBAAa,KAAK,YAAY;AAC9B,OAAI;aAEG,OAAO,GAAG,SAAS,iBAAiB;GAE3C,MAAM,CAAC,cAAc,YAAY,cAAc,QAAQ,EA
AE;AACzD,gBAAa,KAAK,aAAa;AAC/B,OAAI;aAEG,OAAO,GAAG,SAAS,gBAAgB;GAE1C,MAAM,cAAc,aAAa,QAAQ,EAAE;AAC3C,gBAAa,KAAK,YAAY;AAC9B,QAAK;aAEE,OAAO,GAAG,SAAS,MAAM;AAEhC,gBAAa,KAAK,oBAAoB,CAAC;AACvC,QAAK;aAEE,OAAO,GAAG,SAAS,kBAAkB;GAE5C,MAAM,QACF,sDAAsD,KACtD,OAAO,GAAG,QAAQ,GACnB;AACH,OAAI,OAAO;IACT,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,GAAG,MAAM;AACpE,iBAAa,KAAK,eAAe;AACjC,QAAI;SAGJ,MAAK;QAIP,MAAK;AAIT,YAAU,KAAK;GACb,MAAM;GACN,UAAU;GACV,KAAK,aAAa,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,GAAG;GACnD,CAAC;AAEF,MAAI,IAAI;OAGR,MAAK;AAoBT,QAAO,CAhBoB;EACzB,MAAM;EACN,SAAS,MAAM,SAAS;EAExB,cAAc;AACZ,OAAI,MAAM,SAAS,MAAM,MAAM,QAAQ;IACrC,MAAM,QAAQ,MAAM,MAAM,MAAK,MAAK,EAAE,OAAO,QAAQ;AACrD,QAAI,MACF,QAAO,OAAO,MAAM,GAAG,IAAI;;MAG7B;EACJ,OAAO;EACP,KAAK,UAAU,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAChD,EAEiB,IAAI,EAAE;;AAI1B,SAAS,gBACP,QACA,OACoB;CAGpB,MAAM,cAAc,OAAO;CAC3B,MAAMC,cAA8B,EAAE;CACtC,IAAI,IAAI,QAAQ;AAEhB,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,uBACnB,OAAO,GAAG,SAAS,qBAEtB,KAAI,OAAO,GAAG,SAAS,kBAAkB;AACvC,MAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;EAEF,MAAMD,eAA6B,EAAE;EACrC,IAAI,IAAI,IAAI;AAEZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAE7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,WAAW,OAAO,IAAI;AAC5B,gBAAa,KAAK;IAChB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,aAAa,WAAW,IAAI,SAAS;IAC9F,KAAK,aAAa,WAAW;IAC9B,CAAC;AACF,QAAK;aAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;AACA,OAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;GAIF,MAAM,CAAC,sBAAsB,YAAY,gBAAgB,QAAQ,EAAE;AACnE,gBAAa,KAAK,qBAAqB;AACvC,OAAI;aAEG,OAAO,GAAG,SAAS,cAAc;AACxC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,SAAS;AACnC,gBAAa,KAAK,gBAAgB,OAAO,GAAG,CAAC;AAC7C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;QAIL,MAAK;AAIT,cAAY,KAAK;GACf,MAAM;GACN,UAAU;GACV,KAAK,aAAa,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,GAAG;GACnD,CAAC;AAEF,MAAI,IAAI;OAGR,MAAK;AAmBT,QAAO,CAf0B;EAC/B,MAAM;EACN,SAAS,YAAY,SAAS;EAC9B,cAAc;AACZ,OAAI,YAAY,SAAS,YAAY,MAAM,QAAQ;IACjD,MAAM,QAAQ,YAAY,MAAM,MAAK,MAAK,EAAE,OAAO,QAAQ;AAC3D,QAAI,MACF,QAAO,OAAO,MAAM,GAAG,IAAI;;MAG7B;EACJ,OAAO;EACP,KAAK,YAAY,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAClD,EAEuB,IAAI,EAAE;;;;;ACtQhC,SAAgB,gBACd,QACA,OACA,OAC0B;CAC1B,MAAM,OAAO,MAAM,MAAM;CACzB,MAAM,QAAQ,MAAM,MAAM,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE;CACtE,MAAME,qBAAmC,EAAE;CAC3C,IAAI,IAAI,QAAQ;AAEhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,MAAI,aACF,oBAAmB,KAAK;GACtB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,aAAa,WAAW;GAC9B,CAAC;AAEJ,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EAEA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,qBAAmB,KAAK,SAAS;AACjC,MAAI;OAGJ;AAcJ,QAAO,CAVgC;EACrC,MAAM;EACN;EACA;EACA,UAAU;EACV,KAAK,MAAM,KAAK,GAAG,MAAM,IAAI,mBAC1B,KAAI,UAAS,MAAM,IAAI,CACvB,KAAK,KAAK,CAAC;EACf,EAEuB,IAAI,EAAE;;;;;AC9ChC,SAAgB,eACd,QACA,OAC0B;CAC1B,MAAM,YAAY,OAAO;CAGzB,IAAI,OAAO;CACX,IAAI,QAAQ;CAEZ,MAAM,YAAY,UAAU,KAAK,MAAM,yBAAyB;AAChE,KAAI,WAAW;AACb,SAAO,UAAU;EAEjB,MAAM,QAAQ,UAAU,QAAQ,IAAI,MAAM;AAC1C,MAAI,QAAQ,CAAC,KAAK,WAAW,MAAM,EAAE;GAEnC,MAAM,QAAQ,KAAK,wBAAQ,IAAI,OAAO,IAAI,OAAO,EAAE,GAAG,CAAC,MAAM;AAC7D,OAAI,MACF,SAAQ;;QAGT;EAEH,MAAM,QAAQ,UAAU,QAAQ,IAAI,MAAM;EAE1C,MAAM,QAEF,4DAA4D,KAAK,KAAK;AAC1E,MAAI,OAAO;AACT,UAAO,MAAM;AACb,WAAQ,MAAM,MAAM;;;AAIxB,KAAI,CAAC,MACH,SAAQ,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE;CAEtD,MAAMC,WAAyB,EAAE;CACjC,IAAI,IAAI,QAAQ;CAGhB,MAAM,4BAAY,IAAI,OAAO,cAAc,KAAK,SAAS;AAEzD,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,qBACnB,CAAC,UAAU,KAAK,OAAO,GAAG,KAAK,CAElC,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,MAAI,cAAc;GAChB,MAAM,
IAAK,aAAa,SAAiB,eAAe,MAAgB,EAAE,SAAS,UAAU,KAAK,KAAK,EAAE,QAAQ,CAAC;GAClH,MAAM,YAAY,MAAM,KACpB,aAAa,UAAU,MAAM,GAAG,EAAE,GAClC,aAAa;AACjB,YAAS,KAAK;IACZ,MAAM;IACN,UAAU,kBAAkB,aAAa,EAAE,CAAC;IAC5C,KAAK,aAAa,SAAS,QAAQ,SAAS,GAAG,CAAC,QAAQ,gBAAgB,GAAG,IAAI;IAChF,CAAC;;AAEJ,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EACA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,WAAS,KAAK,SAAS;AACvB,MAAI;OAGJ;AAcJ,QAAO,CAVgC;EACrC,MAAM;EACN;EACA;EACA;EACA,KAAK,MAAM,KAAK,GAAG,MAAM,IAAI,SAAS,KAAI,MAAK,EAAE,IAAI,CAAC,KAAK,KAAK,CAAC;EAClE,EAGoB,IACkB,EAAE;;;;;ACzF3C,SAAgB,iBAAgC;AAC9C,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACHH,SAAgB,eACd,QACA,OACe;CACf,MAAM,wBAAwB,OAAO,QAAQ;CAC7C,MAAM,mBAAmB,sBAAsB,WAAW;AAE1D,QAAO;EACL,MAAM;EACN,UAAU,kBAAkB,sBAAsB,YAAY,EAAE,EAAE,iBAAiB;EACnF,KAAK;EACN;;;;;ACKH,SAAgB,yBACd,UACA,IACA,UAAwB,EAAE,EACZ;CAEd,IAAI,gBAAgB,YAAY,IAAI,UAAU,CAAC,QAAQ,kBAAkB,YAAY;AACrF,KAAI,aAAa,SAAS,MAAM,CAE9B,gBAAe,aAAa,QAAQ,SAAS,QAAQ;AAEvD,KAAI,aAAa,KAAK,aAAa,CAEjC,gBAAe,aAAa,QAAQ,cAAc,KAAK;UAEhD,aAAa,KAAK,aAAa,CAEtC,gBAAe,aAAa,QAAQ,qBAAqB,KAAK;CAGhE,MAAM,SAAS,GAAG,MAAM,cAAc,EAAE,CAAC;AAEzC,KAAI,CAAC,UAAU,CAAC,MAAM,QAAQ,OAAO,CACnC,QAAO,EAAE;CAGX,MAAM,MAAM,QAAQ;CACpB,MAAM,OAAO,QAAQ;CACrB,IAAI,oBAAoB;AACxB,KAAI,OAAO,OAAO,QAAQ,WACxB,qBAAoB,IAAI,OAAO,IAAI;CAGrC,IAAI,SAAS,cAAc,kBAAkB;AAI7C,KAAI,QAAQ,OAAO,SAAS,WAC1B,UAAS,KAAK,kBAAkB,IAAI;AAEtC,QAAO;;AAIT,SAAgB,cAAc,QAAuC;AAEnE,KAAI,CAAC,UAAU,CAAC,MAAM,QAAQ,OAAO,CACnC,QAAO,EAAE;CAEX,MAAMC,SAAuB,EAAE;CAC/B,IAAI,IAAI;AACR,UAAS,eAAe,OAAO;AAC/B,QAAO,IAAI,OAAO,QAAQ;EACxB,MAAM,QAAQ,OAAO;AACrB,UAAQ,MAAM,MAAd;GACE,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK,wBAAwB;IAC3B,MAAM,CAAC,aAAa,YAAY,eAAe,QAAQ,EAAE;AACzD,WAAO,KAAK,YAAY;AACxB,QAAI;AACJ;;GAGF,KAAK;AACH,WAAO,KAAK,aAAa,QAAQ,EAAE,CAAC;AACpC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,eAAe,QAAQ,EAAE,CAAC;AACtC,SAAK;AACL;GAEF,KAAK;GACL,KAAK;AACH,WAAO,KAAK,eAAe,OAAO,GAAG,CAAC;AACtC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,gBAAgB,OAAO,GAAG,CAAC;AACvC,SAAK;AACL;GAEF,KAAK;GACL,KAAK,qBAAqB;IACxB,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,WAAO,KAAK,SAAS;AACrB,QAAI;AACJ;;GAGF,KAAK;AACH,WAAO,KAAK,oBAAoB,CAAC;AACjC,SAAK;AACL;GAEF,KAAK,mBAAmB;IACtB,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,WAAO,KAAK,eAAe;AAC3B,QAAI;AACJ;;GAGF,KAAK,cAAc;IACjB,MAAM,CAAC,WAAW,YAAY,WAAW,QAAQ,EAAE;AACnD,WAAO,KAAK,UAAU;AACtB,QAAI;AACJ;;GAGF,KAAK,WAAW;IACd,MAAM,CAAC,oBAAoB,YAAY,oBAAoB,QAAQ,EAAE;AACrE,WAAO,KAAK,mBAAmB;AAC/B,QAAI;AACJ;;GAGF,KAAK,iBAAiB;IACpB,MAAM,CAAC,cAAc,YAAY,cAAc,QAAQ,EAAE;AACzD,WAAO,KAAK,aAAa;AACzB,QAAI;AACJ;;GAGF,KAAK,kBAAkB;IACrB,MAAM,QACF,4DAA4D,KAC5D,MAAM,QAAQ,GACf;AACH,QAAI,OAAO;KACT,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,GAAG,MAAM;AACpE,YAAO,KAAK,eAAe;AAC3B,SAAI;UAGJ,MAAK;AAEP;;GAGF,KAAK;AACH,WAAO,KAAK,gBAAgB,CAAC;AAC7B;AACA;GAEF,KAAK;AACH,WAAO,KAAK,eAAe,OAAO,GAAG,CAAC;AACtC,SAAK;AACL;GAEF;AAEE,SAAK;AACL;;;AAIN,QAAO;;;;;ACxIT,SAAgB,YAAY,QAAgB,UAAU,KAAK,KAAK,IAAI,UAA8B,EAAE,EAAE;CAEpG,MAAM,KAAK,QAAQ,QAAQ;CAG3B,MAAMC,sBAA8C,EAClD,eAAe,QAChB;CAED,IAAIC;AACJ,KAAI,OAAO,QAAQ,SAAS,WAC1B,KAAI,QAAQ;UAEL,QAAQ,QAAQ,OAAO,QAAQ,SAAS,UAAU;EACzD,MAAM,UAAU,QAAQ;AACxB,OAAK,QAAgB,QAAQ,QAAQ,oBAAoB,QAAQ;OAGjE,MAAK,QAAgB,oBAAoB,QAAQ;AAInD,KAAI,MAAM,QAAQ,QAAQ,OAAO,CAC/B,MAAK,MAAM,KAAK,QAAQ,OAEtB,KAAI,MAAM,QAAQ,EAAE,CAClB,IAAG,IAAI,EAAE,IAAI,EAAE,GAAG;KAElB,IAAG,IAAI,EAAE;AAKf,KAAI,MAAM,QAAQ,QAAQ,MAAM,CAC9B,MAAK,MAAM,MAAM,QAAQ,MACvB,KAAI;AACF,KAAG,GAAG;UAED,GAAG;AAGR,UAAQ,MAAM,+CAA+C,EAAE;;AAMrE,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,eAAe;AACtB,IAAG,IAAIC,KAAgB;CACvB,MAAM,2BACD,mBAA2B,WAAW;AAC3C,IAAG,IAAI,yBAAyB;AAChC,IAAG,IAAI,cAAc;AACrB
,IAAG,IAAI,mBAAmB;AAG1B,IAAG,KAAK,MAAM,MAAM,SAAS,sBAAsB,UAAe;EAEhE,MAAM,QADc,MAAM,IACR,MAAM,QAAQ;AAChC,OAAK,MAAM,SAAS,MAAM,QAAQ;AAChC,OAAI,MAAM,SAAS,WAAW,CAAC,MAAM,OAAO,CAAC,MAAM,OACjD;GACF,MAAMC,WAAmB,MAAM,IAAI;GACnC,MAAMC,UAAkB,MAAM,IAAI;GAClC,MAAMC,SAAiB,MAAM;GAC7B,MAAM,SAAS,OAAO;GACtB,MAAM,SAAS,OAAO;GAGtB,MAAM,OAAO,MADG,KAAK,IAAI,GAAG,UAAU,EAAE,KACT;GAC/B,IAAI,IAAI;AACR,UAAO,IAAI,KAAK,WAAW,KAAK,OAAO,OAAO,KAAK,OAAO,KAAO;GACjE,IAAI,QAAQ;AACZ,UAAO,IAAI,QAAQ,KAAK,UAAU,KAAK,IAAI,WAAW,OAAQ;GAC9D,IAAI,IAAI,IAAI;AACZ,UAAO,IAAI,KAAK,WAAW,KAAK,OAAO,OAAO,KAAK,OAAO,KAAO;GACjE,MAAM,SAAS,UAAU,WAAW,KAAK,SAAS,UAAU,MAAM,KAAK;AACvE,SAAM,OAAO,MAAM,QAAQ,EAAE;AAC7B,SAAM,KAAK,WAAW,CAAC;AAEvB,SAAM,KAAK,SAAS,CAAC,CAAC;;GAExB;CAGF,MAAM,YAAY,OAAY,WAAoB;EAChD,MAAM,QAAQ,MAAM;AACpB,MAAI,MAAM,IAAI,WAAW,IACvB,QAAO;EACT,MAAM,WAAW,MAAM,IAAI,QAAQ;EACnC,MAAM,WAAW,MAAM,IAAI,QAAQ;AACnC,MAAI,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,EAAE;AAC9C,OAAI,CAAC,QAAQ;IACX,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAI,EAAE;AACvC,UAAM,UAAU;;AAElB,SAAM,OAAO;AACb,UAAO;;AAET,SAAO;;AAGT,IAAG,OAAO,MAAM,OAAO,OAAO,QAAQ,SAAS;AAG/C,IAAG,SAAS,MAAM,SAAS,QAAa,QAAgB;EACtD,MAAM,QAAQ,OAAO;EACrB,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK,MAAM,GAAG;EAC9C,MAAM,MAAM,MAAM;EAClB,MAAM,cAAc,KAAK,SAAS,mBAAmB,IAAI,CAAC,CAAC;EAC3D,MAAM,WAAW,QAAQ;AAGzB,SAAO,sCAAsC,YAAY,eAAe,SAAS,QAFhE,UAAU,MAAM,GAAG,IAAI,GAAG,WAEuD;;kCAEpE,SAAS,aAAa,CAAC;iDACR,YAAY,IAAI,EACvD,cACD,CAAC;;;;;CAOR,MAAM,mBAAmB,OAAY,WAAoB;AACvD,MAAI,MAAM,IAAI,MAAM,SAAS,IAC3B,QAAO;EACT,MAAM,QAAQ,aAAa,KAAK,MAAM,IAAI,MAAM,MAAM,IAAI,CAAC;AAC3D,MAAI,CAAC,MACH,QAAO;AACT,MAAI,CAAC,QAAQ;GACX,MAAM,KAAK,MAAM;GACjB,MAAM,QAAQ,MAAM,KAAK,aAAa,QAAQ,EAAE;AAChD,SAAM,UAAU;AAChB,SAAM,SAAS,MAAM;;AAEvB,QAAM,OAAO,MAAM,GAAG;AACtB,SAAO;;AAGT,IAAG,OAAO,MAAM,OAAO,UAAU,aAAa,gBAAgB;AAC9D,IAAG,SAAS,MAAM,aAAa,QAAa,QAAgB;EAC1D,MAAM,KAAK,OAAO,KAAK;AACvB,SAAO,mDAAmD,GAAG,+DAA+D,GAAG;;AAGjI,QAAO;;AAGT,SAAgB,oBAAoB;AAOlC,QANW,IAAI,WAAW;EACxB,MAAM;EACN,SAAS;EACT,aAAa;EACb,QAAQ;EACT,CAAC;;AAIJ,SAAgB,eAAe,IAAgB,SAAiB;AAE9D,QADa,GAAG,OAAO,QAAQ"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "stream-markdown-parser",
  "type": "module",
- "version": "0.0.2",
+ "version": "0.0.3",
  "packageManager": "pnpm@10.19.0",
  "description": "Pure markdown parser and renderer utilities with streaming support - framework agnostic",
  "author": "Simon He",