stream-markdown-parser 0.0.22 → 0.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +39 -3
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -96,9 +96,36 @@ const VOID_TAGS$1 = new Set([
 function applyFixHtmlInlineTokens(md) {
   md.core.ruler.push("fix_html_inline_tokens", (state) => {
     const toks = state.tokens ?? [];
+    const tagStack = [];
     for (let i = 0; i < toks.length; i++) {
       const t = toks[i];
-      if (t.type === "html_block"
+      if (t.type === "html_block") {
+        const tag = t.content?.match(/<([^\s>/]+)/)?.[1] ?? "";
+        if (!/<\s*\/\s*[^\s>]+\s*>/.test(t.content || "")) tagStack.push([tag, i]);
+        else if (tagStack.length > 0 && tagStack[tagStack.length - 1][0] === tag) tagStack.pop();
+        continue;
+      } else if (tagStack.length > 0) {
+        if (t.type === "paragraph_open" || t.type === "paragraph_close") {
+          toks.splice(i, 1);
+          i--;
+          continue;
+        }
+        const content = t.content || "";
+        const isClosingTag = (/* @__PURE__ */ new RegExp(`<\\s*\\/\\s*${tagStack[tagStack.length - 1][0]}\\s*>`)).test(content);
+        if (content) {
+          const [, openIndex] = tagStack[tagStack.length - 1];
+          const openToken = toks[openIndex];
+          openToken.content = `${openToken.content || ""}\n${content}`;
+          if (openToken.loading !== false) openToken.loading = !isClosingTag;
+        }
+        if (isClosingTag) tagStack.pop();
+        toks.splice(i, 1);
+        i--;
+      } else continue;
+    }
+    for (let i = 0; i < toks.length; i++) {
+      const t = toks[i];
+      if (t.type === "html_block") {
         const tag = t.content?.match(/<([^\s>/]+)/)?.[1] ?? "";
         if ([
           "br",
@@ -106,14 +133,19 @@ function applyFixHtmlInlineTokens(md) {
           "img",
           "input",
           "link",
-          "meta"
+          "meta",
+          "div",
+          "p",
+          "ul",
+          "li"
         ].includes(tag)) continue;
         t.type = "inline";
+        const loading = t.content?.includes(`</${tag}>`) ? false : t.loading !== void 0 ? t.loading : true;
         t.children = [{
           type: "html_block",
           content: t.content,
           tag: t.content?.match(/<([^\s>/]+)/)?.[1] ?? "",
-          loading
+          loading
         }];
         continue;
       }
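The new `tagStack` pass targets streamed HTML that markdown-it splits across several block tokens (for example `<div ...>` followed by a blank line and more text): the opening `html_block` is remembered on the stack, stray `paragraph_open`/`paragraph_close` tokens inside it are dropped, and later content is folded back into the opening token, whose `loading` flag stays `true` until a matching closing tag appears in the folded content. The second loop then converts the remaining simple `html_block` tokens into `inline` tokens, now also for `div`, `p`, `ul` and `li`, carrying the computed `loading` value into the child token. Below is a minimal sketch of the merge behavior run on a hand-built token list; the `Tok` shape and the `mergeHtmlBlocks` helper are illustrative stand-ins for markdown-it's Token and the core-ruler pass, not exports of stream-markdown-parser.

```ts
// Illustrative sketch of the tag-stack merge added in dist/index.js 0.0.23.
interface Tok {
  type: string
  content?: string
  loading?: boolean
}

function mergeHtmlBlocks(toks: Tok[]): Tok[] {
  const tagStack: Array<[string, number]> = []
  for (let i = 0; i < toks.length; i++) {
    const t = toks[i]
    if (t.type === 'html_block') {
      const tag = t.content?.match(/<([^\s>/]+)/)?.[1] ?? ''
      // An opening block is pushed; a block that already carries a closing
      // tag for the current opener pops it instead.
      if (!/<\s*\/\s*[^\s>]+\s*>/.test(t.content || ''))
        tagStack.push([tag, i])
      else if (tagStack.length > 0 && tagStack[tagStack.length - 1][0] === tag)
        tagStack.pop()
      continue
    }
    if (tagStack.length === 0)
      continue
    if (t.type === 'paragraph_open' || t.type === 'paragraph_close') {
      toks.splice(i, 1) // drop paragraph wrappers inside the open HTML tag
      i--
      continue
    }
    const content = t.content || ''
    const top = tagStack[tagStack.length - 1]
    const isClosingTag = new RegExp(`<\\s*\\/\\s*${top[0]}\\s*>`).test(content)
    if (content) {
      const openToken = toks[top[1]]
      openToken.content = `${openToken.content || ''}\n${content}` // fold into opener
      if (openToken.loading !== false)
        openToken.loading = !isClosingTag // still loading until the tag closes
    }
    if (isClosingTag)
      tagStack.pop()
    toks.splice(i, 1)
    i--
  }
  return toks
}

// Mid-stream snapshot: the closing </div> has not arrived yet.
const toks: Tok[] = [
  { type: 'html_block', content: '<div class="card">' },
  { type: 'paragraph_open' },
  { type: 'inline', content: 'hello **wor' },
  { type: 'paragraph_close' },
]
console.log(mergeHtmlBlocks(toks))
// → [ { type: 'html_block', content: '<div class="card">\nhello **wor', loading: true } ]
```

Once a later chunk brings the closing `</div>` into content that gets folded in, `isClosingTag` matches, the stack entry is popped, and the opening token's `loading` flips to `false`.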
@@ -3042,6 +3074,10 @@ function processTokens(tokens) {
       result.push(parseMathBlock(tokens[i]));
       i += 1;
       break;
+    case "inline":
+      result.push(...parseInlineTokens(token.children || []));
+      i += 1;
+      break;
     default:
       i += 1;
       break;
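Previously, top-level `inline` tokens fell through to the `default` branch of `processTokens` and were skipped, so an `html_block` that the core rule above converts to `type: "inline"` never reached the output tree. The new `case "inline"` parses its children instead. A rough sketch of the dispatch shape, with simplified stubs standing in for the package's internal `parseMathBlock` and `parseInlineTokens`:

```ts
// Illustrative dispatch shape only; the token/node types and stub parsers
// below are stand-ins, not the package's real internals.
interface Token {
  type: string
  content?: string
  children?: Token[]
}
type ParsedNode = Record<string, unknown>

const parseMathBlock = (t: Token): ParsedNode => ({ type: 'math_block', content: t.content })
const parseInlineTokens = (toks: Token[]): ParsedNode[] =>
  toks.map(t => ({ type: t.type, content: t.content }))

function processTokens(tokens: Token[]): ParsedNode[] {
  const result: ParsedNode[] = []
  let i = 0
  while (i < tokens.length) {
    const token = tokens[i]
    switch (token.type) {
      case 'math_block':
        result.push(parseMathBlock(tokens[i]))
        i += 1
        break
      case 'inline':
        // New in 0.0.23: parse children of top-level inline tokens (e.g. the
        // html_block tokens converted to `inline` above) instead of dropping them.
        result.push(...parseInlineTokens(token.children || []))
        i += 1
        break
      default:
        i += 1
        break
    }
  }
  return result
}

// A converted html_block now produces a node instead of disappearing:
console.log(processTokens([
  { type: 'inline', children: [{ type: 'html_block', content: '<video src="a.mp4">' }] },
]))
// → [ { type: 'html_block', content: '<video src="a.mp4">' } ]
```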
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
[single-line source map (version 3 JSON) regenerated to match the new dist/index.js; minified content omitted]
''),\n title: null,\n text: code,\n children: [\n { type: 'text', content: inner, raw: inner },\n ],\n loading,\n raw: code,\n } as ParsedNode,\n index,\n ]\n }\n }\n\n if (tag === 'p' || tag === 'div') {\n const inner = extractInner(code) || ''\n return [\n {\n type: 'paragraph',\n children: [\n { type: 'text', content: inner, raw: inner },\n ],\n raw: code,\n } as ParsedNode,\n i + 1,\n ]\n }\n // Fallback: treat as inline code (preserve previous behavior)\n return [\n {\n type: 'inline_code',\n code,\n raw: code,\n } as InlineCodeNode,\n i + 1,\n ]\n}\n","import type { ImageNode, MarkdownToken } from '../../types'\n\nexport function parseImageToken(token: MarkdownToken, loading = false): ImageNode {\n // Some call-sites pass an outer/inline token whose children contain the\n // actual image token (with attrs). Prefer token.attrs when present; when\n // absent, search children for the first child that carries attrs.\n let attrs = token.attrs ?? []\n // If the parent token has no attrs, prefer attrs from the inner child image\n // token. Remember which child provided attrs so we can prefer its content\n // over the parent's `token.content` (the parent may contain the raw\n // markdown string like ``).\n let childWithAttrs: any = null\n if ((!attrs || attrs.length === 0) && Array.isArray(token.children)) {\n for (const child of token.children) {\n // child.attrs may be null in markdown-it; check and use if populated\n const childAttrs = (child as any)?.attrs\n if (Array.isArray(childAttrs) && childAttrs.length > 0) {\n attrs = childAttrs\n childWithAttrs = child\n break\n }\n }\n }\n const src = String(attrs.find(attr => attr[0] === 'src')?.[1] ?? '')\n const altAttr = attrs.find(attr => attr[0] === 'alt')?.[1]\n // Prefer a non-empty alt attribute. If attrs were sourced from an inner\n // child token prefer that child's `content` over the parent's `token.content`\n // because the parent may contain the raw markdown instead of the plain alt\n // text.\n let alt = ''\n if (altAttr != null && String(altAttr).length > 0) {\n alt = String(altAttr)\n }\n else if (childWithAttrs?.content != null && String(childWithAttrs.content).length > 0) {\n alt = String(childWithAttrs.content)\n }\n else if (Array.isArray(childWithAttrs?.children) && childWithAttrs.children[0]?.content) {\n // If the inner image token has children (e.g. a text token) prefer that\n // child's content when the child token's own `content` is empty.\n alt = String(childWithAttrs.children[0].content)\n }\n else if (Array.isArray(token.children) && token.children[0]?.content) {\n alt = String(token.children[0].content)\n }\n else if (token.content != null && String(token.content).length > 0) {\n alt = String(token.content)\n }\n\n const _title = attrs.find(attr => attr[0] === 'title')?.[1] ?? null\n const title = _title === null ? null : String(_title)\n const raw = String(token.content ?? '')\n\n return {\n type: 'image',\n src,\n alt,\n title,\n raw,\n loading,\n }\n}\n","import type { InlineCodeNode, MarkdownToken } from '../../types'\n\nexport function parseInlineCodeToken(token: MarkdownToken): InlineCodeNode {\n const code = String(token.content ?? 
'')\n return {\n type: 'inline_code',\n code,\n raw: code,\n }\n}\n","import type { InsertNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseInsertToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: InsertNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let insText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between ins_open and ins_close\n while (i < tokens.length && tokens[i].type !== 'ins_close') {\n insText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: InsertNode = {\n type: 'insert',\n children,\n raw: `++${String(insText)}++`,\n }\n\n // Skip to after ins_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { LinkNode, MarkdownToken } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseLinkToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: LinkNode\n nextIndex: number\n} {\n const openToken = tokens[startIndex]\n const attrs = openToken.attrs ?? []\n const href = String(attrs.find(attr => attr[0] === 'href')?.[1] ?? '')\n const _title = attrs.find(attr => attr[0] === 'title')?.[1] ?? null\n const title = _title === null ? null : String(_title)\n\n let i = startIndex + 1\n const linkTokens: MarkdownToken[] = []\n let loading = true\n\n // Collect all tokens between link_open and link_close\n while (i < tokens.length && tokens[i].type !== 'link_close') {\n linkTokens.push(tokens[i])\n i++\n }\n\n if (tokens[i]?.type === 'link_close') {\n loading = false\n }\n\n // Parse the collected tokens as inline content\n const children = parseInlineTokens(linkTokens)\n const linkText = children\n .map((node) => {\n const nodeAny = node as unknown as { content?: string, raw?: string }\n if ('content' in node)\n return String(nodeAny.content ?? '')\n return String(nodeAny.raw ?? '')\n })\n .join('')\n\n const node: LinkNode = {\n type: 'link',\n href,\n title,\n text: linkText,\n children,\n raw: String(`[${linkText}](${href}${title ? ` \"${title}\"` : ''})`),\n loading,\n }\n\n // Skip to after link_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, MathInlineNode } from '../../types'\n\n// Parse a math_inline token (inline math expressions)\nexport function parseMathInlineToken(token: MarkdownToken): MathInlineNode {\n return {\n type: 'math_inline',\n content: String(token.content ?? ''),\n loading: !!token.loading,\n raw: token.raw!,\n }\n}\n","import type { MarkdownToken, ReferenceNode } from '../../types'\n\n// Parse a reference token from markdown-it\nexport function parseReferenceToken(token: MarkdownToken): ReferenceNode {\n const id = String(token.content ?? '')\n const raw = String(token.markup ?? `[${token.content ?? 
''}]`)\n return {\n type: 'reference',\n id,\n raw,\n }\n}\n","import type {\n MarkdownToken,\n ParsedNode,\n StrikethroughNode,\n} from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseStrikethroughToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: StrikethroughNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let sText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between s_open and s_close\n while (i < tokens.length && tokens[i].type !== 's_close') {\n sText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: StrikethroughNode = {\n type: 'strikethrough',\n children,\n raw: `~~${sText}~~`,\n }\n\n // Skip to after s_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, StrongNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseStrongToken(\n tokens: MarkdownToken[],\n startIndex: number,\n raw?: string,\n): {\n node: StrongNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let strongText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between strong_open and strong_close\n while (i < tokens.length && tokens[i].type !== 'strong_close') {\n strongText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens, raw))\n\n const node: StrongNode = {\n type: 'strong',\n children,\n raw: `**${String(strongText)}**`,\n }\n\n // Skip to after strong_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, SubscriptNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseSubscriptToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: SubscriptNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let subText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between sub_open and sub_close (if applicable)\n while (i < tokens.length && tokens[i].type !== 'sub_close') {\n subText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const startContent = String(tokens[startIndex].content ?? '')\n const display = subText || startContent\n const node: SubscriptNode = {\n type: 'subscript',\n children: children.length > 0\n ? children\n : [\n {\n type: 'text',\n // Fallback to the collected inner text (e.g., \"2\" in H~2~O)\n content: display,\n raw: display,\n },\n ],\n raw: `~${display}~`,\n }\n\n // Skip to after sub_close\n const nextIndex = i < tokens.length ? 
i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, SuperscriptNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseSuperscriptToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: SuperscriptNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let supText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between sup_open and sup_close (if applicable)\n while (i < tokens.length && tokens[i].type !== 'sup_close') {\n supText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: SuperscriptNode = {\n type: 'superscript',\n children:\n children.length > 0\n ? children\n : [\n {\n type: 'text',\n // Fallback to the collected inner text (e.g., \"2\" in x^2^)\n content: supText || String(tokens[startIndex].content ?? ''),\n raw: supText || String(tokens[startIndex].content ?? ''),\n },\n ],\n raw: `^${supText || String(tokens[startIndex].content ?? '')}^`,\n }\n\n // Skip to after sup_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, TextNode } from '../../types'\n\nexport function parseTextToken(token: MarkdownToken): TextNode {\n const content = String(token.content ?? '')\n return {\n type: 'text',\n content,\n raw: content,\n }\n}\n","import type { LinkNode, MarkdownToken, ParsedNode, TextNode } from '../../types'\nimport { parseCheckboxInputToken, parseCheckboxToken } from './checkbox-parser'\nimport { parseEmojiToken } from './emoji-parser'\nimport { parseEmphasisToken } from './emphasis-parser'\nimport { parseFenceToken } from './fence-parser'\nimport { parseFootnoteRefToken } from './footnote-ref-parser'\nimport { parseHardbreakToken } from './hardbreak-parser'\nimport { parseHighlightToken } from './highlight-parser'\nimport { parseHtmlInlineCodeToken } from './html-inline-code-parser'\nimport { parseImageToken } from './image-parser'\nimport { parseInlineCodeToken } from './inline-code-parser'\nimport { parseInsertToken } from './insert-parser'\nimport { parseLinkToken } from './link-parser'\nimport { parseMathInlineToken } from './math-inline-parser'\nimport { parseReferenceToken } from './reference-parser'\nimport { parseStrikethroughToken } from './strikethrough-parser'\nimport { parseStrongToken } from './strong-parser'\nimport { parseSubscriptToken } from './subscript-parser'\nimport { parseSuperscriptToken } from './superscript-parser'\nimport { parseTextToken } from './text-parser'\n\n// Precompiled regexes used frequently in inline parsing\nconst STRONG_PAIR_RE = /\\*\\*([\\s\\S]*?)\\*\\*/\n\n// Shared helper for building safe dynamic regex parts\nfunction escapeRegExp(str: string) {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n}\n\n// Helper: detect likely URLs/hrefs (autolinks). 
Extracted so the\n// detection logic is easy to tweak and test.\nconst AUTOLINK_PROTOCOL_RE = /^(?:https?:\\/\\/|mailto:|ftp:\\/\\/)/i\nconst AUTOLINK_GENERIC_RE = /:\\/\\//\n\nexport function isLikelyUrl(href?: string) {\n if (!href)\n return false\n return AUTOLINK_PROTOCOL_RE.test(href) || AUTOLINK_GENERIC_RE.test(href)\n}\n\n// Process inline tokens (for text inside paragraphs, headings, etc.)\nexport function parseInlineTokens(tokens: MarkdownToken[], raw?: string, pPreToken?: MarkdownToken): ParsedNode[] {\n if (!tokens || tokens.length === 0)\n return []\n\n const result: ParsedNode[] = []\n let currentTextNode: TextNode | null = null\n\n let i = 0\n // Note: strong-token normalization and list-item normalization are\n // applied during markdown-it parsing via core rules (plugins that\n // run after 'inline'). Inline parsers should receive normalized\n // children and only focus on parsing.\n\n // Helpers to manage text node merging and pushing parsed nodes\n function resetCurrentTextNode() {\n currentTextNode = null\n }\n\n function handleEmphasisAndStrikethrough(content: string, token: MarkdownToken): boolean {\n // strikethrough (~~)\n if (/[^~]*~{2,}[^~]+/.test(content)) {\n let idx = content.indexOf('~~')\n if (idx === -1)\n idx = 0\n const _text = content.slice(0, idx)\n if (_text) {\n if (currentTextNode) {\n currentTextNode.content += _text\n currentTextNode.raw += _text\n }\n else {\n currentTextNode = {\n type: 'text',\n content: String(_text ?? ''),\n raw: String(token.content ?? ''),\n }\n result.push(currentTextNode)\n }\n }\n const strikethroughContent = content.slice(idx)\n const { node } = parseStrikethroughToken([\n { type: 's_open', tag: 's', content: '', markup: '*', info: '', meta: null },\n { type: 'text', tag: '', content: strikethroughContent.replace(/~/g, ''), markup: '', info: '', meta: null },\n { type: 's_close', tag: 's', content: '', markup: '*', info: '', meta: null },\n ], 0)\n resetCurrentTextNode()\n pushNode(node)\n i++\n return true\n }\n\n // strong (**)\n if (/\\*\\*/.test(content)) {\n const openIdx = content.indexOf('**')\n const beforeText = openIdx > -1 ? content.slice(0, openIdx) : ''\n if (beforeText) {\n pushText(beforeText, beforeText)\n }\n\n if (openIdx === -1) {\n i++\n return true\n }\n\n // find the first matching closing ** pair in the content\n const exec = STRONG_PAIR_RE.exec(content)\n let inner = ''\n let after = ''\n if (exec && typeof exec.index === 'number') {\n inner = exec[1]\n after = content.slice(exec.index + exec[0].length)\n }\n else {\n // no closing pair found: mid-state, take rest as inner\n inner = content.slice(openIdx + 2)\n after = ''\n }\n\n const { node } = parseStrongToken([\n { type: 'strong_open', tag: 'strong', content: '', markup: '*', info: '', meta: null },\n { type: 'text', tag: '', content: inner, markup: '', info: '', meta: null },\n { type: 'strong_close', tag: 'strong', content: '', markup: '*', info: '', meta: null },\n ], 0, raw)\n\n resetCurrentTextNode()\n pushNode(node)\n\n if (after) {\n handleToken({\n type: 'text',\n content: after,\n raw: after,\n })\n i--\n }\n\n i++\n return true\n }\n\n // emphasis (*)\n if (/[^*]*\\*[^*]+/.test(content)) {\n let idx = content.indexOf('*')\n if (idx === -1)\n idx = 0\n const _text = content.slice(0, idx)\n if (_text) {\n if (currentTextNode) {\n currentTextNode.content += _text\n currentTextNode.raw += _text\n }\n else {\n currentTextNode = { type: 'text', content: String(_text ?? ''), raw: String(token.content ?? 
'') }\n result.push(currentTextNode)\n }\n }\n const emphasisContent = content.slice(idx)\n const { node } = parseEmphasisToken([\n { type: 'em_open', tag: 'em', content: '', markup: '*', info: '', meta: null },\n { type: 'text', tag: '', content: emphasisContent.replace(/\\*/g, ''), markup: '', info: '', meta: null },\n { type: 'em_close', tag: 'em', content: '', markup: '*', info: '', meta: null },\n ], 0)\n resetCurrentTextNode()\n pushNode(node)\n i++\n return true\n }\n\n return false\n }\n\n function handleInlineCodeContent(content: string, _token: MarkdownToken): boolean {\n if (!/`[^`]*/.test(content))\n return false\n\n // Close any current text node and handle inline code\n resetCurrentTextNode()\n const code_start = content.indexOf('`')\n const code_end = content.indexOf('`', code_start + 1)\n const _text = content.slice(0, code_start)\n const codeContent = code_end === -1 ? content.slice(code_start) : content.slice(code_start, code_end)\n const after = code_end === -1 ? '' : content.slice(code_end + 1)\n if (_text) {\n // Try to re-run emphasis/strong parsing on the fragment before the code span\n // but avoid mutating the outer token index `i` (handlers sometimes increment it).\n const handled = handleEmphasisAndStrikethrough(_text, _token)\n // restore index so we don't skip tokens in the outer loop\n if (!handled) {\n pushText(_text, _text)\n }\n else {\n i--\n }\n }\n\n const code = codeContent.replace(/`/g, '')\n pushParsed({\n type: 'inline_code',\n code,\n raw: String(code ?? ''),\n } as ParsedNode)\n\n // afterCode 可能也存在很多情况包括多个 code,我们递归处理 --- IGNORE ---\n if (after) {\n handleToken({\n type: 'text',\n content: after,\n raw: String(after ?? ''),\n })\n i--\n }\n else if (code_end === -1) {\n // 要把下一个 token 也合并进来,把类型变成 text\n const nextToken = tokens[i + 1]\n if (nextToken) {\n let fixedAfter = after\n for (let j = i + 1; j < tokens.length; j++) {\n fixedAfter += String(((tokens[j].content ?? '') + (tokens[j].markup ?? '')))\n }\n i = tokens.length - 1\n handleToken({\n type: 'text',\n content: fixedAfter,\n raw: String(fixedAfter ?? ''),\n })\n }\n }\n i++\n return true\n }\n\n function pushParsed(node: ParsedNode) {\n // ensure any ongoing text node is closed when pushing non-text nodes\n resetCurrentTextNode()\n result.push(node)\n }\n\n function pushToken(token: MarkdownToken) {\n // push a raw token into result as a ParsedNode (best effort cast)\n resetCurrentTextNode()\n result.push(token as ParsedNode)\n }\n\n // backward-compatible alias used by existing call sites that pass parsed nodes\n function pushNode(node: ParsedNode) {\n pushParsed(node)\n }\n\n function pushText(content: string, raw?: string) {\n if (currentTextNode) {\n currentTextNode.content += content\n currentTextNode.raw += raw ?? content\n }\n else {\n currentTextNode = {\n type: 'text',\n content: String(content ?? ''),\n raw: String(raw ?? content ?? 
''),\n } as TextNode\n result.push(currentTextNode)\n }\n }\n\n while (i < tokens.length) {\n const token = tokens[i] as MarkdownToken\n handleToken(token)\n }\n\n function handleToken(token: MarkdownToken) {\n switch (token.type) {\n case 'text': {\n handleTextToken(token)\n break\n }\n\n case 'softbreak':\n if (currentTextNode) {\n // Append newline to the current text node\n currentTextNode.content += '\\n'\n currentTextNode.raw += '\\n' // Assuming raw should also reflect the newline\n }\n // Don't create a node for softbreak itself, just modify text\n i++\n break\n\n case 'code_inline':\n pushNode(parseInlineCodeToken(token))\n i++\n break\n case 'html_inline': {\n const [node, index] = parseHtmlInlineCodeToken(token, tokens, i)\n pushNode(node)\n i = index\n break\n }\n\n case 'link_open': {\n handleLinkOpen(token)\n break\n }\n\n case 'image':\n resetCurrentTextNode()\n pushNode(parseImageToken(token))\n i++\n break\n\n case 'strong_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseStrongToken(tokens, i, token.content)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'em_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseEmphasisToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 's_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseStrikethroughToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'mark_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseHighlightToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'ins_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseInsertToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'sub_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseSubscriptToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'sup_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseSuperscriptToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'sub':\n resetCurrentTextNode()\n pushNode({\n type: 'subscript',\n children: [\n {\n type: 'text',\n content: String(token.content ?? ''),\n raw: String(token.content ?? ''),\n },\n ],\n raw: `~${String(token.content ?? '')}~`,\n })\n i++\n break\n\n case 'sup':\n resetCurrentTextNode()\n pushNode({\n type: 'superscript',\n children: [\n {\n type: 'text',\n content: String(token.content ?? ''),\n raw: String(token.content ?? ''),\n },\n ],\n raw: `^${String(token.content ?? '')}^`,\n })\n i++\n break\n\n case 'emoji': {\n resetCurrentTextNode()\n const preToken = tokens[i - 1]\n if (preToken?.type === 'text' && /\\|:-+/.test(String(preToken.content ?? 
''))) {\n // 处理表格中的 emoji,跳过\n pushText('', '')\n }\n else {\n pushNode(parseEmojiToken(token))\n }\n i++\n break\n }\n case 'checkbox':\n resetCurrentTextNode()\n pushNode(parseCheckboxToken(token))\n i++\n break\n case 'checkbox_input':\n resetCurrentTextNode()\n pushNode(parseCheckboxInputToken(token))\n i++\n break\n case 'footnote_ref':\n resetCurrentTextNode()\n pushNode(parseFootnoteRefToken(token))\n i++\n break\n\n case 'hardbreak':\n resetCurrentTextNode()\n pushNode(parseHardbreakToken())\n i++\n break\n\n case 'fence': {\n resetCurrentTextNode()\n // Handle fenced code blocks with language specifications\n pushNode(parseFenceToken(tokens[i]))\n i++\n break\n }\n\n case 'math_inline': {\n resetCurrentTextNode()\n pushNode(parseMathInlineToken(token))\n i++\n break\n }\n\n case 'reference': {\n handleReference(token)\n break\n }\n\n default:\n // Skip unknown token types, ensure text merging stops\n pushToken(token)\n i++\n break\n }\n }\n\n function handleTextToken(token: MarkdownToken) {\n // 合并连续的 text 节点\n let index = result.length - 1\n let content = String(token.content ?? '').replace(/\\\\/g, '')\n if (content.startsWith(')') && result[result.length - 1]?.type === 'link') {\n content = content.slice(1)\n }\n\n if (content.endsWith('undefined') && !raw?.endsWith('undefined')) {\n content = content.slice(0, -9)\n }\n for (index; index >= 0; index--) {\n const item = result[index]\n if (item.type === 'text') {\n currentTextNode = null\n content = item.content + content\n continue\n }\n break\n }\n\n if (index < result.length - 1)\n result.splice(index + 1)\n\n const nextToken = tokens[i + 1]\n if (pPreToken?.type === 'list_item_open' && /^\\d$/.test(content)) {\n i++\n return\n }\n if (content === '`' || content === '|' || content === '$' || /^\\*+$/.test(content)) {\n i++\n return\n }\n if (!nextToken && /[^\\]]\\s*\\(\\s*$/.test(content)) {\n content = content.replace(/\\(\\s*$/, '')\n }\n if (handleCheckboxLike(content))\n return\n const preToken = tokens[i - 1]\n if ((content === '[' && !nextToken?.markup?.includes('*')) || (content === ']' && !preToken.markup?.includes('*'))) {\n i++\n return\n }\n if (handleInlineCodeContent(content, token))\n return\n if (handleEmphasisAndStrikethrough(content, token))\n return\n if (handleInlineImageContent(content, token))\n return\n\n const textNode = parseTextToken({ ...token, content })\n\n if (handleInlineLinkContent(content, token))\n return\n if (currentTextNode) {\n // Merge with the previous text node\n currentTextNode.content += textNode.content.replace(/(\\*+|\\(|\\\\)$/, '')\n currentTextNode.raw += textNode.raw\n }\n else {\n const maybeMath = preToken?.tag === 'br' && tokens[i - 2]?.content === '['\n // Start a new text node\n const nextToken = tokens[i + 1]\n if (!nextToken)\n textNode.content = textNode.content.replace(/(\\*+|\\(|\\\\)$/, '')\n\n currentTextNode = textNode\n currentTextNode.center = maybeMath\n result.push(currentTextNode)\n }\n i++\n }\n\n function handleLinkOpen(token: MarkdownToken) {\n // mirror logic previously in the switch-case for 'link_open'\n resetCurrentTextNode()\n const href = token.attrs?.find(([name]) => name === 'href')?.[1]\n // 如果 text 不在[]里说明,它不是一个link, 当 text 处理\n\n if (raw && tokens[i + 1].type === 'text') {\n const text = String(tokens[i + 1]?.content ?? '')\n const escText = escapeRegExp(text)\n const reg = new RegExp(`\\\\[${escText}\\\\s*\\\\]`)\n if (!reg.test(raw)) {\n // If this link_open comes from an autolinkified URL (e.g. 
http://...)\n // treat it as a real link node rather than plain text. Otherwise\n // fall back to pushing plain text.\n const hrefAttr = token.attrs?.find(([name]) => name === 'href')?.[1] ?? ''\n // Only treat as autolink when the original raw source does not contain\n // any square-bracket link text (i.e. it was not written as [text](...)).\n const isAutolink = (!raw.includes('[')) && isLikelyUrl(String(hrefAttr))\n if (isAutolink) {\n resetCurrentTextNode()\n const node = {\n type: 'link',\n href: String(hrefAttr),\n title: null,\n text,\n children: [\n { type: 'text', content: text, raw: text },\n ],\n loading: false,\n } as ParsedNode\n pushParsed(node)\n i += 3\n return\n }\n\n pushText(text, text)\n i += 3\n return\n }\n }\n if (raw && href) {\n const loadingMath = new RegExp(`\\\\(\\\\s*${escapeRegExp(href)}\\\\s*\\\\)`)\n const pre = result.length > 0 ? result[result.length - 1] : undefined as ParsedNode | undefined\n const loading = !loadingMath.test(raw)\n if (loading && pre) {\n let preText = ''\n if (pre) {\n if (pre.type === 'link')\n preText = String((pre as LinkNode).text ?? '')\n else if (pre.type === 'text')\n preText = String((pre as TextNode).content ?? '')\n else if (((pre as { content?: unknown }).content) && typeof (pre as { content?: unknown }).content === 'string')\n preText = String((pre as { content?: string }).content ?? '').slice(1, -1)\n }\n const isLinkMatch = new RegExp(`\\\\[${escapeRegExp(preText)}\\\\s*\\\\]\\\\(`)\n if (isLinkMatch.test(raw)) {\n const text = String(preText ?? '')\n resetCurrentTextNode()\n const node = {\n type: 'link',\n href: '',\n title: null,\n text,\n children: [\n { type: 'text', content: text, raw: text },\n ],\n loading,\n } as ParsedNode\n result.splice(result.length - 1, 1, node) // remove the pre node\n i += 3\n if (String(tokens[i]?.content ?? '') === '.')\n i++\n return\n }\n }\n }\n const { node, nextIndex } = parseLinkToken(tokens, i)\n i = nextIndex\n // Determine loading state conservatively: if the link token parser\n // marked it as loading already, keep it; otherwise compute from raw\n // and href as a fallback so unclosed links remain marked as loading.\n const hrefAttr = token.attrs?.find(([name]) => name === 'href')?.[1]\n const hrefStr = String(hrefAttr ?? '')\n // Only override the link parser's default loading state when we\n // actually have an href to check against the raw source. If the\n // tokenizer emitted a link_open without an href (partial tokenizers\n // may do this), prefer the parseLinkToken's initial loading value\n // (which defaults to true for mid-state links).\n if (raw && hrefStr) {\n // More robust: locate the first \"](\" after the link text and see if\n // there's a matching ')' that closes the href. 
This avoids false\n // positives when other parentheses appear elsewhere in the source.\n const openIdx = raw.indexOf('](')\n if (openIdx === -1) {\n // No explicit link start found in raw — be conservative and keep\n // the parser's default loading value.\n }\n else {\n const closeIdx = raw.indexOf(')', openIdx + 2)\n if (closeIdx === -1) {\n node.loading = true\n }\n else {\n // Check that the href inside the parens corresponds to this token\n const inside = raw.slice(openIdx + 2, closeIdx)\n if (inside.includes(hrefStr))\n node.loading = false\n else\n node.loading = true\n }\n }\n }\n pushParsed(node)\n }\n\n function handleReference(token: MarkdownToken) {\n // mirror previous in-switch 'reference' handling\n resetCurrentTextNode()\n const nextToken = tokens[i + 1]\n const preToken = tokens[i - 1]\n const preResult = result[result.length - 1]\n\n const nextIsTextNotStartingParens = nextToken?.type === 'text' && !((String(nextToken.content ?? '')).startsWith('('))\n const preIsTextEndingBracketOrOnlySpace = preToken?.type === 'text' && /\\]$|^\\s*$/.test(String(preToken.content ?? ''))\n\n if (nextIsTextNotStartingParens || preIsTextEndingBracketOrOnlySpace) {\n pushNode(parseReferenceToken(token))\n }\n else if (nextToken && nextToken.type === 'text') {\n nextToken.content = String(token.markup ?? '') + String(nextToken.content ?? '')\n }\n else if (preResult && preResult.type === 'text') {\n preResult.content = String(preResult.content ?? '') + String(token.markup ?? '')\n preResult.raw = String(preResult.raw ?? '') + String(token.markup ?? '')\n }\n i++\n }\n\n function handleInlineLinkContent(content: string, _token: MarkdownToken): boolean {\n const linkStart = content.indexOf('[')\n if (linkStart === -1)\n return false\n\n let textNodeContent = content.slice(0, linkStart)\n const linkEnd = content.indexOf('](', linkStart)\n if (linkEnd !== -1) {\n const textToken = tokens[i + 2]\n let text = content.slice(linkStart + 1, linkEnd)\n if (text.includes('[')) {\n const secondLinkStart = text.indexOf('[')\n // adjust original linkStart and text\n textNodeContent += content.slice(0, linkStart + secondLinkStart + 1)\n const newLinkStart = linkStart + secondLinkStart + 1\n text = content.slice(newLinkStart + 1, linkEnd)\n }\n const nextToken = tokens[i + 1]\n if (content.endsWith('](') && nextToken?.type === 'link_open' && textToken) {\n const last = tokens[i + 4]\n let index = 4\n let loading = true\n if (last?.type === 'text' && last.content === ')') {\n index++\n loading = false\n }\n else if (last?.type === 'text' && last.content === '.') {\n i++\n }\n\n if (textNodeContent) {\n pushText(textNodeContent, textNodeContent)\n }\n pushParsed({\n type: 'link',\n href: String(textToken.content ?? ''),\n title: null,\n text,\n children: [{ type: 'text', content: text, raw: text }],\n loading,\n } as ParsedNode)\n i += index\n return true\n }\n\n const linkContentEnd = content.indexOf(')', linkEnd)\n const href = linkContentEnd !== -1 ? content.slice(linkEnd + 2, linkContentEnd) : ''\n const loading = linkContentEnd === -1\n\n if (textNodeContent) {\n pushText(textNodeContent, textNodeContent)\n }\n pushParsed({\n type: 'link',\n href,\n title: null,\n text,\n children: [{ type: 'text', content: text, raw: text }],\n loading,\n } as ParsedNode)\n\n const afterText = linkContentEnd !== -1 ? 
content.slice(linkContentEnd + 1) : ''\n if (afterText) {\n handleToken({ type: 'text', content: afterText, raw: afterText } as unknown as MarkdownToken)\n i--\n }\n i++\n return true\n }\n\n return false\n }\n\n function handleInlineImageContent(content: string, token: MarkdownToken): boolean {\n const imageStart = content.indexOf('![')\n if (imageStart === -1)\n return false\n\n const textNodeContent = content.slice(0, imageStart)\n if (!currentTextNode) {\n currentTextNode = {\n type: 'text',\n content: textNodeContent,\n raw: textNodeContent,\n }\n }\n else {\n currentTextNode.content += textNodeContent\n }\n result.push(currentTextNode)\n currentTextNode = null // Reset current text node\n pushParsed(parseImageToken(token, true) as ParsedNode)\n i++\n return true\n }\n\n function handleCheckboxLike(content: string): boolean {\n // Detect checkbox-like syntax at the start of a list item e.g. [x] or [ ]\n if (!(content?.startsWith('[') && pPreToken?.type === 'list_item_open'))\n return false\n\n const _content = content.slice(1)\n const w = _content.match(/[^\\s\\]]/)\n if (w === null) {\n i++\n return true\n }\n // If the first non-space/']' char is x/X treat as a checkbox input\n if (w && /x/i.test(w[0])) {\n const checked = w[0] === 'x' || w[0] === 'X'\n pushParsed({\n type: 'checkbox_input',\n checked,\n raw: checked ? '[x]' : '[ ]',\n } as ParsedNode)\n i++\n return true\n }\n\n return false\n }\n\n return result\n}\n","import type { BlockquoteNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseBlockquote(\n tokens: MarkdownToken[],\n index: number,\n): [BlockquoteNode, number] {\n const blockquoteChildren: ParsedNode[] = []\n let j = index + 1\n\n // Process blockquote content until closing tag is found\n while (j < tokens.length && tokens[j].type !== 'blockquote_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n blockquoteChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: String(contentToken.content ?? ''),\n })\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n // Handle nested lists - use parseList directly for proper nested list support\n const [listNode, newIndex] = parseList(tokens, j)\n blockquoteChildren.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const blockquoteNode: BlockquoteNode = {\n type: 'blockquote',\n children: blockquoteChildren,\n raw: blockquoteChildren.map(child => child.raw).join('\\n'),\n }\n\n return [blockquoteNode, j + 1] // Skip blockquote_close\n}\n","import type { CodeBlockNode, MarkdownToken } from '../../types'\nimport { parseFenceToken } from '../inline-parsers/fence-parser'\n\nexport function parseCodeBlock(token: MarkdownToken): CodeBlockNode {\n // If this code block is actually a diff (some markdown-it backends\n // classify fences vs code_block differently), delegate to the\n // fence parser to preserve original/updated fields.\n if (token.info?.startsWith('diff')) {\n return parseFenceToken(token)\n }\n\n const contentStr = String(token.content ?? 
'')\n const match = contentStr.match(/ type=\"application\\/vnd\\.ant\\.([^\"]+)\"/)\n if (match?.[1]) {\n // 需要把 <antArtifact> 标签去掉\n // mutate token.content safely by assigning the cleaned string\n token.content = contentStr\n .replace(/<antArtifact[^>]*>/g, '')\n .replace(/<\\/antArtifact>/g, '')\n }\n const hasMap = Array.isArray(token.map) && token.map.length === 2\n return {\n type: 'code_block',\n language: match ? match[1] : String(token.info ?? ''),\n code: String(token.content ?? ''),\n raw: String(token.content ?? ''),\n loading: !hasMap,\n }\n}\n","import type {\n DefinitionItemNode,\n DefinitionListNode,\n MarkdownToken,\n ParsedNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseDefinitionList(\n tokens: MarkdownToken[],\n index: number,\n): [DefinitionListNode, number] {\n const items: DefinitionItemNode[] = []\n let j = index + 1\n let termNodes: ParsedNode[] = []\n let definitionNodes: ParsedNode[] = []\n\n while (j < tokens.length && tokens[j].type !== 'dl_close') {\n if (tokens[j].type === 'dt_open') {\n // Process term\n const termToken = tokens[j + 1]\n termNodes = parseInlineTokens(termToken.children || [])\n j += 3 // Skip dt_open, inline, dt_close\n }\n else if (tokens[j].type === 'dd_open') {\n // Process definition\n let k = j + 1\n definitionNodes = []\n\n while (k < tokens.length && tokens[k].type !== 'dd_close') {\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n definitionNodes.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], String(contentToken.content ?? '')),\n raw: String(contentToken.content ?? ''),\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else {\n k++\n }\n }\n\n // Add definition item\n if (termNodes.length > 0) {\n items.push({\n type: 'definition_item',\n term: termNodes,\n definition: definitionNodes,\n raw: `${termNodes.map(term => term.raw).join('')}: ${definitionNodes\n .map(def => def.raw)\n .join('\\n')}`,\n })\n\n // Reset term nodes\n termNodes = []\n }\n\n j = k + 1 // Skip dd_close\n }\n else {\n j++\n }\n }\n\n const definitionListNode: DefinitionListNode = {\n type: 'definition_list',\n items,\n raw: items.map(item => item.raw).join('\\n'),\n }\n\n return [definitionListNode, j + 1] // Skip dl_close\n}\n","import type { FootnoteNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseFootnote(\n tokens: MarkdownToken[],\n index: number,\n): [FootnoteNode, number] {\n const token = tokens[index]\n const meta = (token.meta ?? {}) as unknown as { label?: number | string }\n const id = String(meta?.label ?? '0')\n const footnoteChildren: ParsedNode[] = []\n let j = index + 1\n\n while (j < tokens.length && tokens[j].type !== 'footnote_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n footnoteChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: String(contentToken.content ?? 
''),\n })\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else {\n j++\n }\n }\n\n const footnoteNode: FootnoteNode = {\n type: 'footnote',\n id,\n children: footnoteChildren,\n raw: `[^${id}]: ${footnoteChildren.map(child => child.raw).join('\\n')}`,\n }\n\n return [footnoteNode, j + 1] // Skip footnote_close\n}\n","import type { HeadingNode, MarkdownToken } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseHeading(\n tokens: MarkdownToken[],\n index: number,\n): HeadingNode {\n const token = tokens[index]\n const levelStr = String(token.tag?.substring(1) ?? '1')\n const headingLevel = Number.parseInt(levelStr, 10)\n const headingContentToken = tokens[index + 1]\n const headingContent = String(headingContentToken.content ?? '')\n\n return {\n type: 'heading',\n level: headingLevel,\n text: headingContent,\n children: parseInlineTokens(headingContentToken.children || []),\n raw: headingContent,\n }\n}\n","import type { MarkdownToken, MathBlockNode } from '../../types'\n\n// Parse a math_block token (block/display math expressions)\nexport function parseMathBlock(token: MarkdownToken): MathBlockNode {\n return {\n type: 'math_block',\n content: String(token.content ?? ''),\n loading: !!token.loading,\n raw: String(token.raw ?? ''),\n }\n}\n","import type {\n MarkdownToken,\n TableCellNode,\n TableNode,\n TableRowNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseTable(\n tokens: MarkdownToken[],\n index: number,\n): [TableNode, number] {\n let j = index + 1\n let headerRow: TableRowNode | null = null\n const rows: TableRowNode[] = []\n let isHeader = false\n\n while (j < tokens.length && tokens[j].type !== 'table_close') {\n if (tokens[j].type === 'thead_open') {\n isHeader = true\n j++\n }\n else if (tokens[j].type === 'thead_close') {\n isHeader = false\n j++\n }\n else if (\n tokens[j].type === 'tbody_open'\n || tokens[j].type === 'tbody_close'\n ) {\n j++\n }\n else if (tokens[j].type === 'tr_open') {\n const cells: TableCellNode[] = []\n let k = j + 1\n\n while (k < tokens.length && tokens[k].type !== 'tr_close') {\n if (tokens[k].type === 'th_open' || tokens[k].type === 'td_open') {\n const isHeaderCell = tokens[k].type === 'th_open'\n const contentToken = tokens[k + 1]\n const content = String(contentToken.content ?? '')\n\n cells.push({\n type: 'table_cell',\n header: isHeaderCell || isHeader,\n children: parseInlineTokens(contentToken.children || [], content),\n raw: content,\n })\n\n k += 3 // Skip th_open/td_open, inline, th_close/td_close\n }\n else {\n k++\n }\n }\n\n const rowNode: TableRowNode = {\n type: 'table_row',\n cells,\n raw: cells.map(cell => cell.raw).join('|'),\n }\n\n if (isHeader) {\n headerRow = rowNode\n }\n else {\n rows.push(rowNode)\n }\n\n j = k + 1 // Skip tr_close\n }\n else {\n j++\n }\n }\n\n if (!headerRow) {\n // Default empty header if none found\n headerRow = {\n type: 'table_row',\n cells: [],\n raw: '',\n }\n }\n\n const tableNode: TableNode = {\n type: 'table',\n header: headerRow,\n rows,\n loading: tokens[index].loading ?? 
false,\n raw: [headerRow, ...rows].map(row => row.raw).join('\\n'),\n }\n\n return [tableNode, j + 1] // Skip table_close\n}\n","import type { ThematicBreakNode } from '../../types'\n\nexport function parseThematicBreak(): ThematicBreakNode {\n return {\n type: 'thematic_break',\n raw: '---',\n }\n}\n","import type {\n ListItemNode,\n ListNode,\n MarkdownToken,\n ParsedNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseFenceToken } from '../inline-parsers/fence-parser'\nimport { parseAdmonition } from './admonition-parser'\nimport { parseBlockquote } from './blockquote-parser'\nimport { parseCodeBlock } from './code-block-parser'\nimport { parseDefinitionList } from './definition-list-parser'\nimport { parseFootnote } from './footnote-parser'\nimport { parseHeading } from './heading-parser'\nimport { parseMathBlock } from './math-block-parser'\nimport { parseTable } from './table-parser'\nimport { parseThematicBreak } from './thematic-break-parser'\n\nexport function parseList(\n tokens: MarkdownToken[],\n index: number,\n): [ListNode, number] {\n const token = tokens[index]\n const listItems: ListItemNode[] = []\n let j = index + 1\n\n while (\n j < tokens.length\n && tokens[j].type !== 'bullet_list_close'\n && tokens[j].type !== 'ordered_list_close'\n ) {\n if (tokens[j].type === 'list_item_open') {\n // if (tokens[j].markup === '*') {\n // j++\n // continue\n // }\n const itemChildren: ParsedNode[] = []\n let k = j + 1\n while (k < tokens.length && tokens[k].type !== 'list_item_close') {\n // Handle different block types inside list items\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n const preToken = tokens[k - 1]\n const contentStr = String(contentToken.content ?? '')\n if (/\\n\\d+$/.test(contentStr)) {\n contentToken.content = contentStr.replace(/\\n\\d+$/, '')\n contentToken.children?.splice(-1, 1)\n }\n itemChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], String(contentToken.content ?? ''), preToken),\n raw: String(contentToken.content ?? 
''),\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (tokens[k].type === 'blockquote_open') {\n // Parse blockquote within list item\n const [blockquoteNode, newIndex] = parseBlockquote(tokens, k)\n itemChildren.push(blockquoteNode)\n k = newIndex\n }\n else if (\n tokens[k].type === 'bullet_list_open'\n || tokens[k].type === 'ordered_list_open'\n ) {\n if (tokens[k].markup === '*') {\n k++\n continue\n }\n // Parse nested list\n const [nestedListNode, newIndex] = parseNestedList(tokens, k)\n itemChildren.push(nestedListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'code_block') {\n // Parse code block\n itemChildren.push(parseCodeBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'fence') {\n // Parse fenced code block\n itemChildren.push(parseFenceToken(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'math_block') {\n // Parse math block\n itemChildren.push(parseMathBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'table_open') {\n // Parse table\n const [tableNode, newIndex] = parseTable(tokens, k)\n itemChildren.push(tableNode)\n k = newIndex\n }\n else if (tokens[k].type === 'dl_open') {\n // Parse definition list\n const [defListNode, newIndex] = parseDefinitionList(tokens, k)\n itemChildren.push(defListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'footnote_open') {\n // Parse footnote\n const [footnoteNode, newIndex] = parseFootnote(tokens, k)\n itemChildren.push(footnoteNode)\n k = newIndex\n }\n else if (tokens[k].type === 'heading_open') {\n // Parse heading (though headings in lists are unusual)\n const headingNode = parseHeading(tokens, k)\n itemChildren.push(headingNode)\n k += 3 // Skip heading_open, inline, heading_close\n }\n else if (tokens[k].type === 'hr') {\n // Parse thematic break\n itemChildren.push(parseThematicBreak())\n k += 1\n }\n else if (tokens[k].type === 'container_open') {\n // Handle admonition containers (warning, info, note, tip, danger, caution)\n const match\n = /^::: ?(warning|info|note|tip|danger|caution) ?(.*)$/.exec(\n String(tokens[k].info ?? ''),\n )\n if (match) {\n const [admonitionNode, newIndex] = parseAdmonition(tokens, k, match)\n itemChildren.push(admonitionNode)\n k = newIndex\n }\n else {\n k += 1 // Skip unknown container types\n }\n }\n else {\n k += 1\n }\n }\n\n listItems.push({\n type: 'list_item',\n children: itemChildren,\n raw: itemChildren.map(child => child.raw).join(''),\n })\n\n j = k + 1 // Move past list_item_close\n }\n else {\n j += 1\n }\n }\n\n const listNode: ListNode = {\n type: 'list',\n ordered: token.type === 'ordered_list_open',\n // markdown-it may include attrs like [['start','2']] on ordered_list_open\n start: (() => {\n if (token.attrs && token.attrs.length) {\n const found = token.attrs.find(a => a[0] === 'start')\n if (found) {\n const parsed = Number(found[1])\n return Number.isFinite(parsed) && parsed !== 0 ? 
parsed : 1\n }\n }\n return undefined\n })(),\n items: listItems,\n raw: listItems.map(item => item.raw).join('\\n'),\n }\n\n return [listNode, j + 1] // Move past list_close\n}\n\n// Enhanced function to handle nested lists properly\nfunction parseNestedList(\n tokens: MarkdownToken[],\n index: number,\n): [ListNode, number] {\n // We can directly use parseList since we're in the same file\n // This avoids circular dependency issues\n const nestedToken = tokens[index]\n const nestedItems: ListItemNode[] = []\n let j = index + 1\n\n while (\n j < tokens.length\n && tokens[j].type !== 'bullet_list_close'\n && tokens[j].type !== 'ordered_list_close'\n ) {\n if (tokens[j].type === 'list_item_open') {\n const itemChildren: ParsedNode[] = []\n let k = j + 1\n\n while (k < tokens.length && tokens[k].type !== 'list_item_close') {\n // Handle different block types inside list items\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n const preToken = tokens[k - 1]\n itemChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], String(contentToken.content ?? ''), preToken),\n raw: String(contentToken.content ?? ''),\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[k].type === 'bullet_list_open'\n || tokens[k].type === 'ordered_list_open'\n ) {\n if (tokens[k].markup === '*') {\n k++\n continue\n }\n\n // Handle deeper nested lists\n const [deeperNestedListNode, newIndex] = parseNestedList(tokens, k)\n itemChildren.push(deeperNestedListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'code_block') {\n itemChildren.push(parseCodeBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'fence') {\n itemChildren.push(parseFenceToken(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'math_block') {\n // Parse math block in nested lists\n itemChildren.push(parseMathBlock(tokens[k]))\n k += 1\n }\n else {\n // Skip other token types in nested lists for simplicity\n k += 1\n }\n }\n\n nestedItems.push({\n type: 'list_item',\n children: itemChildren,\n raw: itemChildren.map(child => child.raw).join(''),\n })\n\n j = k + 1 // Move past list_item_close\n }\n else {\n j += 1\n }\n }\n\n const nestedListNode: ListNode = {\n type: 'list',\n ordered: nestedToken.type === 'ordered_list_open',\n start: (() => {\n if (nestedToken.attrs && nestedToken.attrs.length) {\n const found = nestedToken.attrs.find(a => a[0] === 'start')\n if (found) {\n const parsed = Number(found[1])\n return Number.isFinite(parsed) && parsed !== 0 ? parsed : 1\n }\n }\n return undefined\n })(),\n items: nestedItems,\n raw: nestedItems.map(item => item.raw).join('\\n'),\n }\n\n return [nestedListNode, j + 1] // Move past list_close\n}\n","import type { AdmonitionNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseAdmonition(\n tokens: MarkdownToken[],\n index: number,\n match: RegExpExecArray,\n): [AdmonitionNode, number] {\n const kind = String(match[1] ?? 'note')\n const title = String(match[2] ?? 
(kind.charAt(0).toUpperCase() + kind.slice(1)))\n const admonitionChildren: ParsedNode[] = []\n let j = index + 1\n\n while (j < tokens.length && tokens[j].type !== 'container_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n if (contentToken) {\n admonitionChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: String(contentToken.content ?? ''),\n })\n }\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n // Handle nested lists - use parseList directly for proper nested list support\n const [listNode, newIndex] = parseList(tokens, j)\n admonitionChildren.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const admonitionNode: AdmonitionNode = {\n type: 'admonition',\n kind,\n title,\n children: admonitionChildren,\n raw: `:::${kind} ${title}\\n${admonitionChildren\n .map(child => child.raw)\n .join('\\n')}\\n:::`,\n }\n\n return [admonitionNode, j + 1] // Skip container_close\n}\n","import type { AdmonitionNode, MarkdownToken, ParsedNode, TextNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseContainer(\n tokens: MarkdownToken[],\n index: number,\n): [AdmonitionNode, number] {\n const openToken = tokens[index]\n\n // Determine kind and optional title\n let kind = 'note'\n let title = ''\n\n const typeMatch = openToken.type.match(/^container_(\\w+)_open$/)\n if (typeMatch) {\n kind = typeMatch[1]\n // some implementations set info to remaining title text\n const info = String(openToken.info ?? '').trim()\n if (info && !info.startsWith(':::')) {\n // if info looks like 'warning title', drop leading kind token\n const maybe = info.replace(new RegExp(`^${kind}`), '').trim()\n if (maybe)\n title = maybe\n }\n }\n else {\n // container_open: info usually contains the marker like ' warning Title'\n const info = String(openToken.info ?? '').trim()\n\n const match\n // eslint-disable-next-line regexp/no-super-linear-backtracking\n = /^:{1,3}\\s*(warning|info|note|tip|danger|caution)\\s*(.*)$/i.exec(info)\n if (match) {\n kind = match[1]\n title = String(match[2] ?? '')\n }\n }\n\n if (!title)\n title = kind.charAt(0).toUpperCase() + kind.slice(1)\n\n const children: ParsedNode[] = []\n let j = index + 1\n\n // Accept closing tokens: 'container_close' or 'container_<kind>_close'\n const closeType = new RegExp(`^container_${kind}_close$`)\n\n while (\n j < tokens.length\n && tokens[j].type !== 'container_close'\n && !closeType.test(tokens[j].type)\n ) {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n if (contentToken) {\n const childrenArr = (contentToken.children as MarkdownToken[]) || []\n let i = -1\n for (let k = childrenArr.length - 1; k >= 0; k--) {\n const t = childrenArr[k] as TextNode\n if (t.type === 'text' && /:+/.test(t.content)) {\n i = k\n break\n }\n }\n const _children = i !== -1 ? childrenArr.slice(0, i) : childrenArr\n children.push({\n type: 'paragraph',\n children: parseInlineTokens(_children || []),\n raw: String(contentToken.content ?? 
'').replace(/\\n:+$/, '').replace(/\\n\\s*:::\\s*$/, ''),\n })\n }\n j += 3\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n const [listNode, newIndex] = parseList(tokens, j)\n children.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const admonitionNode: AdmonitionNode = {\n type: 'admonition',\n kind,\n title,\n children,\n raw: `:::${kind} ${title}\\n${children.map(c => c.raw).join('\\n')}\\n:::`,\n }\n\n // Skip the closing token\n const closingIndex = j\n return [admonitionNode, closingIndex + 1]\n}\n","import type { HardBreakNode } from '../../types'\n\nexport function parseHardBreak(): HardBreakNode {\n return {\n type: 'hardbreak',\n raw: '\\\\\\n',\n }\n}\n","import type { HtmlBlockNode, MarkdownToken } from '../../types'\n\n// Common void tags that don't require a closing tag\nconst VOID_TAGS = new Set([\n 'area',\n 'base',\n 'br',\n 'col',\n 'embed',\n 'hr',\n 'img',\n 'input',\n 'link',\n 'meta',\n 'param',\n 'source',\n 'track',\n 'wbr',\n])\n\n// Cache for dynamic closing-tag regexes per tag name\nconst CLOSE_TAG_RE_CACHE = new Map<string, RegExp>()\n\nexport function parseHtmlBlock(token: MarkdownToken): HtmlBlockNode {\n const raw = String(token.content ?? '')\n\n // Non-element html blocks (comments, doctypes, processing instructions) are non-closable\n if (/^\\s*<!--/.test(raw) || /^\\s*<!/.test(raw) || /^\\s*<\\?/.test(raw)) {\n return {\n type: 'html_block',\n content: raw,\n raw,\n tag: '',\n loading: false,\n }\n }\n\n // Extract first tag name (lowercased) like div, p, section, etc.\n const tagMatch = raw.match(/^\\s*<([A-Z][\\w:-]*)/i)\n const tag = (tagMatch?.[1] || '').toLowerCase()\n\n // Handle unknown or malformed tag gracefully\n if (!tag) {\n return {\n type: 'html_block',\n content: raw,\n raw,\n tag: '',\n loading: false,\n }\n }\n\n // Self-closing first tag like <img ... />\n const selfClosing = /^\\s*<[^>]*\\/\\s*>/.test(raw)\n const isVoid = VOID_TAGS.has(tag)\n\n // Already closed somewhere in the block (case-insensitive)\n let closeRe = CLOSE_TAG_RE_CACHE.get(tag)\n if (!closeRe) {\n closeRe = new RegExp(`<\\\\/\\\\s*${tag}\\\\b`, 'i')\n CLOSE_TAG_RE_CACHE.set(tag, closeRe)\n }\n const hasClosing = closeRe.test(raw)\n\n const loading = !(isVoid || selfClosing || hasClosing)\n\n const content = loading\n ? `${raw.replace(/<[^>]*$/, '')}\\n</${tag}>`\n : raw\n\n return {\n type: 'html_block',\n content,\n raw,\n tag,\n loading,\n }\n}\n","import type { MarkdownToken, ParagraphNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseParagraph(\n tokens: MarkdownToken[],\n index: number,\n): ParagraphNode {\n const paragraphContentToken = tokens[index + 1]\n const paragraphContent = String(paragraphContentToken.content ?? 
'')\n\n return {\n type: 'paragraph',\n children: parseInlineTokens(paragraphContentToken.children || [], paragraphContent),\n raw: paragraphContent,\n }\n}\n","import type { MarkdownIt } from 'markdown-it-ts'\nimport type { MarkdownToken, ParsedNode, ParseOptions } from '../types'\nimport { parseInlineTokens } from './inline-parsers'\nimport { parseFenceToken } from './inline-parsers/fence-parser'\nimport { parseAdmonition } from './node-parsers/admonition-parser'\nimport { parseBlockquote } from './node-parsers/blockquote-parser'\nimport { parseCodeBlock } from './node-parsers/code-block-parser'\nimport { parseContainer } from './node-parsers/container-parser'\nimport { parseDefinitionList } from './node-parsers/definition-list-parser'\nimport { parseFootnote } from './node-parsers/footnote-parser'\nimport { parseHardBreak } from './node-parsers/hardbreak-parser'\nimport { parseHeading } from './node-parsers/heading-parser'\nimport { parseHtmlBlock } from './node-parsers/html-block-parser'\nimport { parseList } from './node-parsers/list-parser'\nimport { parseMathBlock } from './node-parsers/math-block-parser'\nimport { parseParagraph } from './node-parsers/paragraph-parser'\nimport { parseTable } from './node-parsers/table-parser'\nimport { parseThematicBreak } from './node-parsers/thematic-break-parser'\n\nexport function parseMarkdownToStructure(\n markdown: string,\n md: MarkdownIt,\n options: ParseOptions = {},\n): ParsedNode[] {\n // Ensure markdown is a string — guard against null/undefined inputs from callers\n let safeMarkdown = (markdown ?? '').toString().replace(/([^\\\\])\\right/g, '$1\\\\right')\n if (safeMarkdown.endsWith('- *')) {\n // 放置markdown 解析 - * 会被处理成多个 ul >li 嵌套列表\n safeMarkdown = safeMarkdown.replace(/- \\*$/, '- \\\\*')\n }\n if (/\\n\\s*-\\s*$/.test(safeMarkdown)) {\n // 此时 markdown 解析会出错要跳过\n safeMarkdown = safeMarkdown.replace(/\\n\\s*-\\s*$/, '\\n')\n }\n else if (/\\n[[(]\\n*$/.test(safeMarkdown)) {\n // 此时 markdown 解析会出错要跳过\n safeMarkdown = safeMarkdown.replace(/(\\n\\[|\\n\\()+\\n*$/g, '\\n')\n }\n\n // Get tokens from markdown-it\n const tokens = md.parse(safeMarkdown, {})\n // Defensive: ensure tokens is an array\n if (!tokens || !Array.isArray(tokens))\n return []\n\n // Allow consumers to transform tokens before processing\n const pre = options.preTransformTokens\n const post = options.postTransformTokens\n let transformedTokens = tokens as unknown as MarkdownToken[]\n if (pre && typeof pre === 'function') {\n transformedTokens = pre(transformedTokens) || transformedTokens\n }\n\n // Process the tokens into our structured format\n let result = processTokens(transformedTokens)\n\n // Backwards compatible token-level post hook: if provided and returns\n // a modified token array, re-process tokens and override node-level result.\n if (post && typeof post === 'function') {\n const postResult = post(transformedTokens)\n if (Array.isArray(postResult)) {\n // Backwards compatibility: if the hook returns an array of tokens\n // (they have a `type` string property), re-process them into nodes.\n const first = (postResult as unknown[])[0] as unknown\n const firstType = (first as Record<string, unknown>)?.type\n if (first && typeof firstType === 'string') {\n result = processTokens(postResult as unknown as MarkdownToken[])\n }\n else {\n // Otherwise assume it returned ParsedNode[] and use it as-is\n result = postResult as unknown as ParsedNode[]\n }\n }\n }\n return result\n}\n\n// Process markdown-it tokens into our structured format\nexport function 
processTokens(tokens: MarkdownToken[]): ParsedNode[] {\n // Defensive: ensure tokens is an array\n if (!tokens || !Array.isArray(tokens))\n return []\n\n const result: ParsedNode[] = []\n let i = 0\n // Note: table token normalization is applied during markdown-it parsing\n // via the `applyFixTableTokens` plugin (core.ruler.after('block')).\n // Link/strong/list-item fixes are applied during the inline stage by\n // their respective plugins. That keeps parsing-time fixes centralized\n // and avoids ad-hoc post-processing here.\n while (i < tokens.length) {\n const token = tokens[i]\n switch (token.type) {\n case 'container_warning_open':\n case 'container_info_open':\n case 'container_note_open':\n case 'container_tip_open':\n case 'container_danger_open':\n case 'container_caution_open':\n case 'container_error_open': {\n const [warningNode, newIndex] = parseContainer(tokens, i)\n result.push(warningNode)\n i = newIndex\n break\n }\n\n case 'heading_open':\n result.push(parseHeading(tokens, i))\n i += 3 // Skip heading_open, inline, heading_close\n break\n\n case 'paragraph_open':\n result.push(parseParagraph(tokens, i))\n i += 3 // Skip paragraph_open, inline, paragraph_close\n break\n\n case 'html_block':\n result.push(parseHtmlBlock(token))\n i += 1\n break\n case 'code_block':\n result.push(parseCodeBlock(tokens[i]))\n i += 1\n break\n\n case 'fence':\n result.push(parseFenceToken(tokens[i]))\n i += 1\n break\n\n case 'bullet_list_open':\n case 'ordered_list_open': {\n const [listNode, newIndex] = parseList(tokens, i)\n result.push(listNode)\n i = newIndex\n break\n }\n\n case 'hr':\n result.push(parseThematicBreak())\n i += 1\n break\n\n case 'blockquote_open': {\n const [blockquoteNode, newIndex] = parseBlockquote(tokens, i)\n result.push(blockquoteNode)\n i = newIndex\n break\n }\n\n case 'table_open': {\n const [tableNode, newIndex] = parseTable(tokens, i)\n result.push(tableNode)\n i = newIndex\n break\n }\n\n case 'dl_open': {\n const [definitionListNode, newIndex] = parseDefinitionList(tokens, i)\n result.push(definitionListNode)\n i = newIndex\n break\n }\n\n case 'footnote_open': {\n const [footnoteNode, newIndex] = parseFootnote(tokens, i)\n result.push(footnoteNode)\n i = newIndex\n break\n }\n\n case 'container_open': {\n const match\n = /^::: ?(warning|info|note|tip|danger|caution|error) ?(.*)$/.exec(\n String(token.info ?? 
''),\n )\n if (match) {\n const [admonitionNode, newIndex] = parseAdmonition(tokens, i, match)\n result.push(admonitionNode)\n i = newIndex\n }\n else {\n i += 1 // Not a container type we handle, skip\n }\n break\n }\n\n case 'hardbreak':\n result.push(parseHardBreak())\n i++\n break\n\n case 'math_block':\n result.push(parseMathBlock(tokens[i]))\n i += 1\n break\n\n default:\n // Handle other token types or skip them\n i += 1\n break\n }\n }\n\n return result\n}\n\nexport { parseInlineTokens }\n","import type { MarkdownIt, MarkdownItPlugin } from 'markdown-it-ts'\nimport type { FactoryOptions } from './factory'\nimport { full as markdownItEmoji } from 'markdown-it-emoji'\nimport markdownItFootnote from 'markdown-it-footnote'\nimport markdownItIns from 'markdown-it-ins'\nimport markdownItMark from 'markdown-it-mark'\nimport markdownItSub from 'markdown-it-sub'\n\nimport markdownItSup from 'markdown-it-sup'\nimport * as markdownItCheckbox from 'markdown-it-task-checkbox'\nimport { factory } from './factory'\nimport {\n parseInlineTokens,\n parseMarkdownToStructure,\n processTokens,\n} from './parser'\n\n// Re-export config\nexport { setDefaultMathOptions } from './config'\n\n// Re-export parser functions\nexport { parseInlineTokens, parseMarkdownToStructure, processTokens }\nexport type { MathOptions } from './config'\n\n// Re-export utilities\nexport { findMatchingClose } from './findMatchingClose'\n\nexport { parseFenceToken } from './parser/inline-parsers/fence-parser'\n// Re-export plugins\nexport { applyContainers } from './plugins/containers'\n\nexport { ESCAPED_TEX_BRACE_COMMANDS, isMathLike, TEX_BRACE_COMMANDS } from './plugins/isMathLike'\nexport { applyMath, KATEX_COMMANDS, normalizeStandaloneBackslashT } from './plugins/math'\n// Re-export the node types for backward compatibility\nexport * from './types'\n\nexport interface GetMarkdownOptions extends FactoryOptions {\n plugin?: Array<unknown>\n apply?: Array<(md: MarkdownIt) => void>\n /**\n * Custom translation function or translation map for UI texts\n * @default { 'common.copy': 'Copy' }\n */\n i18n?: ((key: string) => string) | Record<string, string>\n}\n\nexport function getMarkdown(msgId: string = `editor-${Date.now()}`, options: GetMarkdownOptions = {}) {\n // keep legacy behaviour but delegate to new factory and reapply project-specific rules\n const md = factory(options)\n\n // Setup i18n translator function\n const defaultTranslations: Record<string, string> = {\n 'common.copy': 'Copy',\n }\n\n let t: (key: string) => string\n if (typeof options.i18n === 'function') {\n t = options.i18n\n }\n else if (options.i18n && typeof options.i18n === 'object') {\n const i18nMap = options.i18n as Record<string, string>\n t = (key: string) => i18nMap[key] ?? defaultTranslations[key] ?? key\n }\n else {\n t = (key: string) => defaultTranslations[key] ?? key\n }\n\n // apply user supplied plugins (md.use)\n if (Array.isArray(options.plugin)) {\n for (const p of options.plugin) {\n // allow both [plugin, opts] tuple or plugin function\n const pluginItem = p as unknown\n if (Array.isArray(pluginItem)) {\n const fn = pluginItem[0]\n const opts = pluginItem[1]\n if (typeof fn === 'function')\n md.use(fn, opts)\n }\n else if (typeof pluginItem === 'function') {\n md.use(pluginItem as MarkdownItPlugin)\n }\n // otherwise ignore non-callable plugins\n }\n }\n\n // apply user supplied apply functions to mutate the md instance (e.g. 
md.block.ruler.before(...))\n if (Array.isArray(options.apply)) {\n for (const fn of options.apply) {\n try {\n fn(md)\n }\n catch (e) {\n // swallow errors to preserve legacy behaviour; developers can see stack in console\n\n console.error('[getMarkdown] apply function threw an error', e)\n }\n }\n }\n\n // Re-apply a few project specific plugins that were previously always enabled\n md.use(markdownItSub)\n md.use(markdownItSup)\n md.use(markdownItMark)\n md.use(markdownItEmoji)\n // Safely resolve default export or the module itself for checkbox plugin\n type CheckboxPluginFn = (md: MarkdownIt, opts?: unknown) => void\n const markdownItCheckboxPlugin = ((markdownItCheckbox as unknown) as {\n default?: CheckboxPluginFn\n }).default ?? markdownItCheckbox\n md.use(markdownItCheckboxPlugin)\n md.use(markdownItIns)\n md.use(markdownItFootnote)\n\n // Annotate fence tokens with unclosed meta using a lightweight line check\n md.core.ruler.after('block', 'mark_fence_closed', (state: unknown) => {\n const s = state as unknown as {\n src: string\n tokens: Array<{ type?: string, map?: number[], markup?: string, meta?: Record<string, unknown> }>\n }\n const src: string = s.src\n const lines = src.split(/\\r?\\n/)\n for (const token of s.tokens) {\n if (token.type !== 'fence' || !token.map || !token.markup)\n continue\n const openLine: number = token.map[0]\n const endLine: number = token.map[1]\n const markup: string = token.markup\n const marker = markup[0]\n const minLen = markup.length\n // The closing line, if exists, should be the last line consumed by the block\n const lineIdx = Math.max(0, endLine - 1)\n const line = lines[lineIdx] ?? ''\n let i = 0\n while (i < line.length && (line[i] === ' ' || line[i] === '\\t')) i++\n let count = 0\n while (i + count < line.length && line[i + count] === marker) count++\n let j = i + count\n while (j < line.length && (line[j] === ' ' || line[j] === '\\t')) j++\n const closed = endLine > openLine + 1 && count >= minLen && j === line.length\n const tokenShape = token as unknown as { meta?: Record<string, unknown> }\n tokenShape.meta = tokenShape.meta ?? {}\n ;(tokenShape.meta as Record<string, unknown>).unclosed = !closed\n // also set a explicit `closed` boolean for compatibility with plugins/tests\n ;(tokenShape.meta as Record<string, unknown>).closed = !!closed\n }\n })\n\n // wave rule (legacy)\n const waveRule = (state: unknown, silent: boolean) => {\n const s = state as unknown as { pos: number, src: string, push: (type: string, tag?: string, nesting?: number) => any }\n const start = s.pos\n if (s.src[start] !== '~')\n return false\n const prevChar = s.src[start - 1]\n const nextChar = s.src[start + 1]\n if (/\\d/.test(prevChar) && /\\d/.test(nextChar)) {\n if (!silent) {\n const token = s.push('text', '', 0)\n token.content = '~'\n }\n s.pos += 1\n return true\n }\n return false\n }\n\n md.inline.ruler.before('sub', 'wave', waveRule)\n\n // custom fence that uses msgId for unique ids\n md.renderer.rules.fence = (tokens: unknown, idx: number) => {\n const tokensAny = tokens as unknown as import('./types').MarkdownToken[]\n const token = tokensAny[idx]\n const tokenShape = token as unknown as { info?: string, content?: string }\n const info = String(tokenShape.info ?? '').trim()\n const str = String(tokenShape.content ?? '')\n const encodedCode = btoa(unescape(encodeURIComponent(str)))\n const language = String(info ?? 
'text')\n const uniqueId = `editor-${msgId}-${idx}-${language}`\n\n return `<div class=\"code-block\" data-code=\"${encodedCode}\" data-lang=\"${language}\" id=\"${uniqueId}\">\n <div class=\"code-header\">\n <span class=\"code-lang\">${language.toUpperCase()}</span>\n <button class=\"copy-button\" data-code=\"${encodedCode}\">${t(\n 'common.copy',\n )}</button>\n </div>\n <div class=\"code-editor\"></div>\n </div>`\n }\n\n // reference rule (legacy)\n const RE_REFERENCE = /^\\[(\\d+)\\]/\n const referenceInline = (state: unknown, silent: boolean) => {\n const s = state as unknown as { src: string, pos: number, push: (type: string, tag?: string, nesting?: number) => any }\n if (s.src[s.pos] !== '[')\n return false\n const match = RE_REFERENCE.exec(s.src.slice(s.pos))\n if (!match)\n return false\n if (!silent) {\n const id = match[1]\n const token = s.push('reference', 'span', 0)\n token.content = id\n token.markup = match[0]\n }\n s.pos += match[0].length\n return true\n }\n\n md.inline.ruler.before('escape', 'reference', referenceInline)\n md.renderer.rules.reference = (tokens: unknown, idx: number) => {\n const tokensAny = tokens as unknown as import('./types').MarkdownToken[]\n const id = String(tokensAny[idx].content ?? '')\n return `<span class=\"reference-link\" data-reference-id=\"${id}\" role=\"button\" tabindex=\"0\" title=\"Click to view reference\">${id}</span>`\n }\n\n return md\n}\n"],"mappings":";;;;;;;;;;;AAgBA,IAAIA;AAEJ,SAAgB,sBAAsB,MAA+B;AACnE,sBAAqB;;AAGvB,SAAgB,wBAAiD;AAC/D,QAAO;;;;;ACpBT,SAAgB,gBAAgB,IAAgB;AAC7C;EACC;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD,CAAC,SAAS,SAAS;AAClB,KAAG,IAAI,qBAAqB,MAAM,EAChC,OAAO,QAAiB,KAAa;AAMnC,OALkB,OACM,KAIT,YAAY,EACzB,QAAO,2CAA2C,KAAK;OAGvD,QAAO;KAGZ,CAAC;GACF;AAGF,IAAG,MAAM,MAAM,OACb,SACA,2BACC,OAAgB,WAAmB,SAAiB,WAAoB;EAUvE,MAAM,IAAI;EACV,MAAM,WAAW,EAAE,OAAO,aAAa,EAAE,OAAO;EAChD,MAAM,UAAU,EAAE,OAAO;EACzB,MAAM,cAAc,EAAE,IACnB,MAAM,UAAU,QAAQ,CACxB,MAAM,eAAe;AACxB,MAAI,CAAC,YACH,QAAO;AACT,MAAI,OACF,QAAO;EAET,MAAM,OAAO,YAAY;EACzB,IAAI,WAAW,YAAY;EAC3B,IAAI,QAAQ;AACZ,SAAO,YAAY,SAAS;GAC1B,MAAM,OAAO,EAAE,OAAO,YAAY,EAAE,OAAO;GAC3C,MAAM,OAAO,EAAE,OAAO;AACtB,OAAI,EAAE,IAAI,MAAM,MAAM,KAAK,CAAC,MAAM,KAAK,OAAO;AAC5C,YAAQ;AACR;;AAEF;;AAEF,MAAI,CAAC,MACH,QAAO;AAIT,EAFkB,EAAE,KAAK,sBAAsB,OAAO,EAAE,CAE9C,QAAQ,SAAS,+BAA+B,OAAO;EAEjE,MAAMC,eAAyB,EAAE;AACjC,OAAK,IAAI,IAAI,YAAY,GAAG,IAAI,UAAU,KAAK;GAC7C,MAAM,OAAO,EAAE,OAAO,KAAK,EAAE,OAAO;GACpC,MAAM,OAAO,EAAE,OAAO;AACtB,gBAAa,KAAK,EAAE,IAAI,MAAM,MAAM,KAAK,CAAC;;AAI5C,IAAE,KAAK,kBAAkB,KAAK,EAAE;EAChC,MAAM,cAAc,EAAE,KAAK,UAAU,IAAI,EAAE;AAC3C,cAAY,UAAU,aAAa,KAAK,KAAK;AAC7C,cAAY,MAAM,CAAC,YAAY,GAAG,SAAS;AAG3C,cAAY,WAAW,EAAE;AACzB,IAAE,GAAG,OAAO,MAAM,YAAY,SAAS,EAAE,IAAK,EAAU,KAAK,YAAY,SAAS;AAClF,IAAE,KAAK,mBAAmB,KAAK,GAAG;AAElC,IAAE,KAAK,uBAAuB,OAAO,GAAG;AAExC,IAAE,OAAO,WAAW;AACpB,SAAO;GAEV;;;;;AChGH,MAAMC,cAAY,IAAI,IAAI;CACxB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,SAAgB,yBAAyB,IAAgB;AAGvD,IAAG,KAAK,MAAM,KAAK,2BAA2B,UAAmB;EAE/D,MAAM,OADI,MACK,UAAU,EAAE;AAE3B,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;GACpC,MAAM,IAAI,KAAK;AACf,OAAI,EAAE,SAAS,gBAAgB,eAAe,KAAK,EAAE,QAAQ,EAAE;IAC7D,MAAM,MAAM,EAAE,SAAS,MAAM,cAAc,GAAG,MAAM;AAEpD,QAAI;KAAC;KAAM;KAAM;KAAO;KAAS;KAAQ;KAAO,CAAC,SAAS,IAAI,CAC5D;AACF,MAAE,OAAO;AACT,MAAE,WAAW,CACX;KACE,MAAM;KACN,SAAS,EAAE;KACX,KAAK,EAAE,SAAS,MAAM,cAAc,GAAG,MAAM;KAC7C,SAAS;KACV,CACF;AACD;;AAEF,OAAI,CAAC,KAAK,EAAE,SAAS,SACnB;AAGF,OAAI,EAAE,SAAS,WAAW,KAAK,EAAE,SAAS,GAAG,SAAS,eAAe;IAEnE,MAAM,MAAM,EAAE,SAAS,GAAG,SAAS,MAAM,cAAc,GAAG,MAAM;AAEhE,QAAI;KAAC;KAAK;K
AAQ;KAAU;KAAM;KAAK;KAAK;KAAI,CAAC,SAAS,IAAI,EAAE;AAC9D,OAAE,SAAS,GAAG,UAAU;AACxB,OAAE,SAAS,GAAG,MAAM;AACpB,OAAE,SAAS,KAAK;MACd,MAAM;MACN;MACA,SAAS;MACT,SAAS,KAAK,IAAI;MACnB,CAAQ;UAGT,GAAE,WAAW,CACX;KACE,MAAM;KACN,SAAS;KACT;KACA,SAAS,EAAE,SAAS,GAAG,UAAU,EAAE,SAAS,GAAG;KAChD,CACF;AAEH;cAEO,EAAE,SAAS,WAAW,KAAK,EAAE,SAAS,GAAG,SAAS,iBAAiB,EAAE,SAAS,GAAG,SAAS,eAAe;IAChH,MAAM,MAAM,EAAE,SAAS,GAAG,SAAS,MAAM,cAAc,GAAG,MAAM;AAEhE,QAAI;KAAC;KAAK;KAAQ;KAAU;KAAM;KAAK;KAAK;KAAI,CAAC,SAAS,IAAI,CAC5D;AACF,MAAE,WAAW,CACX;KACE,MAAM;KACN,SAAS;KACT;KACA,SAAS,EAAE,SAAS,KAAI,OAAM,GAAG,QAAQ,CAAC,KAAK,GAAG;KACnD,CACF;AACD;;AAGF,OAAI,CAAC,EAAE,SAAS,WAAW,IAAI,IAAK,EAAU,UAAU,WAAW,EACjE;GAEF,MAAM,MAAM,OAAO,EAAE,QAAQ;GAC7B,MAAM,UAAU,IAAI,MAAM,cAAc,GAAG,IAAI,aAAa,IAAI;AAChE,OAAI,CAAC,QACH;GAGF,MAAM,SADc,aAAa,KAAK,IAAI,IACZA,YAAU,IAAI,QAAQ;GAEpD,MAAM,YAAY;AAElB,OAAI,QAAQ;AAEV,cAAU,WAAW,CACnB;KAAE,MAAM;KAAe,SAAS;KAAK,CACtC;AACD;;AAEF,aAAU,SAAS,SAAS;;GAE9B;;;;;AC5GJ,MAAM,iBAAiB;AAEvB,SAAgB,mBAAmB,IAAgB;CAIjD,MAAM,QAAQ,OAAgB,WAAoB;EAChD,MAAM,IAAI;EACV,MAAM,QAAQ,EAAE;AAChB,MAAI,EAAE,IAAI,WAAW,IACnB,QAAO;AAGT,MAAI,QAAQ,KAAK,EAAE,IAAI,QAAQ,OAAO,IACpC,QAAO;EAGT,MAAM,OAAO,EAAE,IAAI,MAAM,MAAM;EAE/B,MAAM,IAAI,eAAe,KAAK,KAAK;AACnC,MAAI,CAAC,EACH,QAAO;AAET,MAAI,OACF,QAAO;EAET,MAAM,OAAO,EAAE,MAAM;EACrB,MAAM,OAAO,EAAE,MAAM;AAIrB,MAAI,KAAK,SAAS,IAAI,IAAI,KAAK,SAAS,IAAI,CAC1C,QAAO;EACT,MAAM,WAAW,KAAK,QAAQ,IAAI;EAClC,MAAM,kBAAkB,aAAa;EAGrC,MAAM,OAAO,EAAE,KAAK,aAAa,KAAK,EAAE;AACxC,OAAK,QAAQ,CAAC,CAAC,QAAQ,KAAK,CAAC;EAE7B,MAAM,MAAM,EAAE,KAAK,QAAQ,IAAI,EAAE;AACjC,MAAI,UAAU;AAGd,MAAI,iBAAiB;AACnB,KAAE,KAAK,cAAc,KAAK,GAAG;AAE7B,KAAE,OAAO,WAAW;QAKpB,GAAE,OAAO,EAAE,GAAG;AAEhB,SAAO;;AAIT,IAAG,OAAO,MAAM,OAAO,QAAQ,mBAAmB,KAAK;;;;;AC1DzD,SAAgB,mBAAmB,IAAgB;AAKjD,IAAG,KAAK,MAAM,MAAM,UAAU,oBAAoB,UAAmB;EAEnE,MAAM,OADI,MACK,UAAU,EAAE;AAC3B,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;GACpC,MAAM,IAAI,KAAK;AACf,OAAI,KAAK,EAAE,SAAS,YAAY,MAAM,QAAQ,EAAE,SAAS,CACvD,KAAI;AACF,MAAE,WAAW,aAAa,EAAE,SAAS;YAEhC,GAAG;AAKR,YAAQ,MAAM,sDAAsD,EAAE;;;GAI5E;;AAGJ,SAAS,aAAa,QAA0C;AAC9D,KAAI,OAAO,SAAS,EAClB,QAAO;AAET,MAAK,IAAI,IAAI,GAAG,KAAK,OAAO,SAAS,GAAG,KAAK;AAC3C,MAAI,CAAC,OAAO,GACV;AACF,MAAI,OAAO,IAAI,SAAS,UAAU,OAAO,GAAG,SAAS,SAAS,IAAI,IAAI,OAAO,IAAI,IAAI,SAAS,aAAa;GACzG,MAAM,QAAQ,OAAO,GAAG,QAAS,MAAM,eAAe;AACtD,OAAI,OAAO;IACT,IAAI,aAAa,OAAO,GAAG,QAAS,MAAM,GAAG,MAAM,MAAM;IACzD,MAAM,gBAAgB,WAAW,MAAM,SAAS;IAChD,MAAM,iBAAiB,EAAE;AACzB,QAAI,eAAe;AACjB,kBAAa,WAAW,MAAM,GAAG,cAAc,MAAM;AACrD,SAAI,WACF,gBAAe,KAAK;MAClB,MAAM;MACN,SAAS;MACT,KAAK;MACN,CAAC;KAEJ,MAAM,OAAO,MAAM;KACnB,MAAM,OAAO,cAAc,GAAG;AAC9B,SAAI,SAAS,EACX,gBAAe,KAAK;MAAE,MAAM;MAAW,KAAK;MAAM,SAAS;MAAG,CAAC;cAExD,SAAS,EAChB,gBAAe,KAAK;MAAE,MAAM;MAAe,KAAK;MAAU,SAAS;MAAG,CAAC;cAEhE,SAAS,GAAG;AACnB,qBAAe,KAAK;OAAE,MAAM;OAAe,KAAK;OAAU,SAAS;OAAG,CAAC;AACvE,qBAAe,KAAK;OAAE,MAAM;OAAW,KAAK;OAAM,SAAS;OAAG,CAAC;;KAEjE,IAAI,OAAO,OAAO,IAAI,IAAI,WAAW;AACrC,SAAI,OAAO,IAAI,IAAI,SAAS,UAAU,CAAC,OAAO,IAAI,GAAG,SAAS,WAAW,IAAI,EAAE;AAC7E,cAAQ,OAAO,IAAI,IAAI,WAAW;AAClC,aAAO,IAAI,GAAG,UAAU;;AAE1B,oBAAe,KACb;MACE,MAAM;MACN,SAAS,CAAC,OAAO,IAAI,IAAI,SAAS,WAAW,IAAI;MACjD;MACA,OAAO;MACP;MACA,UAAU,CACR;OACE,MAAM;OACN,SAAS;OACT,KAAK;OACN,CACF;MACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;MAClC,CACF;AACD,SAAI,SAAS,EACX,gBAAe,KAAK;MAAE,MAAM;MAAY,KAAK;MAAM,SAAS;MAAI,CAAC;cAE1D,SAAS,EAChB,gBAAe,KAAK;MAAE,MAAM;MAAgB,KAAK;MAAU,SAAS;MAAI,CAAC;cAElE,SAAS,GAAG;AACnB,qBAAe,KAAK;OAAE,MAAM;OAAY,KAAK;OAAM,SAAS;OAAI,CAAC;AACjE,qBAAe,KAAK;OAAE,MAAM;OAAgB,KAAK;OAAU,SAAS;OAAI,CAAC;;AAE3E,SAAI,OAAO,IAAI,IAAI,SAAS,QAAQ;MAClC,MAAM,YAAY,OAAO,IAAI,GAAG,SAAS,QAAQ,UAAU,GAAG;AAC9D
,UAAI,UACF,gBAAe,KAAK;OAClB,MAAM;OACN,SAAS;OACT,KAAK;OACN,CAAC;AAEJ,aAAO,OAAO,GAAG,GAAG,GAAG,eAAe;WAGtC,QAAO,OAAO,GAAG,GAAG,GAAG,eAAe;WAGrC;AACH,SAAI,WACF,gBAAe,KAAK;MAClB,MAAM;MACN,SAAS;MACT,KAAK;MACN,CAAC;KAEJ,MAAM,OAAO,MAAM;KACnB,IAAI,OAAO,OAAO,IAAI,IAAI,WAAW;AACrC,SAAI,OAAO,IAAI,IAAI,SAAS,UAAU,CAAC,OAAO,IAAI,GAAG,SAAS,WAAW,IAAI,EAAE;AAC7E,cAAQ,OAAO,IAAI,IAAI,WAAW;AAClC,aAAO,IAAI,GAAG,UAAU;;AAE1B,oBAAe,KAAK,GAAG,CACrB;MACE,MAAM;MACN,SAAS,CAAC,OAAO,IAAI,IAAI,SAAS,WAAW,IAAI;MACjD;MACA,OAAO;MACP;MACA,UAAU,CACR;OACE,MAAM;OACN,SAAS;OACT,KAAK;OACN,CACF;MACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;MAClC,CACF,CAAC;AACF,SAAI,OAAO,IAAI,IAAI,SAAS,QAAQ;MAClC,MAAM,YAAY,OAAO,IAAI,GAAG,SAAS,QAAQ,OAAO,GAAG;AAC3D,UAAI,UACF,gBAAe,KAAK;OAClB,MAAM;OACN,SAAS;OACT,KAAK;OACN,CAAC;AAEJ,aAAO,OAAO,GAAG,GAAG,GAAG,eAAe;WAGtC,QAAO,OAAO,GAAG,GAAG,GAAG,eAAe;;AAG1C,SAAM,eAAe,SAAS;AAC9B;;aAGK,OAAO,GAAG,SAAS,eAAe,OAAO,GAAG,WAAW,aAAa,OAAO,IAAI,IAAI,SAAS,UAAU,OAAO,IAAI,GAAG,SAAS,SAAS,IAAI,EACjJ;OAAI,OAAO,IAAI,IAAI,SAAS,cAAc;IAExC,MAAM,iBAAiB,EAAE;IACzB,MAAM,OAAQ,OAAO,IAAI,GAAG,WAAW;IACvC,IAAI,OAAO,OAAO,GAAG,OAAO,MAAK,SAAQ,KAAK,OAAO,OAAO,GAAG,MAAM;AAErE,QAAI,OAAO,IAAI,IAAI,SAAS,QAAQ;KAClC,MAAM,KAAK,OAAO,IAAI,IAAI,WAAW,IAAI,QAAQ,IAAI;KACrD,MAAM,UAAU,MAAM;AACtB,SAAI,MAAM,IAAI;AACZ,cAAS,OAAO,IAAI,IAAI,SAAS,MAAM,GAAG,EAAE,IAAI;AAChD,aAAO,IAAI,GAAG,UAAU;;AAG1B,oBAAe,KAAK;MAClB,MAAM;MACN;MACA;MACA,OAAO;MACP;MACA,UAAU,CACR;OACE,MAAM;OACN,SAAS;OACT,KAAK;OACN,CACF;MACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;MAClC,CAAC;KACF,MAAM,YAAY,OAAO,IAAI,GAAG,SAAS,QAAQ,UAAU,GAAG;AAC9D,SAAI,UACF,gBAAe,KAAK;MAClB,MAAM;MACN,SAAS;MACT,KAAK;MACN,CAAC;AAEJ,YAAO,OAAO,IAAI,GAAG,GAAG,GAAG,eAAe;WAEvC;AACH,oBAAe,KAAK;MAClB,MAAM;MACN,SAAS;MACT;MACA,OAAO;MACP;MACA,UAAU,CACR;OACE,MAAM;OACN,SAAS;OACT,KAAK;OACN,CACF;MACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;MAClC,CAAC;AACF,YAAO,OAAO,IAAI,GAAG,GAAG,GAAG,eAAe;;AAE5C;;;AAGJ,MAAI,OAAO,GAAG,SAAS,gBAAgB,OAAO,GAAG,YAAY,MAAM,OAAO,IAAI,IAAI,SAAS,UAAU,OAAO,IAAI,IAAI,SAAS,UAAU,OAAO,IAAI,IAAI,SAAS,aAAa;AAE1K,UAAO,IAAI,GAAG,UAAU;GACxB,MAAM,OAAO,OAAO,IAAI,GAAG,WAAW;GACtC,IAAI,OAAO,OAAO,IAAI,GAAG,QAAQ,KAAK,MAAM;GAC5C,IAAI,QAAQ;AACZ,OAAI,OAAO,GAAG,WAAW,aAAa,OAAO,IAAI,IAAI,SAAS,QAAQ;IACpE,MAAM,KAAK,OAAO,IAAI,IAAI,WAAW,IAAI,QAAQ,IAAI;AACrD,QAAI,MAAM,IAAI;AACZ,aAAS,OAAO,IAAI,IAAI,SAAS,MAAM,GAAG,EAAE,IAAI;AAChD,YAAO,IAAI,GAAG,UAAU;;AAE1B,aAAS;;AAEX,UAAO,OAAO,IAAI,GAAG,OAAO;IAC1B,MAAM;IACN,SAAS;IACT;IACA,OAAO;IACP;IACA,UAAU,CACR;KACE,MAAM;KACN,SAAS;KACT,KAAK;KACN,CACF;IACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;IAClC,CAAQ;aAEF,OAAO,GAAG,SAAS,WAAW,KAAK,IAAI,OAAO,IAAI,GAAG,QAAQ,SAAS,IAAI,IAAI,OAAO,IAAI,GAAG,SAAS,UAAU,OAAO,IAAI,GAAG,SAAS,SAAS,IAAI,EAAE;GAC5J,MAAM,OAAO,OAAO,IAAI,GAAG,OAAQ;GACnC,MAAM,iBAAiB,EAAE;GACzB,MAAM,aAAa,OAAO,IAAI,GAAG,QAAS,MAAM,GAAG,OAAO,IAAI,GAAG,QAAS,SAAS,IAAI,KAAK;AAC5F,OAAI,WACF,gBAAe,KAAK;IAClB,MAAM;IACN,SAAS;IACT,KAAK;IACN,CAAC;AAEJ,OAAI,SAAS,EACX,gBAAe,KAAK;IAAE,MAAM;IAAW,KAAK;IAAM,SAAS;IAAG,CAAC;YAExD,SAAS,EAChB,gBAAe,KAAK;IAAE,MAAM;IAAe,KAAK;IAAU,SAAS;IAAG,CAAC;YAEhE,SAAS,GAAG;AACnB,mBAAe,KAAK;KAAE,MAAM;KAAe,KAAK;KAAU,SAAS;KAAG,CAAC;AACvE,mBAAe,KAAK;KAAE,MAAM;KAAW,KAAK;KAAM,SAAS;KAAG,CAAC;;GAEjE,MAAM,OAAO,OAAO,IAAI,GAAG,WAAW;GACtC,IAAI,OAAO,OAAO,GAAG,QAAS,MAAM,EAAE;GACtC,IAAI,UAAU;AACd,OAAI,OAAO,IAAI,IAAI,SAAS,QAAQ;IAClC,MAAM,KAAK,OAAO,IAAI,IAAI,WAAW,IAAI,QAAQ,IAAI;AACrD,cAAU,MAAM;AAChB,QAAI,MAAM,IAAI;AACZ,aAAS,OAAO,IAAI,IAAI,SAAS,MAAM,GAAG,EAAE,IAAI;AAChD,YAAO,IAAI,GAAG,UAAU;;;AAG5B,kBAAe,KAAK;IAClB,MAAM;IACN;IACA;IACA,OAAO;IACP;IACA,UAAU,CACR;KACE,MAAM;KACN,SAAS;KACT,KAAK;KACN,CACF;IACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAA
K,GAAG;IAClC,CAAC;AACF,OAAI,SAAS,EACX,gBAAe,KAAK;IAAE,MAAM;IAAY,KAAK;IAAM,SAAS;IAAI,CAAC;YAE1D,SAAS,EAChB,gBAAe,KAAK;IAAE,MAAM;IAAgB,KAAK;IAAU,SAAS;IAAI,CAAC;YAElE,SAAS,GAAG;AACnB,mBAAe,KAAK;KAAE,MAAM;KAAY,KAAK;KAAM,SAAS;KAAI,CAAC;AACjE,mBAAe,KAAK;KAAE,MAAM;KAAgB,KAAK;KAAU,SAAS;KAAI,CAAC;;AAE3E,OAAI,OAAO,IAAI,IAAI,SAAS,QAAQ;IAClC,MAAM,YAAY,OAAO,IAAI,GAAG,SAAS,QAAQ,UAAU,GAAG;AAC9D,QAAI,UACF,gBAAe,KAAK;KAClB,MAAM;KACN,SAAS;KACT,KAAK;KACN,CAAC;AAEJ,WAAO,OAAO,IAAI,GAAG,GAAG,GAAG,eAAe;cAEnC,OAAO,IAAI,IAAI,SAAS,YAE/B,QAAO,OAAO,IAAI,GAAG,IAAI,GAAG,eAAe;OAG3C,QAAO,OAAO,IAAI,GAAG,GAAG,GAAG,eAAe;AAE5C,QAAM,eAAe,SAAS;AAC9B;;;AAGJ,QAAO;;;;;ACxUT,SAAgB,iBAAiB,IAAgB;AAG/C,IAAG,KAAK,MAAM,MAAM,UAAU,yBAAyB,UAAmB;EAExE,MAAM,OADI,MACK,UAAU,EAAE;AAC3B,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;GACpC,MAAM,IAAI,KAAK;AACf,OAAI,KAAK,EAAE,SAAS,YAAY,MAAM,QAAQ,EAAE,SAAS,CACvD,KAAI;AACF,MAAE,WAAW,YAAY,EAAE,SAAS;YAE/B,GAAG;AAGR,YAAQ,MAAM,oDAAoD,EAAE;;;GAI1E;;AAGJ,SAAS,YAAY,QAA0C;CAC7D,MAAM,OAAO,OAAO,OAAO,SAAS;CACpC,MAAM,cAAc,OAAO,MAAM,WAAW,GAAG;AAE/C,KAAI,MAAM,SAAS,UAAW,gBAAgB,KAAK,YAAY,IAAI,OAAO,OAAO,SAAS,IAAI,QAAQ,KACpG,QAAO,OAAO,OAAO,SAAS,GAAG,EAAE;AAGrC,QAAO;;;;;AC9BT,SAAgB,qBAAqB,IAAgB;AAInD,IAAG,KAAK,MAAM,MAAM,UAAU,sBAAsB,UAAmB;EAErE,MAAM,OADI,MACK,UAAU,EAAE;AAC3B,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;GACpC,MAAM,IAAI,KAAK;AACf,OAAI,KAAK,EAAE,SAAS,YAAY,MAAM,QAAQ,EAAE,SAAS,CACvD,KAAI;AACF,MAAE,WAAW,gBAAgB,EAAE,SAAS;YAEnC,GAAG;AAGR,YAAQ,MAAM,wDAAwD,EAAE;;;GAI9E;;AAGJ,SAAS,gBAAgB,QAA0C;CACjE,MAAM,cAAc,CAAC,GAAG,OAAO;AAC/B,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,MAAM,IAAI,OAAO,SAAS;CAC1B,MAAM,QAAQ,OAAO;AACrB,KAAI,CAAC,MACH,QAAO;CACT,MAAM,YAAY,OAAO,IAAI;CAC7B,MAAM,eAAe,OAAO,MAAM,WAAW,GAAG;AAChD,KAAI,MAAM,SAAS,eAAe,OAAO,IAAI,IAAI,SAAS,aAAa,OAAO,IAAI,IAAI,SAAS,UAAU,OAAO,IAAI,GAAG,SAAS,SAAS,IAAI,EAAE;EAC7I,MAAM,cAAc,OAAO,OAAO,IAAI,GAAG,WAAW,GAAG,CAAC,MAAM,GAAG,GAAG;EAEpE,MAAM,gBAAgB;GACpB;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACD,OAAO;GACP,OAAO,IAAI;GACX,OAAO,IAAI;GACX;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACF;AACD,MAAI,YACF,eAAc,QAAQ;GACpB,MAAM;GACN,SAAS;GACT,KAAK;GACN,CAAC;AAEJ,cAAY,OAAO,IAAI,GAAG,GAAG,GAAG,cAAc;YAEvC,MAAM,SAAS,UAAU,aAAa,SAAS,IAAI,IAAI,UAAU,SAAS,WAAW;EAE5F,MAAM,aAAa,OAAO,IAAI;EAC9B,MAAM,QAAQ,YAAY,SAAS,SAAS,IAAI;EAChD,MAAM,SAAS;GACb;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACD;IACE,MAAM;IACN,SAAS,YAAY,SAAS,SAAS,OAAO,WAAW,WAAW,GAAG,GAAG;IAC3E;GACD;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACF;EACD,MAAM,aAAa,aAAa,MAAM,GAAG,GAAG;AAC5C,MAAI,WACF,QAAO,QAAQ;GACb,MAAM;GACN,SAAS;GACT,KAAK;GACN,CAAC;AAEJ,cAAY,OAAO,GAAG,OAAO,GAAG,OAAO;AACvC,SAAO;;AAGT,QAAO;;;;;ACpHT,SAAgB,oBAAoB,IAAgB;AAIlD,IAAG,KAAK,MAAM,MAAM,SAAS,qBAAqB,UAAmB;EACnE,MAAM,IAAI;AACV,MAAI;GAEF,MAAM,QAAQ,eADD,EAAE,UAAU,EAAE,CACO;AAClC,OAAI,MAAM,QAAQ,MAAM,CACtB,GAAE,SAAS;WAER,GAAG;AAER,WAAQ,MAAM,oDAAoD,EAAE;;GAEtE;;AAGJ,SAAS,cAAc;AACrB,QAAO;EACL;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,KAAK;GACL,UAAU;GACV,SAAS;GACT,QAAQ;GACR,MAAM;GACN,OAAO;GACP,SAAS;GACT,MAAM;GACP;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAEF;;AAEH,SAAS,YAAY;AACnB,QAAO;EACL;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,KAAK;GACL,UAAU;GACV,SAAS;GACT,QAAQ;GA
CR,MAAM;GACN,OAAO;GACP,MAAM;GACP;EACF;;AAEH,SAAS,SAAS,MAAc;AAC9B,QAAO;EAAC;GACN,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAAE;GACD,MAAM;GACN,KAAK;GACL,UAAU,CACR;IACE,KAAK;IACL,MAAM;IACN,OAAO;IACP,SAAS;IACT,UAAU;IACX,CACF;GACD,SAAS;GACT,OAAO;GACP,OAAO;GACP,OAAO;GACR;EAAE;GACD,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAAC;;AAEJ,SAAgB,eAAe,QAA0C;CACvE,MAAM,cAAc,CAAC,GAAG,OAAO;AAC/B,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,MAAM,IAAI,OAAO,SAAS;CAC1B,MAAM,QAAQ,OAAO;AACrB,KAAI,MAAM,SAAS,UAAU;EAC3B,MAAM,WAAW,OAAO,MAAM,WAAW,GAAG;EAC5C,MAAM,eAAe,OAAO,MAAM,WAAW,IAAI,WAAW,GAAG;AAE/D,MAAI,qBAAqB,KAAK,SAAS,EAAE;GAEvC,MAAM,OAAO,aAAa,MAAM,EAAE,CAAC,MAAM,IAAI,CAAC,KAAI,QAAKC,IAAE,MAAM,CAAC,CAAC,OAAO,QAAQ,CAAC,SAAQ,QAAK,SAASA,IAAE,CAAC;GAC1G,MAAM,SAAU;IACd,GAAG,aAAa;IAChB,GAAG;IACH,GAAG,WAAW;IACf;AACD,eAAY,OAAO,IAAI,GAAG,GAAG,GAAG,OAAO;aAEhC,2BAA2B,KAAK,SAAS,EAAE;GAElD,MAAM,OAAO,aAAa,MAAM,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,KAAI,QAAKA,IAAE,MAAM,CAAC,CAAC,SAAQ,QAAK,SAASA,IAAE,CAAC;GAC9F,MAAM,SAAU;IACd,GAAG,aAAa;IAChB,GAAG;IACH,GAAG,WAAW;IACf;AACD,eAAY,OAAO,IAAI,GAAG,GAAG,GAAG,OAAO;aAEhC,4BAA4B,KAAK,SAAS,EAAE;AACnD,SAAM,UAAU,SAAS,MAAM,GAAG,GAAG;AACrC,SAAM,SAAU,OAAO,GAAG,EAAE;;;AAIhC,QAAO;;;;;AC7JT,SAAgB,kBAAkB,KAAa,UAAkB,MAAc,OAAe;CAC5F,MAAM,MAAM,IAAI;AAGhB,KAAI,SAAS,QAAQ,UAAU,MAAM;EACnC,IAAIC,MAAI;AACR,SAAOA,MAAI,MAAM,GAAG;AAClB,OAAI,IAAIA,SAAO,OAAO,IAAIA,MAAI,OAAO,KAAK;IAExC,IAAI,IAAIA,MAAI;IACZ,IAAI,cAAc;AAClB,WAAO,KAAK,KAAK,IAAI,OAAO,MAAM;AAChC;AACA;;AAEF,QAAI,cAAc,MAAM,EACtB,QAAOA;;AAEX;;AAEF,SAAO;;CAGT,MAAM,WAAW,KAAK,KAAK,SAAS;CACpC,MAAM,WAAW;CACjB,IAAI,QAAQ;CACZ,IAAI,IAAI;AACR,QAAO,IAAI,KAAK;AAEd,MAAI,IAAI,MAAM,GAAG,IAAI,SAAS,OAAO,KAAK,UAAU;GAClD,IAAI,IAAI,IAAI;GACZ,IAAI,cAAc;AAClB,UAAO,KAAK,KAAK,IAAI,OAAO,MAAM;AAChC;AACA;;AAEF,OAAI,cAAc,MAAM,GAAG;AACzB,QAAI,UAAU,EACZ,QAAO;AACT;AACA,SAAK,SAAS;AACd;;;EAIJ,MAAM,KAAK,IAAI;AAEf,MAAI,OAAO,MAAM;AACf,QAAK;AACL;;AAGF,MAAI,OAAO,SACT;WAEO,OAAO,SAAS,SAAS,SAAS,IACzC;OAAI,QAAQ,EACV;;AAEJ;;AAEF,QAAO;;AAGT,gCAAe;;;;AChEf,MAAa,qBAAqB;CAChC;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAa,6BAA6B,mBAAmB,KAAI,MAAK,EAAE,QAAQ,wBAAwB,OAAO,CAAC,CAAC,KAAK,IAAI;AAE1H,MAAM,aAAa;AACnB,MAAM,eAAe;AACrB,MAAM,yBAAyB,IAAI,OAAO,GAAG,aAAa,KAAK,2BAA2B,mBAAmB,IAAI;AAKjH,MAAM,yBAAyB,IAAI,OAAO,MAAM,aAAa,OAAO,2BAA2B,SAAS,IAAI;AAC5G,MAAM,kBAAkB;AAOxB,MAAM,yBAAS,IAAI,OAAO,iFAAiF;AAC3G,MAAM,eAAe;AACrB,MAAM,WAAW;AAGjB,MAAM,eAAe;AACrB,SAAgB,WAAW,GAAW;AACpC,KAAI,CAAC,EACH,QAAO;CAQT,MAAM,OAAO,EAAE,QAAQ,WAAW,MAAM;CACxC,MAAM,WAAW,KAAK,MAAM;AAI5B,KAAI,aAAa,KAAK,SAAS,CAC7B,QAAO;AACT,KAAI,SAAS,SAAS,KAAK,CACzB,QAAO;AACT,KAAI,SAAS,SAAS,IACpB,QAAO;AAET,KAAI,sBAAsB,KAAK,EAAE,CAC/B,QAAO;CAIT,MAAM,SAAS,WAAW,KAAK,KAAK;CACpC,MAAM,mBAAmB,uBAAuB,KAAK,KAAK;CAC1D,MAAM,gBAAgB,uBAAuB,KAAK,KAAK;CAGvD,MAAM,cAAc,gBAAgB,KAAK,KAAK;CAG9C,MAAM,WAFmB,4DAES,KAAK,KAAK,IADjB,6DACwC,KAAK,KAAK;CAE7E,MAAM,MAAM,OAAO,KAAK,KAAK;CAE7B,MAAM,WAAW,aAAa,KAAK,KAAK;CAExC,MAAM,QAAQ,SAAS,KAAK,KAAK;AAKjC,QAAO,UAAU,oBAAoB,iBAAiB,eAAe,YAAY,OAAO,YAAY;;;;;ACzEtG,MAAa,iBAAiB;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAOD,MAAa,yBAAyB,eACnC,OAAO,CACP,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO,CACnC,KAAI,MAAK,EAAE,QAAQ,yBAAyB,OAAO,CAAC,CACpD,KAAK,IAAI;AACZ,MAAM,sBAAsB;AAG5B,MAAM,gBAAgB;AACtB,MAAM,uBAAuB;AAC7B,MAAM,8BAA8B;AACpC,MAAM,6BAA6B;AAGnC,MAAM,kBAAkB,I
AAI,OAAO,GAAG,oBAAoB,iBAAiB,uBAAuB,OAAO,IAAI;AAC7G,MAAM,gCAAgB,IAAI,KAAqB;AAC/C,MAAM,qCAAqB,IAAI,KAAqB;AAEpD,SAAS,aAAa,UAA6C;AACjE,KAAI,CAAC,SACH,QAAO;CACT,MAAM,MAAM,CAAC,GAAG,SAAS;AACzB,KAAI,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO;CACvC,MAAM,MAAM,IAAI,KAAK,IAAS;CAC9B,MAAM,SAAS,cAAc,IAAI,IAAI;AACrC,KAAI,OACF,QAAO;CACT,MAAM,iBAAiB,MAAM,IAAI,KAAI,MAAK,EAAE,QAAQ,0BAA0B,OAAO,CAAC,CAAC,KAAK,IAAI,CAAC;CACjG,MAAM,KAAK,IAAI,OAAO,GAAG,oBAAoB,iBAAiB,eAAe,OAAO,IAAI;AACxF,eAAc,IAAI,KAAK,GAAG;AAC1B,QAAO;;AAGT,SAAS,iBAAiB,YAAqB,UAA6C;CAC1F,MAAM,MAAM,aAAa,EAAE,GAAG,CAAC,GAAI,YAAY,EAAE,CAAE;AACnD,KAAI,CAAC,WACH,KAAI,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO;CACzC,MAAM,MAAM,aAAa,gBAAgB,IAAI,KAAK,IAAS;CAC3D,MAAM,SAAS,mBAAmB,IAAI,IAAI;AAC1C,KAAI,OACF,QAAO;CACT,MAAM,eAAe,aACjB,CAAC,4BAA4B,uBAAuB,CAAC,OAAO,QAAQ,CAAC,KAAK,IAAI,GAC9E,CACE,IAAI,KAAI,MAAK,EAAE,QAAQ,yBAAyB,OAAO,CAAC,CAAC,KAAK,IAAI,EAClE,2BACD,CAAC,OAAO,QAAQ,CAAC,KAAK,IAAI;CAC/B,MAAM,KAAK,IAAI,OAAO,kBAAkB,aAAa,WAAW,IAAI;AACpE,oBAAmB,IAAI,KAAK,GAAG;AAC/B,QAAO;;AAKT,MAAMC,cAAsC;CAC1C,KAAM;CACN,MAAM;CACN,MAAM;CACN,MAAM;CACN,MAAM;CACP;AAED,SAAS,qBAAqB,GAAW;CACvC,MAAM,KAAK;CAEX,IAAI,IAAI;AAER,QAAY,GAAG,KAAK,EAAE,KAAM,KAC1B;AAEF,QAAO;;AAGT,SAAgB,8BAA8B,GAAW,MAAoB;CAC3E,MAAM,WAAW,MAAM,YAAY;CACnC,MAAM,oBAAoB,MAAM,qBAAqB;CAErD,MAAM,aAAa,MAAM,YAAY;CAGrC,MAAM,KAAK,aAAa,aAAa,SAAY,SAAS;CAE1D,IAAI,MAAM,EAAE,QAAQ,KAAK,GAAW,QAAiB;AACnD,MAAI,YAAY,OAAO,OACrB,QAAO,KAAK,YAAY;AAC1B,MAAI,OAAO,SAAS,SAAS,IAAI,CAC/B,QAAO,KAAK;AACd,SAAO;GACP;AAGF,KAAI,kBACF,OAAM,IAAI,QAAQ,eAAe,QAAQ;CAS3C,IAAI,SAAS;CACb,MAAM,aAAa,iBAAiB,YAAY,aAAa,SAAY,SAAS;AAClF,UAAS,OAAO,QAAQ,aAAa,IAAY,IAAY,OAAe,GAAG,GAAG,IAAI,GAAG,GAAG;AAC5F,UAAS,OAAO,QAAQ,eAAe,eAAe,CACnD,QAAQ,sBAAsB,+BAA+B;AAMhE,UAAS,OAAO,QAAQ,6BAA6B,WAAW;AAGhE,UAAS,OAAO,QAAQ,4BAA4B,SAAS;AAC7D,QAAO;;AAET,SAAgB,UAAU,IAAgB,UAAwB;CAEhE,MAAM,cAAc,OAAgB,WAAoB;EACtD,MAAM,IAAI;AAEV,MAAI,WAAW,KAAK,EAAE,IAAI,CACxB,QAAO;EAET,MAAMC,aAAiC;GACrC,CAAC,MAAM,KAAK;GACZ,CAAC,OAAO,MAAM;GACd,CAAC,KAAM,IAAK;GACb;EAED,IAAI,YAAY;EAChB,IAAI,aAAa;AAEjB,OAAK,MAAM,CAAC,MAAM,UAAU,YAAY;GAEtC,MAAM,MAAM,EAAE;GACd,IAAI,WAAW;GACf,MAAM,YAAY,SAAiB;AAEjC,QAAI,SAAS,eAAe,QAAQ,KAClC,QAAO;AAET,QAAI,SAAS,MAAM;AACjB,OAAE,MAAM,EAAE,MAAM,KAAK;AACrB,iBAAY,EAAE;AACd;;AAEF,QAAI,SAAS,SAAS,SAAS,OAAO;KACpC,MAAMC,MAAI,EAAE,KAAK,gBAAgB,IAAI,EAAE;AACvC,SAAE,UAAU,SAAS,QAAQ,MAAM;AACnC,SAAE,SAAS;AACX,OAAE,MAAM,EAAE,MAAM,KAAK;AACrB,iBAAY,EAAE;AACd;;AAGF,QAAI,CAAC,KACH;IAEF,MAAM,IAAI,EAAE,KAAK,QAAQ,IAAI,EAAE;AAC/B,MAAE,UAAU;AACZ,MAAE,MAAM,EAAE,MAAM,KAAK;AACrB,gBAAY,EAAE;;AAGhB,UAAO,MAAM;AACX,QAAI,aAAa,IAAI,OACnB;IACF,MAAM,QAAQ,IAAI,QAAQ,MAAM,UAAU;AAC1C,QAAI,UAAU,GACZ;AAMF,QAAI,QAAQ,GAAG;KACb,IAAI,IAAI,QAAQ;AAEhB,YAAO,KAAK,KAAK,IAAI,OAAO,IAC1B;AACF,SAAI,KAAK,KAAK,IAAI,OAAO,IACvB,QAAO;;IAMX,MAAM,SAASC,0BAAkB,KAAK,QAAQ,KAAK,QAAQ,MAAM,MAAM;AAEvE,QAAI,WAAW,IAAI;KAEjB,MAAMC,YAAU,IAAI,MAAM,QAAQ,KAAK,OAAO;AAC9C,SAAIA,UAAQ,SAAS,KAAK,EAAE;AAC1B,kBAAY,IAAI,QAAQ,MAAM,QAAQ,KAAK,OAAO;AAClD;;AAEF,SAAI,WAAW,IAAI;AACjB,UAAI,WAAWA,UAAQ,EAAE;AACvB,mBAAY,QAAQ,KAAK;AACzB,kBAAW;AACX,WAAI,CAAC,QAAQ;AACX,UAAE,UAAU;QAEZ,MAAM,iBAAiB,qBADF,aAAa,IAAI,MAAM,YAAY,UAAU,GAAG,IAAI,MAAM,GAAG,UAAU,CACnC,GAAG,MAAM;AAElE,YAAI,WACF,UAAS,IAAI,MAAM,YAAY,UAAU,CAAC;aAEvC;SACH,IAAI,OAAO,IAAI,MAAM,GAAG,UAAU;AAClC,aAAI,KAAK,SAAS,KAAK,CACrB,QAAO,KAAK,MAAM,GAAG,KAAK,SAAS,KAAK,OAAO;AACjD,kBAAS,KAAK;;AAEhB,YAAI,gBAAgB;SAClB,MAAM,cAAc,EAAE,KAAK,eAAe,IAAI,EAAE;AAChD,qBAAY,SAAS,IAAI,MAAM,GAAG,QAAQ,EAAE;SAC5C,MAAM,QAAQ,EAAE,KAAK,eAAe,QAAQ,EAAE;AAC9C,eAAM,UAAU,8BAA8BA,WAAS,SAAS;AAChE,eAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,eAAM,MAAM,GAAG,OAAO
A,YAAU;AAChC,eAAM,UAAU;AAChB,qBAAY,UAAUA;AACtB,WAAE,KAAK,gBAAgB,IAAI,EAAE;eAE1B;SACH,MAAM,QAAQ,EAAE,KAAK,eAAe,QAAQ,EAAE;AAC9C,eAAM,UAAU,8BAA8BA,WAAS,SAAS;AAChE,eAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,eAAM,MAAM,GAAG,OAAOA,YAAU;AAChC,eAAM,UAAU;;AAGlB,UAAE,MAAM,IAAI;;AAEd,mBAAY,IAAI;AAChB,oBAAa;;AAEf;;;IAGJ,MAAM,UAAU,IAAI,MAAM,QAAQ,KAAK,QAAQ,OAAO;AACtD,QAAI,CAAC,WAAW,QAAQ,EAAE;AAGxB,iBAAY,SAAS,MAAM;KAC3B,MAAM,OAAO,IAAI,MAAM,EAAE,KAAK,UAAU;AACxC,SAAI,CAAC,EAAE,QACL,UAAS,KAAK;AAChB;;AAEF,eAAW;AAEX,QAAI,CAAC,QAAQ;KAEX,MAAM,SAAS,IAAI,MAAM,GAAG,MAAM;KAIlC,IAAI,eADiB,IAAI,MAAM,GAAG,UAAU,GACV,IAAI,MAAM,YAAY,MAAM,GAAG;KACjE,MAAM,iBAAiB,qBAAqB,aAAa,GAAG,MAAM;AAClE,SAAI,UAAU,EAAE,OAAO,eACrB,gBAAe,EAAE,UAAU,IAAI,MAAM,EAAE,KAAK,MAAM;AAIpD,SAAI,EAAE,YAAY,cAAc;AAC9B,QAAE,UAAU;AACZ,UAAI,gBAAgB;OAClB,MAAM,SAAS,aAAa,MAAM,QAAQ;OAC1C,MAAM,QAAQ,aAAa,MAAM,OAAQ,QAAS,OAAQ,GAAG,OAAO;AACpE,gBAAS,aAAa,MAAM,GAAG,OAAQ,MAAO,CAAC;OAC/C,MAAM,cAAc,EAAE,KAAK,eAAe,IAAI,EAAE;AAChD,mBAAY,SAAS,OAAQ;OAC7B,MAAM,YAAY,EAAE,KAAK,QAAQ,IAAI,EAAE;AACvC,iBAAU,UAAU;AACpB,SAAE,KAAK,gBAAgB,IAAI,EAAE;YAG7B,UAAS,aAAa;;AAG1B,SAAI,gBAAgB;MAClB,MAAM,cAAc,EAAE,KAAK,eAAe,IAAI,EAAE;AAChD,kBAAY,SAAS;MACrB,MAAM,QAAQ,EAAE,KAAK,eAAe,QAAQ,EAAE;AAC9C,YAAM,UAAU,8BAA8B,SAAS,SAAS;AAChE,YAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,YAAM,MAAM,GAAG,OAAO,UAAU;AAChC,YAAM,UAAU;MAChB,MAAM,MAAM,IAAI,MAAM,SAAS,MAAM,OAAO;MAC5C,MAAM,gBAAgB,IAAI,WAAW,IAAI;AACzC,UAAI,cACF,GAAE,KAAK,gBAAgB,IAAI,EAAE;AAE/B,UAAI,KAAK;OACP,MAAM,mBAAmB,EAAE,KAAK,QAAQ,IAAI,EAAE;AAC9C,wBAAiB,WAAW,OAAO,OAAO,KAAK,OAAO,IAAI,EAAE,QAAQ,QAAQ,GAAG;;AAEjF,UAAI,CAAC,cACH,GAAE,KAAK,gBAAgB,IAAI,EAAE;AAC/B,QAAE,MAAM,IAAI;AACZ,kBAAY,IAAI;AAChB,mBAAa;AACb;YAEG;MACH,MAAM,QAAQ,EAAE,KAAK,eAAe,QAAQ,EAAE;AAC9C,YAAM,UAAU,8BAA8B,SAAS,SAAS;AAChE,YAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,YAAM,MAAM,GAAG,OAAO,UAAU;AAChC,YAAM,UAAU;;;AAIpB,gBAAY,SAAS,MAAM;AAC3B,iBAAa;AACb,MAAE,MAAM;;AAGV,OAAI,UAAU;AACZ,QAAI,CAAC,QAAQ;AAEX,SAAI,YAAY,IAAI,OAClB,UAAS,IAAI,MAAM,UAAU,CAAC;AAEhC,OAAE,MAAM,IAAI;UAIZ,GAAE,MAAM;AAGV,WAAO;;;AAIX,SAAO;;CAIT,MAAM,aACJ,OACA,WACA,SACA,WACG;EACH,MAAM,IAAI;EACV,MAAMH,aAAiC;GACrC,CAAC,OAAO,MAAM;GACd,CAAC,KAAM,IAAK;GACZ,CAAC,MAAM,KAAK;GACb;EACD,MAAM,WAAW,EAAE,OAAO,aAAa,EAAE,OAAO;EAChD,MAAM,WAAW,EAAE,IAAI,MAAM,UAAU,EAAE,OAAO,WAAW,CAAC,MAAM;EAClE,IAAI,UAAU;EACd,IAAI,YAAY;EAChB,IAAI,aAAa;AACjB,OAAK,MAAM,CAAC,MAAM,UAAU,WAC1B,KAAI,aAAa,QAAQ,SAAS,WAAW,KAAK,CAChD,KAAI,KAAK,SAAS,IAAI,EACpB;OAAI,SAAS,QAAQ,MAAM,GAAG,KAAK,KAAK;AACtC,QAAI,YAAY,IAAI,SAAS;AAC3B,eAAU;AACV,iBAAY;AACZ,kBAAa;AACb;;AAEF;;SAGC;AACH,aAAU;AACV,eAAY;AACZ,gBAAa;AACb;;AAKN,MAAI,CAAC,QACH,QAAO;AACT,MAAI,OACF,QAAO;AAET,MACE,SAAS,SAAS,WAAW,IAC1B,SAAS,QAAQ,WAAW,GAAG,UAAU,QAC5C;GACA,MAAM,kBAAkB,SAAS,QAAQ,UAAU;GACnD,MAAM,gBAAgB,SAAS,QAC7B,YACA,kBAAkB,UAAU,OAC7B;GACD,MAAMG,YAAU,SAAS,MACvB,kBAAkB,UAAU,QAC5B,cACD;GACD,MAAMC,UAAa,EAAE,KAAK,cAAc,QAAQ,EAAE;AAClD,WAAM,UAAU,8BAA8BD,UAAQ;AACtD,WAAM,SACF,cAAc,OAAO,OAAO,cAAc,MAAM,OAAO;AAC3D,WAAM,MAAM,CAAC,WAAW,YAAY,EAAE;AACtC,WAAM,MAAM,GAAG,YAAYA,YAAU;AACrC,WAAM,QAAQ;AACd,WAAM,UAAU;AAChB,KAAE,OAAO,YAAY;AACrB,UAAO;;EAGT,IAAI,WAAW;EACf,IAAI,UAAU;EACd,IAAI,QAAQ;EAEZ,MAAM,mBACF,aAAa,YAAY,KAAK,SAAS,MAAM,UAAU,OAAO;AAElE,MAAI,iBAAiB,SAAS,WAAW,EAAE;GACzC,MAAM,WAAW,iBAAiB,QAAQ,WAAW;AACrD,aAAU,iBAAiB,MAAM,GAAG,SAAS;AAC7C,WAAQ;AACR,cAAW;SAER;AACH,OAAI,iBACF,WAAU;AAEZ,QAAK,WAAW,YAAY,GAAG,WAAW,SAAS,YAAY;IAC7D,MAAM,YAAY,EAAE,OAAO,YAAY,EAAE,OAAO;IAChD,MAAM,UAAU,EAAE,OAAO;IACzB,MAAM,cAAc,EAAE,IAAI,MAAM,YAAY,GAAG,QAAQ;AACvD,QAAI,YAAY,MAAM,KAAK,YAAY;AACrC,aAAQ;AACR;eAEO,YAAY,S
AAS,WAAW,EAAE;AACzC,aAAQ;KACR,MAAM,WAAW,YAAY,QAAQ,WAAW;AAChD,iBAAY,UAAU,OAAO,MAAM,YAAY,MAAM,GAAG,SAAS;AACjE;;AAEF,gBAAY,UAAU,OAAO,MAAM;;;EAIvC,MAAMC,QAAa,EAAE,KAAK,cAAc,QAAQ,EAAE;AAClD,QAAM,UAAU,8BAA8B,QAAQ;AACtD,QAAM,SACF,cAAc,OAAO,OAAO,cAAc,MAAM,OAAO;AAC3D,QAAM,MAAM,GAAG,YAAY,UAAU,QAAQ,WAAW,KAAK,GAAG,OAAO,KAAK;AAC5E,QAAM,MAAM,CAAC,WAAW,WAAW,EAAE;AACrC,QAAM,QAAQ;AACd,QAAM,UAAU,CAAC;AACjB,IAAE,OAAO,WAAW;AACpB,SAAO;;AAOT,IAAG,OAAO,MAAM,OAAO,UAAU,QAAQ,WAAW;AACpD,IAAG,MAAM,MAAM,OAAO,aAAa,cAAc,WAAW,EAC1D,KAAK;EAAC;EAAa;EAAa;EAAc;EAAO,EACtD,CAAC;;;;;AC/hBJ,SAAgB,iBAAiB,IAAgB;CAI/C,MAAM,eACF,GAAG,SAAS,MAAM,SACf,SAAU,QAAiB,KAAa,SAAkB,KAAc,MAAe;EACxF,MAAM,YAAY;EAClB,MAAM,YAAY;AAClB,SAAO,UAAU,cAAc,UAAU,YAAY,WAAW,KAAK,QAAQ,GAAG;;AAGtF,IAAG,SAAS,MAAM,SAChB,QACA,KACA,SACA,KACA,SACG;EACH,MAAM,YAAY;AAIlB,EAHc,UAAU,KAGb,UAAU,WAAW,OAAO;AAEvC,SADuB,aACD,WAAW,KAAK,SAAS,KAAK,KAAK;;AAG3D,IAAG,SAAS,MAAM,QACd,GAAG,SAAS,MAAM,WACb,QAAiB,QAAgB;EAGpC,MAAM,aAFY,OACM;EAExB,MAAM,OAAO,OAAO,WAAW,QAAQ,GAAG,CAAC,MAAM;AAKjD,SAAO,eAJW,OACd,YAAY,GAAG,MAAM,WAAY,KAAgB,MAAM,OAAO,CAAC,GAAG,KAClE,GAE4B,UADnB,GAAG,MAAM,WAAW,OAAO,WAAW,WAAW,GAAG,CAAC,CACnB;;;;;;ACrBvD,SAAgB,QAAQ,OAAuB,EAAE,EAAE;CACjD,MAAM,KAAK,IAAI,WAAW;EACxB,MAAM;EACN,SAAS;EACT,aAAa;EACb,QAAQ;EACR,GAAI,KAAK,qBAAqB,EAAE;EACjC,CAAC;AAEF,KAAI,KAAK,cAAc,KAErB,WAAU,IAD6B;EAAE,GAAI,uBAAuB,IAAI,EAAE;EAAG,GAAI,KAAK,eAAe,EAAE;EAAG,CAC1E;AAElC,KAAI,KAAK,oBAAoB,KAC3B,iBAAgB,GAAG;AAKrB,oBAAmB,GAAG;AAGtB,oBAAmB,GAAG;AAEtB,sBAAqB,GAAG;AAExB,kBAAiB,GAAG;AAEpB,qBAAoB,GAAG;AACvB,kBAAiB,GAAG;AACpB,0BAAyB,GAAG;AAE5B,QAAO;;;;;AClDT,SAAgB,mBAAmB,OAAoC;CACrE,MAAM,YAAa,MAAM,QAAQ,EAAE;AACnC,QAAO;EACL,MAAM;EACN,SAAS,UAAU,YAAY;EAC/B,KAAK,UAAU,UAAU,QAAQ;EAClC;;AAGH,SAAgB,wBAAwB,OAA+B;CACrE,MAAM,WAAW;CACjB,MAAM,UAAU,SAAS,UAAU,SAAS,QAAQ,UAAU,GAAG;CACjE,MAAM,UAAU,YAAY,MAAM,YAAY;AAC9C,QAAO;EACL,MAAM;EACN;EACA,KAAK,UAAU,QAAQ;EACxB;;;;;ACjBH,SAAgB,gBAAgB,OAAiC;CAC/D,MAAM,OAAO,OAAO,MAAM,WAAW,GAAG;AAExC,QAAO;EACL,MAAM;EACN;EACA,QAJa,OAAO,MAAM,UAAU,GAAG;EAKvC,KAAK,IAAI,KAAK;EACf;;;;;ACPH,SAAgB,mBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,SAAS;CACb,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,YAAY;AACzD,YAAU,OAAO,OAAO,GAAG,WAAW,GAAG;AACzC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATkB;GACzB,MAAM;GACN;GACA,KAAK,IAAI,OAAO;GACjB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC/B5B,MAAM,yBAAyB;AAE/B,MAAM,uBAAuB;CAAC;CAAS;CAAU;CAAQ;CAAQ;CAAM;AAEvE,MAAM,aAAa;AAEnB,SAAS,iBAAiB,SAAiB;CACzC,MAAMC,OAAiB,EAAE;CACzB,MAAMC,UAAoB,EAAE;AAC5B,MAAK,MAAM,WAAW,QAAQ,MAAM,WAAW,EAAE;EAC/C,MAAM,OAAO;AAEb,MAAI,qBAAqB,MAAK,MAAK,KAAK,WAAW,EAAE,CAAC,CACpD;AAEF,MAAI,KAAK,UAAU,KAAK,KAAK,OAAO,OAAO,KAAK,OAAO,IACrD,MAAK,KAAK,IAAI,KAAK,MAAM,EAAE,GAAG;WAEvB,KAAK,UAAU,KAAK,KAAK,OAAO,OAAO,KAAK,OAAO,IAC1D,SAAQ,KAAK,IAAI,KAAK,MAAM,EAAE,GAAG;OAE9B;AAEH,QAAK,KAAK,KAAK;AACf,WAAQ,KAAK,KAAK;;;AAGtB,QAAO;EACL,UAAU,KAAK,KAAK,KAAK;EACzB,SAAS,QAAQ,KAAK,KAAK;EAC5B;;AAGH,SAAgB,gBAAgB,OAAqC;CACnE,MAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,IAAI,MAAM,IAAI,WAAW;CAChE,MAAM,YAAa,MAAM,QAAQ,EAAE;CACnC,MAAM,SAAS,OAAO,UAAU,WAAW,YAAY,UAAU,SAAS;CAC1E,MAAM,OAAO,OAAO,MAAM,QAAQ,GAAG;CACrC,MAAM,OAAO,KAAK,WAAW,OAAO;CACpC,MAAM,WAAW,cACN;EACL,MAAM,IAAI;EACV,MAAM,KAAK,EAAE,QAAQ,IAAI;AACzB,SAAO,OAAO,KACV,KACA,OAAO,EAAE,MAAM,KAAK,EAAE,IAAI,GAAG;KAC/B,GACJ;CAUJ,IAAI,UAAU,OAAO,MAAM,WAAW,GAAG;AACzC,KAAI,uBAAuB,KAAK,QAAQ,CACtC,WAAU,QAAQ,QAAQ,wBAAwB,GAAG;AAEvD,KAAI,MAAM;EACR,MAAM,EAAE,UAAU,YAAY,iBAAiB,QAAQ;AAEvD,SAAO;GACL,MAAM;GACN;GACA,MAAM,OAAO,WAAW,GAAG;GAC3B,KAAK,OAAO,WAAW,GAAG;GAC1B;GACA,SAAS,WAAW,OAAO,QA
AQ,WAAW,QAAQ,OAAO,CAAC;GAC9D,cAAc;GACd,aAAa;GACd;;AAGH,QAAO;EACL,MAAM;EACN;EACA,MAAM,OAAO,WAAW,GAAG;EAC3B,KAAK,OAAO,WAAW,GAAG;EAC1B;EACA,SAAS,WAAW,OAAO,QAAQ,WAAW,QAAQ,OAAO,CAAC;EAC/D;;;;;ACpFH,SAAgB,sBACd,OACuB;CACvB,MAAM,YAAa,MAAM,QAAQ,EAAE;AACnC,QAAO;EACL,MAAM;EACN,IAAI,OAAO,UAAU,SAAS,GAAG;EACjC,KAAK,KAAK,OAAO,UAAU,SAAS,GAAG,CAAC;EACzC;;;;;ACRH,SAAgB,sBAAqC;AACnD,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACHH,SAAgB,oBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,WAAW;CACf,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAAc;AAC3D,cAAY,OAAO,OAAO,GAAG,WAAW,GAAG;AAC3C,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATmB;GAC1B,MAAM;GACN;GACA,KAAK,KAAK,SAAS;GACpB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC/B5B,SAAgB,yBAAyB,OAAsB,QAAyB,GAAiC;CACvH,IAAI,OAAO,OAAO,MAAM,WAAW,GAAG,CAAC,MAAM;CAC7C,MAAM,YAAY,OAAO,IAAI;CAC7B,MAAM,aAAa,OAAO,IAAI;CAG9B,MAAM,WAAW,KAAK,MAAM,gBAAgB;CAC5C,MAAM,MAAM,WAAW,SAAS,GAAG,aAAa,GAAG;CAGnD,SAAS,aAAa,MAAc;EAElC,MAAM,IAAI,KAAK,MAAM,8BAA8B;AACnD,SAAO,IAAI,EAAE,KAAK;;AAGpB,KAAI,QAAQ,KAAK;EACf,IAAI,UAAU;AACd,MAAI,CAAC,aAAc,WAAW,SAAS,WAAW,CAAC,cAAc,WAAW,SAAS,kBAAmB,CAAC,UACvG,WAAU;AAEZ,MAAI,WAAW,SAAS,WAAW,YAAY,SAAS,iBAAiB,CAAC,aAAa;GAErF,MAAM,YAAY,KAAK,MAAM,kEAAkE;GAC/F,MAAM,OAAO,YAAa,UAAU,MAAM,UAAU,MAAM,UAAU,KAAM;GAC1E,IAAI,QAAQ,IAAI;AAChB,OAAI,UAAU,SAAS,QAAQ;AAC7B,WAAO,UAAU,SAAS,QAAQ,WAAW,GAAG,IAAI;AAEpD,YAAQ,IAAI;;AAEd,OAAI,YAAY,SAAS,iBAAiB,UAAU,SAAS,OAC3D,SAAQ,IAAI;GAEd,MAAM,QAAQ,QAAQ,QAAQ;AAC9B,UAAO,CACL;IACE,MAAM;IACN,MAAM,OAAO,QAAQ,GAAG;IACxB,OAAO;IACP,MAAM;IACN,UAAU,CACR;KAAE,MAAM;KAAQ,SAAS;KAAO,KAAK;KAAO,CAC7C;IACD;IACA,KAAK;IACN,EACD,MACD;;;AAIL,KAAI,QAAQ,OAAO,QAAQ,OAAO;EAChC,MAAM,QAAQ,aAAa,KAAK,IAAI;AACpC,SAAO,CACL;GACE,MAAM;GACN,UAAU,CACR;IAAE,MAAM;IAAQ,SAAS;IAAO,KAAK;IAAO,CAC7C;GACD,KAAK;GACN,EACD,IAAI,EACL;;AAGH,QAAO,CACL;EACE,MAAM;EACN;EACA,KAAK;EACN,EACD,IAAI,EACL;;;;;AC1EH,SAAgB,gBAAgB,OAAsB,UAAU,OAAkB;CAIhF,IAAI,QAAQ,MAAM,SAAS,EAAE;CAK7B,IAAIC,iBAAsB;AAC1B,MAAK,CAAC,SAAS,MAAM,WAAW,MAAM,MAAM,QAAQ,MAAM,SAAS,CACjE,MAAK,MAAM,SAAS,MAAM,UAAU;EAElC,MAAM,aAAc,OAAe;AACnC,MAAI,MAAM,QAAQ,WAAW,IAAI,WAAW,SAAS,GAAG;AACtD,WAAQ;AACR,oBAAiB;AACjB;;;CAIN,MAAM,MAAM,OAAO,MAAM,MAAK,SAAQ,KAAK,OAAO,MAAM,GAAG,MAAM,GAAG;CACpE,MAAM,UAAU,MAAM,MAAK,SAAQ,KAAK,OAAO,MAAM,GAAG;CAKxD,IAAI,MAAM;AACV,KAAI,WAAW,QAAQ,OAAO,QAAQ,CAAC,SAAS,EAC9C,OAAM,OAAO,QAAQ;UAEd,gBAAgB,WAAW,QAAQ,OAAO,eAAe,QAAQ,CAAC,SAAS,EAClF,OAAM,OAAO,eAAe,QAAQ;UAE7B,MAAM,QAAQ,gBAAgB,SAAS,IAAI,eAAe,SAAS,IAAI,QAG9E,OAAM,OAAO,eAAe,SAAS,GAAG,QAAQ;UAEzC,MAAM,QAAQ,MAAM,SAAS,IAAI,MAAM,SAAS,IAAI,QAC3D,OAAM,OAAO,MAAM,SAAS,GAAG,QAAQ;UAEhC,MAAM,WAAW,QAAQ,OAAO,MAAM,QAAQ,CAAC,SAAS,EAC/D,OAAM,OAAO,MAAM,QAAQ;CAG7B,MAAM,SAAS,MAAM,MAAK,SAAQ,KAAK,OAAO,QAAQ,GAAG,MAAM;CAC/D,MAAM,QAAQ,WAAW,OAAO,OAAO,OAAO,OAAO;CACrD,MAAM,MAAM,OAAO,MAAM,WAAW,GAAG;AAEvC,QAAO;EACL,MAAM;EACN;EACA;EACA;EACA;EACA;EACD;;;;;ACzDH,SAAgB,qBAAqB,OAAsC;CACzE,MAAM,OAAO,OAAO,MAAM,WAAW,GAAG;AACxC,QAAO;EACL,MAAM;EACN;EACA,KAAK;EACN;;;;;ACLH,SAAgB,iBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,OAAO,GAAG,WAAW,GAAG;AAC1C,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATgB;GACvB,MAAM;GACN;GACA,KAAK,KAAK,OAAO,QAAQ,CAAC;GAC3B;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC/B5B,SAAgB,eACd,QACA,YAIA;CAEA,MAAM,QADY,OAAO,YACD,SAAS,EAAE;CACnC,MAAM,OAAO,OAAO,MAAM,MAAK,SAAQ,KAAK,OAAO,OAAO,GAAG,MAAM,GAAG;CACtE,MAAM,SAAS,MAAM,MAAK,SAAQ,KAAK,OAAO,QAAQ,GAAG,MAAM;CAC/D,MAAM,QAAQ,WAA
W,OAAO,OAAO,OAAO,OAAO;CAErD,IAAI,IAAI,aAAa;CACrB,MAAMC,aAA8B,EAAE;CACtC,IAAI,UAAU;AAGd,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAAc;AAC3D,aAAW,KAAK,OAAO,GAAG;AAC1B;;AAGF,KAAI,OAAO,IAAI,SAAS,aACtB,WAAU;CAIZ,MAAM,WAAW,kBAAkB,WAAW;CAC9C,MAAM,WAAW,SACd,KAAK,SAAS;EACb,MAAM,UAAU;AAChB,MAAI,aAAa,KACf,QAAO,OAAO,QAAQ,WAAW,GAAG;AACtC,SAAO,OAAO,QAAQ,OAAO,GAAG;GAChC,CACD,KAAK,GAAG;AAeX,QAAO;EAAE,MAbc;GACrB,MAAM;GACN;GACA;GACA,MAAM;GACN;GACA,KAAK,OAAO,IAAI,SAAS,IAAI,OAAO,QAAQ,KAAK,MAAM,KAAK,GAAG,GAAG;GAClE;GACD;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;ACnD5B,SAAgB,qBAAqB,OAAsC;AACzE,QAAO;EACL,MAAM;EACN,SAAS,OAAO,MAAM,WAAW,GAAG;EACpC,SAAS,CAAC,CAAC,MAAM;EACjB,KAAK,MAAM;EACZ;;;;;ACNH,SAAgB,oBAAoB,OAAqC;AAGvE,QAAO;EACL,MAAM;EACN,IAJS,OAAO,MAAM,WAAW,GAAG;EAKpC,KAJU,OAAO,MAAM,UAAU,IAAI,MAAM,WAAW,GAAG,GAAG;EAK7D;;;;;ACHH,SAAgB,wBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,QAAQ;CACZ,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAAW;AACxD,WAAS,OAAO,OAAO,GAAG,WAAW,GAAG;AACxC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATuB;GAC9B,MAAM;GACN;GACA,KAAK,KAAK,MAAM;GACjB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;ACnC5B,SAAgB,iBACd,QACA,YACA,KAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,aAAa;CACjB,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,gBAAgB;AAC7D,gBAAc,OAAO,OAAO,GAAG,WAAW,GAAG;AAC7C,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,aAAa,IAAI,CAAC;AAWrD,QAAO;EAAE,MATgB;GACvB,MAAM;GACN;GACA,KAAK,KAAK,OAAO,WAAW,CAAC;GAC9B;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AChC5B,SAAgB,oBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,OAAO,GAAG,WAAW,GAAG;AAC1C,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;CAEhD,MAAM,eAAe,OAAO,OAAO,YAAY,WAAW,GAAG;CAC7D,MAAM,UAAU,WAAW;AAmB3B,QAAO;EAAE,MAlBmB;GAC1B,MAAM;GACN,UAAU,SAAS,SAAS,IACxB,WACA,CACE;IACE,MAAM;IAEN,SAAS;IACT,KAAK;IACN,CACF;GACL,KAAK,IAAI,QAAQ;GAClB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC1C5B,SAAgB,sBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,OAAO,GAAG,WAAW,GAAG;AAC1C,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAqBhD,QAAO;EAAE,MAnBqB;GAC5B,MAAM;GACN,UACE,SAAS,SAAS,IACd,WACA,CACE;IACE,MAAM;IAEN,SAAS,WAAW,OAAO,OAAO,YAAY,WAAW,GAAG;IAC5D,KAAK,WAAW,OAAO,OAAO,YAAY,WAAW,GAAG;IACzD,CACF;GACP,KAAK,IAAI,WAAW,OAAO,OAAO,YAAY,WAAW,GAAG,CAAC;GAC9D;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC1C5B,SAAgB,eAAe,OAAgC;CAC7D,MAAM,UAAU,OAAO,MAAM,WAAW,GAAG;AAC3C,QAAO;EACL,MAAM;EACN;EACA,KAAK;EACN;;;;;ACcH,MAAM,iBAAiB;AAGvB,SAAS,aAAa,KAAa;AACjC,QAAO,IAAI,QAAQ,uBAAuB,OAAO;;AAKnD,MAAM,uBAAuB;AAC7B,MAAM,sBAAsB;AAE5B,SAAgB,YAAY,MAAe;AACzC,KAAI,CAAC,KACH,QAAO;AACT,QAAO,qBAAqB,KAAK,KAAK,IAAI,oBAAoB,KAAK,KAAK;;AAI1E,SAAgB,kBAAkB,QAAyB,KAAc,WAAyC;AAChH,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B,QAAO,EAAE;CAEX,MAAMC,SAAuB,EAAE;CAC/B,IAAIC,kBAAmC;CAEvC,IAAI,IAAI;CAOR,SAAS,uBAAuB;AAC9B,oBAAkB;;CAGpB,SAAS,+BAA+B,SAAiB,OAA+B;AAEtF,MAAI,kBAAkB,KAAK,QAAQ,EAAE;GACnC,IAAI,MAAM,QAAQ,QAAQ,KAAK;AAC/B,OAAI,QAAQ,GACV,OAAM;GACR,MAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI;AACnC,OAAI,MACF,KAAI,iBAAiB;AACnB,oBAAgB,WAAW;AAC3B,oBAAgB,OAAO;UAEpB;AACH,sBAAkB;KAChB,MAAM;KACN,SAAS,OAAO,SAAS,GAAG;KAC5B,KAAK,OAAO,MAAM,WAAW,GAAG;KACjC;AACD,WAAO,KAAK,gBAAgB;;GAIhC,MAAM,EAAE,SAAS,wBAAwB;IACvC;KAAE,MAAM;KAAU,KAAK;KAAK,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IAC
5E;KAAE,MAAM;KAAQ,KAAK;KAAI,SAHE,QAAQ,MAAM,IAAI,CAGU,QAAQ,MAAM,GAAG;KAAE,QAAQ;KAAI,MAAM;KAAI,MAAM;KAAM;IAC5G;KAAE,MAAM;KAAW,KAAK;KAAK,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IAC9E,EAAE,EAAE;AACL,yBAAsB;AACtB,YAAS,KAAK;AACd;AACA,UAAO;;AAIT,MAAI,OAAO,KAAK,QAAQ,EAAE;GACxB,MAAM,UAAU,QAAQ,QAAQ,KAAK;GACrC,MAAM,aAAa,UAAU,KAAK,QAAQ,MAAM,GAAG,QAAQ,GAAG;AAC9D,OAAI,WACF,UAAS,YAAY,WAAW;AAGlC,OAAI,YAAY,IAAI;AAClB;AACA,WAAO;;GAIT,MAAM,OAAO,eAAe,KAAK,QAAQ;GACzC,IAAI,QAAQ;GACZ,IAAI,QAAQ;AACZ,OAAI,QAAQ,OAAO,KAAK,UAAU,UAAU;AAC1C,YAAQ,KAAK;AACb,YAAQ,QAAQ,MAAM,KAAK,QAAQ,KAAK,GAAG,OAAO;UAE/C;AAEH,YAAQ,QAAQ,MAAM,UAAU,EAAE;AAClC,YAAQ;;GAGV,MAAM,EAAE,SAAS,iBAAiB;IAChC;KAAE,MAAM;KAAe,KAAK;KAAU,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IACtF;KAAE,MAAM;KAAQ,KAAK;KAAI,SAAS;KAAO,QAAQ;KAAI,MAAM;KAAI,MAAM;KAAM;IAC3E;KAAE,MAAM;KAAgB,KAAK;KAAU,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IACxF,EAAE,GAAG,IAAI;AAEV,yBAAsB;AACtB,YAAS,KAAK;AAEd,OAAI,OAAO;AACT,gBAAY;KACV,MAAM;KACN,SAAS;KACT,KAAK;KACN,CAAC;AACF;;AAGF;AACA,UAAO;;AAIT,MAAI,eAAe,KAAK,QAAQ,EAAE;GAChC,IAAI,MAAM,QAAQ,QAAQ,IAAI;AAC9B,OAAI,QAAQ,GACV,OAAM;GACR,MAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI;AACnC,OAAI,MACF,KAAI,iBAAiB;AACnB,oBAAgB,WAAW;AAC3B,oBAAgB,OAAO;UAEpB;AACH,sBAAkB;KAAE,MAAM;KAAQ,SAAS,OAAO,SAAS,GAAG;KAAE,KAAK,OAAO,MAAM,WAAW,GAAG;KAAE;AAClG,WAAO,KAAK,gBAAgB;;GAIhC,MAAM,EAAE,SAAS,mBAAmB;IAClC;KAAE,MAAM;KAAW,KAAK;KAAM,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IAC9E;KAAE,MAAM;KAAQ,KAAK;KAAI,SAHH,QAAQ,MAAM,IAAI,CAGU,QAAQ,OAAO,GAAG;KAAE,QAAQ;KAAI,MAAM;KAAI,MAAM;KAAM;IACxG;KAAE,MAAM;KAAY,KAAK;KAAM,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IAChF,EAAE,EAAE;AACL,yBAAsB;AACtB,YAAS,KAAK;AACd;AACA,UAAO;;AAGT,SAAO;;CAGT,SAAS,wBAAwB,SAAiB,QAAgC;AAChF,MAAI,CAAC,SAAS,KAAK,QAAQ,CACzB,QAAO;AAGT,wBAAsB;EACtB,MAAM,aAAa,QAAQ,QAAQ,IAAI;EACvC,MAAM,WAAW,QAAQ,QAAQ,KAAK,aAAa,EAAE;EACrD,MAAM,QAAQ,QAAQ,MAAM,GAAG,WAAW;EAC1C,MAAM,cAAc,aAAa,KAAK,QAAQ,MAAM,WAAW,GAAG,QAAQ,MAAM,YAAY,SAAS;EACrG,MAAM,QAAQ,aAAa,KAAK,KAAK,QAAQ,MAAM,WAAW,EAAE;AAChE,MAAI,MAKF,KAAI,CAFY,+BAA+B,OAAO,OAAO,CAG3D,UAAS,OAAO,MAAM;MAGtB;EAIJ,MAAM,OAAO,YAAY,QAAQ,MAAM,GAAG;AAC1C,aAAW;GACT,MAAM;GACN;GACA,KAAK,OAAO,QAAQ,GAAG;GACxB,CAAe;AAGhB,MAAI,OAAO;AACT,eAAY;IACV,MAAM;IACN,SAAS;IACT,KAAK,OAAO,SAAS,GAAG;IACzB,CAAC;AACF;aAEO,aAAa,IAGpB;OADkB,OAAO,IAAI,IACd;IACb,IAAI,aAAa;AACjB,SAAK,IAAI,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,IACrC,eAAc,QAAS,OAAO,GAAG,WAAW,OAAO,OAAO,GAAG,UAAU,IAAK;AAE9E,QAAI,OAAO,SAAS;AACpB,gBAAY;KACV,MAAM;KACN,SAAS;KACT,KAAK,OAAO,cAAc,GAAG;KAC9B,CAAC;;;AAGN;AACA,SAAO;;CAGT,SAAS,WAAW,MAAkB;AAEpC,wBAAsB;AACtB,SAAO,KAAK,KAAK;;CAGnB,SAAS,UAAU,OAAsB;AAEvC,wBAAsB;AACtB,SAAO,KAAK,MAAoB;;CAIlC,SAAS,SAAS,MAAkB;AAClC,aAAW,KAAK;;CAGlB,SAAS,SAAS,SAAiB,OAAc;AAC/C,MAAI,iBAAiB;AACnB,mBAAgB,WAAW;AAC3B,mBAAgB,OAAOC,SAAO;SAE3B;AACH,qBAAkB;IAChB,MAAM;IACN,SAAS,OAAO,WAAW,GAAG;IAC9B,KAAK,OAAOA,SAAO,WAAW,GAAG;IAClC;AACD,UAAO,KAAK,gBAAgB;;;AAIhC,QAAO,IAAI,OAAO,QAAQ;EACxB,MAAM,QAAQ,OAAO;AACrB,cAAY,MAAM;;CAGpB,SAAS,YAAY,OAAsB;AACzC,UAAQ,MAAM,MAAd;GACE,KAAK;AACH,oBAAgB,MAAM;AACtB;GAGF,KAAK;AACH,QAAI,iBAAiB;AAEnB,qBAAgB,WAAW;AAC3B,qBAAgB,OAAO;;AAGzB;AACA;GAEF,KAAK;AACH,aAAS,qBAAqB,MAAM,CAAC;AACrC;AACA;GACF,KAAK,eAAe;IAClB,MAAM,CAAC,MAAM,SAAS,yBAAyB,OAAO,QAAQ,EAAE;AAChE,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK;AACH,mBAAe,MAAM;AACrB;GAGF,KAAK;AACH,0BAAsB;AACtB,aAAS,gBAAgB,MAAM,CAAC;AAChC;AACA;GAEF,KAAK,eAAe;AAClB,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,iBAAiB,QAAQ,GAAG,MAAM,QAAQ;AACtE,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,WAAW;AACd,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,mBAAmB,QAAQ,EAAE;AACzD,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,UAAU;AACb,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,wBAAwB,QAAQ,EA
AE;AAC9D,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,aAAa;AAChB,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,oBAAoB,QAAQ,EAAE;AAC1D,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,iBAAiB,QAAQ,EAAE;AACvD,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,oBAAoB,QAAQ,EAAE;AAC1D,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,sBAAsB,QAAQ,EAAE;AAC5D,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK;AACH,0BAAsB;AACtB,aAAS;KACP,MAAM;KACN,UAAU,CACR;MACE,MAAM;MACN,SAAS,OAAO,MAAM,WAAW,GAAG;MACpC,KAAK,OAAO,MAAM,WAAW,GAAG;MACjC,CACF;KACD,KAAK,IAAI,OAAO,MAAM,WAAW,GAAG,CAAC;KACtC,CAAC;AACF;AACA;GAEF,KAAK;AACH,0BAAsB;AACtB,aAAS;KACP,MAAM;KACN,UAAU,CACR;MACE,MAAM;MACN,SAAS,OAAO,MAAM,WAAW,GAAG;MACpC,KAAK,OAAO,MAAM,WAAW,GAAG;MACjC,CACF;KACD,KAAK,IAAI,OAAO,MAAM,WAAW,GAAG,CAAC;KACtC,CAAC;AACF;AACA;GAEF,KAAK,SAAS;AACZ,0BAAsB;IACtB,MAAM,WAAW,OAAO,IAAI;AAC5B,QAAI,UAAU,SAAS,UAAU,QAAQ,KAAK,OAAO,SAAS,WAAW,GAAG,CAAC,CAE3E,UAAS,IAAI,GAAG;QAGhB,UAAS,gBAAgB,MAAM,CAAC;AAElC;AACA;;GAEF,KAAK;AACH,0BAAsB;AACtB,aAAS,mBAAmB,MAAM,CAAC;AACnC;AACA;GACF,KAAK;AACH,0BAAsB;AACtB,aAAS,wBAAwB,MAAM,CAAC;AACxC;AACA;GACF,KAAK;AACH,0BAAsB;AACtB,aAAS,sBAAsB,MAAM,CAAC;AACtC;AACA;GAEF,KAAK;AACH,0BAAsB;AACtB,aAAS,qBAAqB,CAAC;AAC/B;AACA;GAEF,KAAK;AACH,0BAAsB;AAEtB,aAAS,gBAAgB,OAAO,GAAG,CAAC;AACpC;AACA;GAGF,KAAK;AACH,0BAAsB;AACtB,aAAS,qBAAqB,MAAM,CAAC;AACrC;AACA;GAGF,KAAK;AACH,oBAAgB,MAAM;AACtB;GAGF;AAEE,cAAU,MAAM;AAChB;AACA;;;CAIN,SAAS,gBAAgB,OAAsB;EAE7C,IAAI,QAAQ,OAAO,SAAS;EAC5B,IAAI,UAAU,OAAO,MAAM,WAAW,GAAG,CAAC,QAAQ,OAAO,GAAG;AAC5D,MAAI,QAAQ,WAAW,IAAI,IAAI,OAAO,OAAO,SAAS,IAAI,SAAS,OACjE,WAAU,QAAQ,MAAM,EAAE;AAG5B,MAAI,QAAQ,SAAS,YAAY,IAAI,CAAC,KAAK,SAAS,YAAY,CAC9D,WAAU,QAAQ,MAAM,GAAG,GAAG;AAEhC,SAAY,SAAS,GAAG,SAAS;GAC/B,MAAM,OAAO,OAAO;AACpB,OAAI,KAAK,SAAS,QAAQ;AACxB,sBAAkB;AAClB,cAAU,KAAK,UAAU;AACzB;;AAEF;;AAGF,MAAI,QAAQ,OAAO,SAAS,EAC1B,QAAO,OAAO,QAAQ,EAAE;EAE1B,MAAM,YAAY,OAAO,IAAI;AAC7B,MAAI,WAAW,SAAS,oBAAoB,OAAO,KAAK,QAAQ,EAAE;AAChE;AACA;;AAEF,MAAI,YAAY,OAAO,YAAY,OAAO,YAAY,OAAO,QAAQ,KAAK,QAAQ,EAAE;AAClF;AACA;;AAEF,MAAI,CAAC,aAAa,iBAAiB,KAAK,QAAQ,CAC9C,WAAU,QAAQ,QAAQ,UAAU,GAAG;AAEzC,MAAI,mBAAmB,QAAQ,CAC7B;EACF,MAAM,WAAW,OAAO,IAAI;AAC5B,MAAK,YAAY,OAAO,CAAC,WAAW,QAAQ,SAAS,IAAI,IAAM,YAAY,OAAO,CAAC,SAAS,QAAQ,SAAS,IAAI,EAAG;AAClH;AACA;;AAEF,MAAI,wBAAwB,SAAS,MAAM,CACzC;AACF,MAAI,+BAA+B,SAAS,MAAM,CAChD;AACF,MAAI,yBAAyB,SAAS,MAAM,CAC1C;EAEF,MAAM,WAAW,eAAe;GAAE,GAAG;GAAO;GAAS,CAAC;AAEtD,MAAI,wBAAwB,SAAS,MAAM,CACzC;AACF,MAAI,iBAAiB;AAEnB,mBAAgB,WAAW,SAAS,QAAQ,QAAQ,gBAAgB,GAAG;AACvE,mBAAgB,OAAO,SAAS;SAE7B;GACH,MAAM,YAAY,UAAU,QAAQ,QAAQ,OAAO,IAAI,IAAI,YAAY;AAGvE,OAAI,CADc,OAAO,IAAI,GAE3B,UAAS,UAAU,SAAS,QAAQ,QAAQ,gBAAgB,GAAG;AAEjE,qBAAkB;AAClB,mBAAgB,SAAS;AACzB,UAAO,KAAK,gBAAgB;;AAE9B;;CAGF,SAAS,eAAe,OAAsB;AAE5C,wBAAsB;EACtB,MAAM,OAAO,MAAM,OAAO,MAAM,CAAC,UAAU,SAAS,OAAO,GAAG;AAG9D,MAAI,OAAO,OAAO,IAAI,GAAG,SAAS,QAAQ;GACxC,MAAM,OAAO,OAAO,OAAO,IAAI,IAAI,WAAW,GAAG;GACjD,MAAM,UAAU,aAAa,KAAK;AAElC,OAAI,kBADQ,IAAI,OAAO,MAAM,QAAQ,SAAS,EACrC,KAAK,IAAI,EAAE;IAIlB,MAAMC,aAAW,MAAM,OAAO,MAAM,CAAC,UAAU,SAAS,OAAO,GAAG,MAAM;AAIxE,QADoB,CAAC,IAAI,SAAS,IAAI,IAAK,YAAY,OAAOA,WAAS,CAAC,EACxD;AACd,2BAAsB;AAWtB,gBAVa;MACX,MAAM;MACN,MAAM,OAAOA,WAAS;MACtB,OAAO;MACP;MACA,UAAU,CACR;OAAE,MAAM;OAAQ,SAAS;OAAM,KAAK;OAAM,CAC3C;MACD,SAAS;MACV,CACe;AAChB,UAAK;AACL;;AAGF,aAAS,MAAM,KAAK;AACpB,SAAK;AACL;;;AAGJ,MAAI,OAAO,MAAM;GACf,MAAM,8BAAc,IAAI,OAAO,UAAU,aAAa,KAAK,CAAC,SAAS;GACrE,MAAM,MAAM,OAAO,SAAS,IAAI,OAAO,OAAO,SAAS,KAAK;GAC5D,MAAM,UAAU,CAAC,YAAY,KAAK,IAAI;AACtC,OAAI,WAAW,KAAK;IAClB,IAAI,UAAU;AACd,QAAI,KACF;SAAI,IAAI,SAAS,OACf,WAAU,
OAAQ,IAAiB,QAAQ,GAAG;cACvC,IAAI,SAAS,OACpB,WAAU,OAAQ,IAAiB,WAAW,GAAG;cACxC,IAA8B,WAAY,OAAQ,IAA8B,YAAY,SACrG,WAAU,OAAQ,IAA6B,WAAW,GAAG,CAAC,MAAM,GAAG,GAAG;;AAG9E,yBADoB,IAAI,OAAO,MAAM,aAAa,QAAQ,CAAC,YAAY,EACvD,KAAK,IAAI,EAAE;KACzB,MAAM,OAAO,OAAO,WAAW,GAAG;AAClC,2BAAsB;KACtB,MAAMC,SAAO;MACX,MAAM;MACN,MAAM;MACN,OAAO;MACP;MACA,UAAU,CACR;OAAE,MAAM;OAAQ,SAAS;OAAM,KAAK;OAAM,CAC3C;MACD;MACD;AACD,YAAO,OAAO,OAAO,SAAS,GAAG,GAAGA,OAAK;AACzC,UAAK;AACL,SAAI,OAAO,OAAO,IAAI,WAAW,GAAG,KAAK,IACvC;AACF;;;;EAIN,MAAM,EAAE,MAAM,cAAc,eAAe,QAAQ,EAAE;AACrD,MAAI;EAIJ,MAAM,WAAW,MAAM,OAAO,MAAM,CAAC,UAAU,SAAS,OAAO,GAAG;EAClE,MAAM,UAAU,OAAO,YAAY,GAAG;AAMtC,MAAI,OAAO,SAAS;GAIlB,MAAM,UAAU,IAAI,QAAQ,KAAK;AACjC,OAAI,YAAY,IAAI,QAIf;IACH,MAAM,WAAW,IAAI,QAAQ,KAAK,UAAU,EAAE;AAC9C,QAAI,aAAa,GACf,MAAK,UAAU;aAIA,IAAI,MAAM,UAAU,GAAG,SAAS,CACpC,SAAS,QAAQ,CAC1B,MAAK,UAAU;QAEf,MAAK,UAAU;;;AAIvB,aAAW,KAAK;;CAGlB,SAAS,gBAAgB,OAAsB;AAE7C,wBAAsB;EACtB,MAAM,YAAY,OAAO,IAAI;EAC7B,MAAM,WAAW,OAAO,IAAI;EAC5B,MAAM,YAAY,OAAO,OAAO,SAAS;EAEzC,MAAM,8BAA8B,WAAW,SAAS,UAAU,CAAG,OAAO,UAAU,WAAW,GAAG,CAAE,WAAW,IAAI;EACrH,MAAM,oCAAoC,UAAU,SAAS,UAAU,YAAY,KAAK,OAAO,SAAS,WAAW,GAAG,CAAC;AAEvH,MAAI,+BAA+B,kCACjC,UAAS,oBAAoB,MAAM,CAAC;WAE7B,aAAa,UAAU,SAAS,OACvC,WAAU,UAAU,OAAO,MAAM,UAAU,GAAG,GAAG,OAAO,UAAU,WAAW,GAAG;WAEzE,aAAa,UAAU,SAAS,QAAQ;AAC/C,aAAU,UAAU,OAAO,UAAU,WAAW,GAAG,GAAG,OAAO,MAAM,UAAU,GAAG;AAChF,aAAU,MAAM,OAAO,UAAU,OAAO,GAAG,GAAG,OAAO,MAAM,UAAU,GAAG;;AAE1E;;CAGF,SAAS,wBAAwB,SAAiB,QAAgC;EAChF,MAAM,YAAY,QAAQ,QAAQ,IAAI;AACtC,MAAI,cAAc,GAChB,QAAO;EAET,IAAI,kBAAkB,QAAQ,MAAM,GAAG,UAAU;EACjD,MAAM,UAAU,QAAQ,QAAQ,MAAM,UAAU;AAChD,MAAI,YAAY,IAAI;GAClB,MAAM,YAAY,OAAO,IAAI;GAC7B,IAAI,OAAO,QAAQ,MAAM,YAAY,GAAG,QAAQ;AAChD,OAAI,KAAK,SAAS,IAAI,EAAE;IACtB,MAAM,kBAAkB,KAAK,QAAQ,IAAI;AAEzC,uBAAmB,QAAQ,MAAM,GAAG,YAAY,kBAAkB,EAAE;IACpE,MAAM,eAAe,YAAY,kBAAkB;AACnD,WAAO,QAAQ,MAAM,eAAe,GAAG,QAAQ;;GAEjD,MAAM,YAAY,OAAO,IAAI;AAC7B,OAAI,QAAQ,SAAS,KAAK,IAAI,WAAW,SAAS,eAAe,WAAW;IAC1E,MAAM,OAAO,OAAO,IAAI;IACxB,IAAI,QAAQ;IACZ,IAAIC,YAAU;AACd,QAAI,MAAM,SAAS,UAAU,KAAK,YAAY,KAAK;AACjD;AACA,iBAAU;eAEH,MAAM,SAAS,UAAU,KAAK,YAAY,IACjD;AAGF,QAAI,gBACF,UAAS,iBAAiB,gBAAgB;AAE5C,eAAW;KACT,MAAM;KACN,MAAM,OAAO,UAAU,WAAW,GAAG;KACrC,OAAO;KACP;KACA,UAAU,CAAC;MAAE,MAAM;MAAQ,SAAS;MAAM,KAAK;MAAM,CAAC;KACtD;KACD,CAAe;AAChB,SAAK;AACL,WAAO;;GAGT,MAAM,iBAAiB,QAAQ,QAAQ,KAAK,QAAQ;GACpD,MAAM,OAAO,mBAAmB,KAAK,QAAQ,MAAM,UAAU,GAAG,eAAe,GAAG;GAClF,MAAM,UAAU,mBAAmB;AAEnC,OAAI,gBACF,UAAS,iBAAiB,gBAAgB;AAE5C,cAAW;IACT,MAAM;IACN;IACA,OAAO;IACP;IACA,UAAU,CAAC;KAAE,MAAM;KAAQ,SAAS;KAAM,KAAK;KAAM,CAAC;IACtD;IACD,CAAe;GAEhB,MAAM,YAAY,mBAAmB,KAAK,QAAQ,MAAM,iBAAiB,EAAE,GAAG;AAC9E,OAAI,WAAW;AACb,gBAAY;KAAE,MAAM;KAAQ,SAAS;KAAW,KAAK;KAAW,CAA6B;AAC7F;;AAEF;AACA,UAAO;;AAGT,SAAO;;CAGT,SAAS,yBAAyB,SAAiB,OAA+B;EAChF,MAAM,aAAa,QAAQ,QAAQ,KAAK;AACxC,MAAI,eAAe,GACjB,QAAO;EAET,MAAM,kBAAkB,QAAQ,MAAM,GAAG,WAAW;AACpD,MAAI,CAAC,gBACH,mBAAkB;GAChB,MAAM;GACN,SAAS;GACT,KAAK;GACN;MAGD,iBAAgB,WAAW;AAE7B,SAAO,KAAK,gBAAgB;AAC5B,oBAAkB;AAClB,aAAW,gBAAgB,OAAO,KAAK,CAAe;AACtD;AACA,SAAO;;CAGT,SAAS,mBAAmB,SAA0B;AAEpD,MAAI,EAAE,SAAS,WAAW,IAAI,IAAI,WAAW,SAAS,kBACpD,QAAO;EAGT,MAAM,IADW,QAAQ,MAAM,EAAE,CACd,MAAM,UAAU;AACnC,MAAI,MAAM,MAAM;AACd;AACA,UAAO;;AAGT,MAAI,KAAK,KAAK,KAAK,EAAE,GAAG,EAAE;GACxB,MAAM,UAAU,EAAE,OAAO,OAAO,EAAE,OAAO;AACzC,cAAW;IACT,MAAM;IACN;IACA,KAAK,UAAU,QAAQ;IACxB,CAAe;AAChB;AACA,UAAO;;AAGT,SAAO;;AAGT,QAAO;;;;;ACpxBT,SAAgB,gBACd,QACA,OAC0B;CAC1B,MAAMC,qBAAmC,EAAE;CAC3C,IAAI,IAAI,QAAQ;AAGhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,mBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,qBAAmB,KAAK;GACtB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,
EAAE,CAAC;GACxD,KAAK,OAAO,aAAa,WAAW,GAAG;GACxC,CAAC;AACF,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EAEA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,qBAAmB,KAAK,SAAS;AACjC,MAAI;OAGJ;AAUJ,QAAO,CANgC;EACrC,MAAM;EACN,UAAU;EACV,KAAK,mBAAmB,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,KAAK;EAC3D,EAEuB,IAAI,EAAE;;;;;ACvChC,SAAgB,eAAe,OAAqC;AAIlE,KAAI,MAAM,MAAM,WAAW,OAAO,CAChC,QAAO,gBAAgB,MAAM;CAG/B,MAAM,aAAa,OAAO,MAAM,WAAW,GAAG;CAC9C,MAAM,QAAQ,WAAW,MAAM,yCAAyC;AACxE,KAAI,QAAQ,GAGV,OAAM,UAAU,WACb,QAAQ,uBAAuB,GAAG,CAClC,QAAQ,oBAAoB,GAAG;CAEpC,MAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,IAAI,MAAM,IAAI,WAAW;AAChE,QAAO;EACL,MAAM;EACN,UAAU,QAAQ,MAAM,KAAK,OAAO,MAAM,QAAQ,GAAG;EACrD,MAAM,OAAO,MAAM,WAAW,GAAG;EACjC,KAAK,OAAO,MAAM,WAAW,GAAG;EAChC,SAAS,CAAC;EACX;;;;;ACnBH,SAAgB,oBACd,QACA,OAC8B;CAC9B,MAAMC,QAA8B,EAAE;CACtC,IAAI,IAAI,QAAQ;CAChB,IAAIC,YAA0B,EAAE;CAChC,IAAIC,kBAAgC,EAAE;AAEtC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,WAAW;EAEhC,MAAM,YAAY,OAAO,IAAI;AAC7B,cAAY,kBAAkB,UAAU,YAAY,EAAE,CAAC;AACvD,OAAK;YAEE,OAAO,GAAG,SAAS,WAAW;EAErC,IAAI,IAAI,IAAI;AACZ,oBAAkB,EAAE;AAEpB,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;AAChC,mBAAgB,KAAK;IACnB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,OAAO,aAAa,WAAW,GAAG,CAAC;IAC5F,KAAK,OAAO,aAAa,WAAW,GAAG;IACxC,CAAC;AACF,QAAK;QAGL;AAKJ,MAAI,UAAU,SAAS,GAAG;AACxB,SAAM,KAAK;IACT,MAAM;IACN,MAAM;IACN,YAAY;IACZ,KAAK,GAAG,UAAU,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,GAAG,CAAC,IAAI,gBAClD,KAAI,QAAO,IAAI,IAAI,CACnB,KAAK,KAAK;IACd,CAAC;AAGF,eAAY,EAAE;;AAGhB,MAAI,IAAI;OAGR;AAUJ,QAAO,CANwC;EAC7C,MAAM;EACN;EACA,KAAK,MAAM,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAC5C,EAE2B,IAAI,EAAE;;;;;ACrEpC,SAAgB,cACd,QACA,OACwB;CAExB,MAAM,OADQ,OAAO,OACD,QAAQ,EAAE;CAC9B,MAAM,KAAK,OAAO,MAAM,SAAS,IAAI;CACrC,MAAMC,mBAAiC,EAAE;CACzC,IAAI,IAAI,QAAQ;AAEhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,iBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,mBAAiB,KAAK;GACpB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,OAAO,aAAa,WAAW,GAAG;GACxC,CAAC;AACF,OAAK;OAGL;AAWJ,QAAO,CAP4B;EACjC,MAAM;EACN;EACA,UAAU;EACV,KAAK,KAAK,GAAG,KAAK,iBAAiB,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,KAAK;EACtE,EAEqB,IAAI,EAAE;;;;;AChC9B,SAAgB,aACd,QACA,OACa;CACb,MAAM,QAAQ,OAAO;CACrB,MAAM,WAAW,OAAO,MAAM,KAAK,UAAU,EAAE,IAAI,IAAI;CACvD,MAAM,eAAe,OAAO,SAAS,UAAU,GAAG;CAClD,MAAM,sBAAsB,OAAO,QAAQ;CAC3C,MAAM,iBAAiB,OAAO,oBAAoB,WAAW,GAAG;AAEhE,QAAO;EACL,MAAM;EACN,OAAO;EACP,MAAM;EACN,UAAU,kBAAkB,oBAAoB,YAAY,EAAE,CAAC;EAC/D,KAAK;EACN;;;;;AChBH,SAAgB,eAAe,OAAqC;AAClE,QAAO;EACL,MAAM;EACN,SAAS,OAAO,MAAM,WAAW,GAAG;EACpC,SAAS,CAAC,CAAC,MAAM;EACjB,KAAK,OAAO,MAAM,OAAO,GAAG;EAC7B;;;;;ACDH,SAAgB,WACd,QACA,OACqB;CACrB,IAAI,IAAI,QAAQ;CAChB,IAAIC,YAAiC;CACrC,MAAMC,OAAuB,EAAE;CAC/B,IAAI,WAAW;AAEf,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAC7C,KAAI,OAAO,GAAG,SAAS,cAAc;AACnC,aAAW;AACX;YAEO,OAAO,GAAG,SAAS,eAAe;AACzC,aAAW;AACX;YAGA,OAAO,GAAG,SAAS,gBAChB,OAAO,GAAG,SAAS,cAEtB;UAEO,OAAO,GAAG,SAAS,WAAW;EACrC,MAAMC,QAAyB,EAAE;EACjC,IAAI,IAAI,IAAI;AAEZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,aAAa,OAAO,GAAG,SAAS,WAAW;GAChE,MAAM,eAAe,OAAO,GAAG,SAAS;GACxC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,UAAU,OAAO,aAAa,WAAW,GAAG;AAElD,SAAM,KAAK;IACT,MAAM;IACN,QAAQ,gBAAgB;IACxB,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,QAAQ;IACjE,KAAK;IACN,CAAC;AAEF,QAAK;QAGL;EAIJ,MAAMC,UAAwB;GAC5B,MAAM;GACN;GACA,KAAK,MAAM,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,IAAI;GAC3C;AAED,MAAI,SACF,aAAY;MAGZ,MAAK,KAAK,QAAQ;AAGpB,MAAI,IAAI;OAGR;AAIJ,KAAI,CAAC,UAEH,aAAY;EACV,MAAM;EACN,OAAO,EAAE;EACT,KAAK;EACN;AAWH,QAAO,CARsB;EAC3B,MAAM;EACN,QAAQ;EACR;EACA,SAAS,OAAO,OAAO,WAAW;EAClC,KAAK,CAAC,WAA
W,GAAG,KAAK,CAAC,KAAI,QAAO,IAAI,IAAI,CAAC,KAAK,KAAK;EACzD,EAEkB,IAAI,EAAE;;;;;AC3F3B,SAAgB,qBAAwC;AACtD,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACYH,SAAgB,UACd,QACA,OACoB;CACpB,MAAM,QAAQ,OAAO;CACrB,MAAMC,YAA4B,EAAE;CACpC,IAAI,IAAI,QAAQ;AAEhB,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,uBACnB,OAAO,GAAG,SAAS,qBAEtB,KAAI,OAAO,GAAG,SAAS,kBAAkB;EAKvC,MAAMC,eAA6B,EAAE;EACrC,IAAI,IAAI,IAAI;AACZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAE7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,WAAW,OAAO,IAAI;GAC5B,MAAM,aAAa,OAAO,aAAa,WAAW,GAAG;AACrD,OAAI,SAAS,KAAK,WAAW,EAAE;AAC7B,iBAAa,UAAU,WAAW,QAAQ,UAAU,GAAG;AACvD,iBAAa,UAAU,OAAO,IAAI,EAAE;;AAEtC,gBAAa,KAAK;IAChB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,OAAO,aAAa,WAAW,GAAG,EAAE,SAAS;IACtG,KAAK,OAAO,aAAa,WAAW,GAAG;IACxC,CAAC;AACF,QAAK;aAEE,OAAO,GAAG,SAAS,mBAAmB;GAE7C,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,gBAAa,KAAK,eAAe;AACjC,OAAI;aAGJ,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;AACA,OAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;GAGF,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,gBAAa,KAAK,eAAe;AACjC,OAAI;aAEG,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,SAAS;AAEnC,gBAAa,KAAK,gBAAgB,OAAO,GAAG,CAAC;AAC7C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;GAExC,MAAM,CAAC,WAAW,YAAY,WAAW,QAAQ,EAAE;AACnD,gBAAa,KAAK,UAAU;AAC5B,OAAI;aAEG,OAAO,GAAG,SAAS,WAAW;GAErC,MAAM,CAAC,aAAa,YAAY,oBAAoB,QAAQ,EAAE;AAC9D,gBAAa,KAAK,YAAY;AAC9B,OAAI;aAEG,OAAO,GAAG,SAAS,iBAAiB;GAE3C,MAAM,CAAC,cAAc,YAAY,cAAc,QAAQ,EAAE;AACzD,gBAAa,KAAK,aAAa;AAC/B,OAAI;aAEG,OAAO,GAAG,SAAS,gBAAgB;GAE1C,MAAM,cAAc,aAAa,QAAQ,EAAE;AAC3C,gBAAa,KAAK,YAAY;AAC9B,QAAK;aAEE,OAAO,GAAG,SAAS,MAAM;AAEhC,gBAAa,KAAK,oBAAoB,CAAC;AACvC,QAAK;aAEE,OAAO,GAAG,SAAS,kBAAkB;GAE5C,MAAM,QACF,sDAAsD,KACtD,OAAO,OAAO,GAAG,QAAQ,GAAG,CAC7B;AACH,OAAI,OAAO;IACT,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,GAAG,MAAM;AACpE,iBAAa,KAAK,eAAe;AACjC,QAAI;SAGJ,MAAK;QAIP,MAAK;AAIT,YAAU,KAAK;GACb,MAAM;GACN,UAAU;GACV,KAAK,aAAa,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,GAAG;GACnD,CAAC;AAEF,MAAI,IAAI;OAGR,MAAK;AAsBT,QAAO,CAlBoB;EACzB,MAAM;EACN,SAAS,MAAM,SAAS;EAExB,cAAc;AACZ,OAAI,MAAM,SAAS,MAAM,MAAM,QAAQ;IACrC,MAAM,QAAQ,MAAM,MAAM,MAAK,MAAK,EAAE,OAAO,QAAQ;AACrD,QAAI,OAAO;KACT,MAAM,SAAS,OAAO,MAAM,GAAG;AAC/B,YAAO,OAAO,SAAS,OAAO,IAAI,WAAW,IAAI,SAAS;;;MAI5D;EACJ,OAAO;EACP,KAAK,UAAU,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAChD,EAEiB,IAAI,EAAE;;AAI1B,SAAS,gBACP,QACA,OACoB;CAGpB,MAAM,cAAc,OAAO;CAC3B,MAAMC,cAA8B,EAAE;CACtC,IAAI,IAAI,QAAQ;AAEhB,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,uBACnB,OAAO,GAAG,SAAS,qBAEtB,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAMD,eAA6B,EAAE;EACrC,IAAI,IAAI,IAAI;AAEZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAE7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,WAAW,OAAO,IAAI;AAC5B,gBAAa,KAAK;IAChB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,OAAO,aAAa,WAAW,GAAG,EAAE,SAAS;IACtG,KAAK,OAAO,aAAa,WAAW,GAAG;IACxC,CAAC;AACF,QAAK;aAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;AACA,OAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;GAIF,MAAM,CAAC,sBAAsB,YAAY,gBAAgB,QAAQ,EAAE;AACnE,gBAAa,KAAK,qBAAqB;AACvC,OAAI;aAEG,OAAO,GAAG,SAAS,cAAc;AACxC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,SAAS;AACnC,gBAAa,KAAK,gBAAgB,OAAO,GAAG,CAAC;AAC7C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;QAIL,MAAK;AAIT,cAAY,KAAK;GACf,MAAM;GACN,UAAU;GACV,KAAK,aAAa,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,GAAG;GACnD,CAAC;AAEF,MAAI,IAAI;OAGR,MAAK;AAqBT,QAAO,CAjB0B;EAC/B,MAAM;EACN,SAAS,YAAY,SAAS;EAC9B,cAAc;AACZ,OAAI,YAAY,SAAS,YAAY,MAAM,QAAQ;IACjD,MAAM,QAAQ,YAAY,MAAM,MAAK,MAAK,EAAE,OAAO
,QAAQ;AAC3D,QAAI,OAAO;KACT,MAAM,SAAS,OAAO,MAAM,GAAG;AAC/B,YAAO,OAAO,SAAS,OAAO,IAAI,WAAW,IAAI,SAAS;;;MAI5D;EACJ,OAAO;EACP,KAAK,YAAY,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAClD,EAEuB,IAAI,EAAE;;;;;ACvQhC,SAAgB,gBACd,QACA,OACA,OAC0B;CAC1B,MAAM,OAAO,OAAO,MAAM,MAAM,OAAO;CACvC,MAAM,QAAQ,OAAO,MAAM,MAAO,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAE;CAChF,MAAME,qBAAmC,EAAE;CAC3C,IAAI,IAAI,QAAQ;AAEhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,MAAI,aACF,oBAAmB,KAAK;GACtB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,OAAO,aAAa,WAAW,GAAG;GACxC,CAAC;AAEJ,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EAEA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,qBAAmB,KAAK,SAAS;AACjC,MAAI;OAGJ;AAcJ,QAAO,CAVgC;EACrC,MAAM;EACN;EACA;EACA,UAAU;EACV,KAAK,MAAM,KAAK,GAAG,MAAM,IAAI,mBAC1B,KAAI,UAAS,MAAM,IAAI,CACvB,KAAK,KAAK,CAAC;EACf,EAEuB,IAAI,EAAE;;;;;AC9ChC,SAAgB,eACd,QACA,OAC0B;CAC1B,MAAM,YAAY,OAAO;CAGzB,IAAI,OAAO;CACX,IAAI,QAAQ;CAEZ,MAAM,YAAY,UAAU,KAAK,MAAM,yBAAyB;AAChE,KAAI,WAAW;AACb,SAAO,UAAU;EAEjB,MAAM,OAAO,OAAO,UAAU,QAAQ,GAAG,CAAC,MAAM;AAChD,MAAI,QAAQ,CAAC,KAAK,WAAW,MAAM,EAAE;GAEnC,MAAM,QAAQ,KAAK,wBAAQ,IAAI,OAAO,IAAI,OAAO,EAAE,GAAG,CAAC,MAAM;AAC7D,OAAI,MACF,SAAQ;;QAGT;EAEH,MAAM,OAAO,OAAO,UAAU,QAAQ,GAAG,CAAC,MAAM;EAEhD,MAAM,QAEF,4DAA4D,KAAK,KAAK;AAC1E,MAAI,OAAO;AACT,UAAO,MAAM;AACb,WAAQ,OAAO,MAAM,MAAM,GAAG;;;AAIlC,KAAI,CAAC,MACH,SAAQ,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE;CAEtD,MAAMC,WAAyB,EAAE;CACjC,IAAI,IAAI,QAAQ;CAGhB,MAAM,4BAAY,IAAI,OAAO,cAAc,KAAK,SAAS;AAEzD,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,qBACnB,CAAC,UAAU,KAAK,OAAO,GAAG,KAAK,CAElC,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,MAAI,cAAc;GAChB,MAAM,cAAe,aAAa,YAAgC,EAAE;GACpE,IAAI,IAAI;AACR,QAAK,IAAI,IAAI,YAAY,SAAS,GAAG,KAAK,GAAG,KAAK;IAChD,MAAM,IAAI,YAAY;AACtB,QAAI,EAAE,SAAS,UAAU,KAAK,KAAK,EAAE,QAAQ,EAAE;AAC7C,SAAI;AACJ;;;GAGJ,MAAM,YAAY,MAAM,KAAK,YAAY,MAAM,GAAG,EAAE,GAAG;AACvD,YAAS,KAAK;IACZ,MAAM;IACN,UAAU,kBAAkB,aAAa,EAAE,CAAC;IAC5C,KAAK,OAAO,aAAa,WAAW,GAAG,CAAC,QAAQ,SAAS,GAAG,CAAC,QAAQ,gBAAgB,GAAG;IACzF,CAAC;;AAEJ,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EACA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,WAAS,KAAK,SAAS;AACvB,MAAI;OAGJ;AAcJ,QAAO,CAVgC;EACrC,MAAM;EACN;EACA;EACA;EACA,KAAK,MAAM,KAAK,GAAG,MAAM,IAAI,SAAS,KAAI,MAAK,EAAE,IAAI,CAAC,KAAK,KAAK,CAAC;EAClE,EAGoB,IACkB,EAAE;;;;;AC/F3C,SAAgB,iBAAgC;AAC9C,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACHH,MAAM,YAAY,IAAI,IAAI;CACxB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAM,qCAAqB,IAAI,KAAqB;AAEpD,SAAgB,eAAe,OAAqC;CAClE,MAAM,MAAM,OAAO,MAAM,WAAW,GAAG;AAGvC,KAAI,WAAW,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,IAAI,UAAU,KAAK,IAAI,CACnE,QAAO;EACL,MAAM;EACN,SAAS;EACT;EACA,KAAK;EACL,SAAS;EACV;CAKH,MAAM,OADW,IAAI,MAAM,uBAAuB,GAC1B,MAAM,IAAI,aAAa;AAG/C,KAAI,CAAC,IACH,QAAO;EACL,MAAM;EACN,SAAS;EACT;EACA,KAAK;EACL,SAAS;EACV;CAIH,MAAM,cAAc,mBAAmB,KAAK,IAAI;CAChD,MAAM,SAAS,UAAU,IAAI,IAAI;CAGjC,IAAI,UAAU,mBAAmB,IAAI,IAAI;AACzC,KAAI,CAAC,SAAS;AACZ,YAAU,IAAI,OAAO,WAAW,IAAI,MAAM,IAAI;AAC9C,qBAAmB,IAAI,KAAK,QAAQ;;CAEtC,MAAM,aAAa,QAAQ,KAAK,IAAI;CAEpC,MAAM,UAAU,EAAE,UAAU,eAAe;AAM3C,QAAO;EACL,MAAM;EACN,SANc,UACZ,GAAG,IAAI,QAAQ,WAAW,GAAG,CAAC,MAAM,IAAI,KACxC;EAKF;EACA;EACA;EACD;;;;;ACzEH,SAAgB,eACd,QACA,OACe;CACf,MAAM,wBAAwB,OAAO,QAAQ;CAC7C,MAAM,mBAAmB,OAAO,sBAAsB,WAAW,GAAG;AAEpE,QAAO;EACL,MAAM;EACN,UAAU,kBAAkB,sBAAsB,YAAY,EAAE,EAAE,iBAAiB;EACnF,KAAK;EACN;;;;;ACKH,SAAgB,yBACd,UACA,IACA,UAAwB,EAAE,EACZ;CAEd,IAAI,gBAAgB,YAAY,IAAI,UAAU,CAAC,QAAQ,kBAAkB,YAAY;AACrF,KAAI,aAAa,SAAS,MAAM,CAE9B,gBAAe,aAAa,QAAQ,SAAS,QAAQ;AAEvD,KA
AI,aAAa,KAAK,aAAa,CAEjC,gBAAe,aAAa,QAAQ,cAAc,KAAK;UAEhD,aAAa,KAAK,aAAa,CAEtC,gBAAe,aAAa,QAAQ,qBAAqB,KAAK;CAIhE,MAAM,SAAS,GAAG,MAAM,cAAc,EAAE,CAAC;AAEzC,KAAI,CAAC,UAAU,CAAC,MAAM,QAAQ,OAAO,CACnC,QAAO,EAAE;CAGX,MAAM,MAAM,QAAQ;CACpB,MAAM,OAAO,QAAQ;CACrB,IAAI,oBAAoB;AACxB,KAAI,OAAO,OAAO,QAAQ,WACxB,qBAAoB,IAAI,kBAAkB,IAAI;CAIhD,IAAI,SAAS,cAAc,kBAAkB;AAI7C,KAAI,QAAQ,OAAO,SAAS,YAAY;EACtC,MAAM,aAAa,KAAK,kBAAkB;AAC1C,MAAI,MAAM,QAAQ,WAAW,EAAE;GAG7B,MAAM,QAAS,WAAyB;GACxC,MAAM,YAAa,OAAmC;AACtD,OAAI,SAAS,OAAO,cAAc,SAChC,UAAS,cAAc,WAAyC;OAIhE,UAAS;;;AAIf,QAAO;;AAIT,SAAgB,cAAc,QAAuC;AAEnE,KAAI,CAAC,UAAU,CAAC,MAAM,QAAQ,OAAO,CACnC,QAAO,EAAE;CAEX,MAAMC,SAAuB,EAAE;CAC/B,IAAI,IAAI;AAMR,QAAO,IAAI,OAAO,QAAQ;EACxB,MAAM,QAAQ,OAAO;AACrB,UAAQ,MAAM,MAAd;GACE,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK,wBAAwB;IAC3B,MAAM,CAAC,aAAa,YAAY,eAAe,QAAQ,EAAE;AACzD,WAAO,KAAK,YAAY;AACxB,QAAI;AACJ;;GAGF,KAAK;AACH,WAAO,KAAK,aAAa,QAAQ,EAAE,CAAC;AACpC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,eAAe,QAAQ,EAAE,CAAC;AACtC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,eAAe,MAAM,CAAC;AAClC,SAAK;AACL;GACF,KAAK;AACH,WAAO,KAAK,eAAe,OAAO,GAAG,CAAC;AACtC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,gBAAgB,OAAO,GAAG,CAAC;AACvC,SAAK;AACL;GAEF,KAAK;GACL,KAAK,qBAAqB;IACxB,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,WAAO,KAAK,SAAS;AACrB,QAAI;AACJ;;GAGF,KAAK;AACH,WAAO,KAAK,oBAAoB,CAAC;AACjC,SAAK;AACL;GAEF,KAAK,mBAAmB;IACtB,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,WAAO,KAAK,eAAe;AAC3B,QAAI;AACJ;;GAGF,KAAK,cAAc;IACjB,MAAM,CAAC,WAAW,YAAY,WAAW,QAAQ,EAAE;AACnD,WAAO,KAAK,UAAU;AACtB,QAAI;AACJ;;GAGF,KAAK,WAAW;IACd,MAAM,CAAC,oBAAoB,YAAY,oBAAoB,QAAQ,EAAE;AACrE,WAAO,KAAK,mBAAmB;AAC/B,QAAI;AACJ;;GAGF,KAAK,iBAAiB;IACpB,MAAM,CAAC,cAAc,YAAY,cAAc,QAAQ,EAAE;AACzD,WAAO,KAAK,aAAa;AACzB,QAAI;AACJ;;GAGF,KAAK,kBAAkB;IACrB,MAAM,QACF,4DAA4D,KAC5D,OAAO,MAAM,QAAQ,GAAG,CACzB;AACH,QAAI,OAAO;KACT,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,GAAG,MAAM;AACpE,YAAO,KAAK,eAAe;AAC3B,SAAI;UAGJ,MAAK;AAEP;;GAGF,KAAK;AACH,WAAO,KAAK,gBAAgB,CAAC;AAC7B;AACA;GAEF,KAAK;AACH,WAAO,KAAK,eAAe,OAAO,GAAG,CAAC;AACtC,SAAK;AACL;GAEF;AAEE,SAAK;AACL;;;AAIN,QAAO;;;;;AC9JT,SAAgB,YAAY,QAAgB,UAAU,KAAK,KAAK,IAAI,UAA8B,EAAE,EAAE;CAEpG,MAAM,KAAK,QAAQ,QAAQ;CAG3B,MAAMC,sBAA8C,EAClD,eAAe,QAChB;CAED,IAAIC;AACJ,KAAI,OAAO,QAAQ,SAAS,WAC1B,KAAI,QAAQ;UAEL,QAAQ,QAAQ,OAAO,QAAQ,SAAS,UAAU;EACzD,MAAM,UAAU,QAAQ;AACxB,OAAK,QAAgB,QAAQ,QAAQ,oBAAoB,QAAQ;OAGjE,MAAK,QAAgB,oBAAoB,QAAQ;AAInD,KAAI,MAAM,QAAQ,QAAQ,OAAO,CAC/B,MAAK,MAAM,KAAK,QAAQ,QAAQ;EAE9B,MAAM,aAAa;AACnB,MAAI,MAAM,QAAQ,WAAW,EAAE;GAC7B,MAAM,KAAK,WAAW;GACtB,MAAM,OAAO,WAAW;AACxB,OAAI,OAAO,OAAO,WAChB,IAAG,IAAI,IAAI,KAAK;aAEX,OAAO,eAAe,WAC7B,IAAG,IAAI,WAA+B;;AAO5C,KAAI,MAAM,QAAQ,QAAQ,MAAM,CAC9B,MAAK,MAAM,MAAM,QAAQ,MACvB,KAAI;AACF,KAAG,GAAG;UAED,GAAG;AAGR,UAAQ,MAAM,+CAA+C,EAAE;;AAMrE,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,eAAe;AACtB,IAAG,IAAIC,KAAgB;CAGvB,MAAM,2BAA6B,mBAEhC,WAAW;AACd,IAAG,IAAI,yBAAyB;AAChC,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,mBAAmB;AAG1B,IAAG,KAAK,MAAM,MAAM,SAAS,sBAAsB,UAAmB;EACpE,MAAM,IAAI;EAKV,MAAM,QADc,EAAE,IACJ,MAAM,QAAQ;AAChC,OAAK,MAAM,SAAS,EAAE,QAAQ;AAC5B,OAAI,MAAM,SAAS,WAAW,CAAC,MAAM,OAAO,CAAC,MAAM,OACjD;GACF,MAAMC,WAAmB,MAAM,IAAI;GACnC,MAAMC,UAAkB,MAAM,IAAI;GAClC,MAAMC,SAAiB,MAAM;GAC7B,MAAM,SAAS,OAAO;GACtB,MAAM,SAAS,OAAO;GAGtB,MAAM,OAAO,MADG,KAAK,IAAI,GAAG,UAAU,EAAE,KACT;GAC/B,IAAI,IAAI;AACR,UAAO,IAAI,KAAK,WAAW,KAAK,OAAO,OAAO,KAAK,OAAO,KAAO;GACjE,IAAI,QAAQ;AACZ,UAAO,IAAI,QAAQ,KAAK,UAAU,KAAK,IAAI,WAAW,OAAQ;GAC9D,IAAI,IAAI,IAAI;AACZ,UAAO,IAAI,KAAK,WAAW,KAAK,OAAO,OAAO,KAAK,OAAO,KAAO;GACjE,MAAM,SAAS,UAAU,WAAW,KAAK,SAAS,UAAU,MAAM,KAAK;GACvE,MAAM,aAAa;AACnB,cAAW,OAA
O,WAAW,QAAQ,EAAE;AACtC,GAAC,WAAW,KAAiC,WAAW,CAAC;AAEzD,GAAC,WAAW,KAAiC,SAAS,CAAC,CAAC;;GAE3D;CAGF,MAAM,YAAY,OAAgB,WAAoB;EACpD,MAAM,IAAI;EACV,MAAM,QAAQ,EAAE;AAChB,MAAI,EAAE,IAAI,WAAW,IACnB,QAAO;EACT,MAAM,WAAW,EAAE,IAAI,QAAQ;EAC/B,MAAM,WAAW,EAAE,IAAI,QAAQ;AAC/B,MAAI,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,EAAE;AAC9C,OAAI,CAAC,QAAQ;IACX,MAAM,QAAQ,EAAE,KAAK,QAAQ,IAAI,EAAE;AACnC,UAAM,UAAU;;AAElB,KAAE,OAAO;AACT,UAAO;;AAET,SAAO;;AAGT,IAAG,OAAO,MAAM,OAAO,OAAO,QAAQ,SAAS;AAG/C,IAAG,SAAS,MAAM,SAAS,QAAiB,QAAgB;EAG1D,MAAM,aAFY,OACM;EAExB,MAAM,OAAO,OAAO,WAAW,QAAQ,GAAG,CAAC,MAAM;EACjD,MAAM,MAAM,OAAO,WAAW,WAAW,GAAG;EAC5C,MAAM,cAAc,KAAK,SAAS,mBAAmB,IAAI,CAAC,CAAC;EAC3D,MAAM,WAAW,OAAO,QAAQ,OAAO;AAGvC,SAAO,sCAAsC,YAAY,eAAe,SAAS,QAFhE,UAAU,MAAM,GAAG,IAAI,GAAG,WAEuD;;kCAEpE,SAAS,aAAa,CAAC;iDACR,YAAY,IAAI,EACvD,cACD,CAAC;;;;;CAOR,MAAM,eAAe;CACrB,MAAM,mBAAmB,OAAgB,WAAoB;EAC3D,MAAM,IAAI;AACV,MAAI,EAAE,IAAI,EAAE,SAAS,IACnB,QAAO;EACT,MAAM,QAAQ,aAAa,KAAK,EAAE,IAAI,MAAM,EAAE,IAAI,CAAC;AACnD,MAAI,CAAC,MACH,QAAO;AACT,MAAI,CAAC,QAAQ;GACX,MAAM,KAAK,MAAM;GACjB,MAAM,QAAQ,EAAE,KAAK,aAAa,QAAQ,EAAE;AAC5C,SAAM,UAAU;AAChB,SAAM,SAAS,MAAM;;AAEvB,IAAE,OAAO,MAAM,GAAG;AAClB,SAAO;;AAGT,IAAG,OAAO,MAAM,OAAO,UAAU,aAAa,gBAAgB;AAC9D,IAAG,SAAS,MAAM,aAAa,QAAiB,QAAgB;EAC9D,MAAM,YAAY;EAClB,MAAM,KAAK,OAAO,UAAU,KAAK,WAAW,GAAG;AAC/C,SAAO,mDAAmD,GAAG,+DAA+D,GAAG;;AAGjI,QAAO"}
|
|
1
|
+
{"version":3,"file":"index.js","names":["defaultMathOptions: MathOptions | undefined","contentLines: string[]","VOID_TAGS","tagStack: [string, number][]","i","i","CONTROL_MAP: Record<string, string>","delimiters: [string, string][]","t","findMatchingClose","content","token: any","children: ParsedNode[]","innerTokens: MarkdownToken[]","orig: string[]","updated: string[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","childWithAttrs: any","children: ParsedNode[]","innerTokens: MarkdownToken[]","linkTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","children: ParsedNode[]","innerTokens: MarkdownToken[]","result: ParsedNode[]","currentTextNode: TextNode | null","raw","hrefAttr","node","loading","blockquoteChildren: ParsedNode[]","items: DefinitionItemNode[]","termNodes: ParsedNode[]","definitionNodes: ParsedNode[]","footnoteChildren: ParsedNode[]","headerRow: TableRowNode | null","rows: TableRowNode[]","cells: TableCellNode[]","rowNode: TableRowNode","listItems: ListItemNode[]","itemChildren: ParsedNode[]","nestedItems: ListItemNode[]","admonitionChildren: ParsedNode[]","children: ParsedNode[]","result: ParsedNode[]","defaultTranslations: Record<string, string>","t: (key: string) => string","markdownItEmoji","openLine: number","endLine: number","markup: string"],"sources":["../src/config.ts","../src/plugins/containers.ts","../src/plugins/fixHtmlInline.ts","../src/plugins/fixLinkInline.ts","../src/plugins/fixLinkTokens.ts","../src/plugins/fixListItem.ts","../src/plugins/fixStrongTokens.ts","../src/plugins/fixTableTokens.ts","../src/findMatchingClose.ts","../src/plugins/isMathLike.ts","../src/plugins/math.ts","../src/renderers/index.ts","../src/factory.ts","../src/parser/inline-parsers/checkbox-parser.ts","../src/parser/inline-parsers/emoji-parser.ts","../src/parser/inline-parsers/emphasis-parser.ts","../src/parser/inline-parsers/fence-parser.ts","../src/parser/inline-parsers/footnote-ref-parser.ts","../src/parser/inline-parsers/hardbreak-parser.ts","../src/parser/inline-parsers/highlight-parser.ts","../src/parser/inline-parsers/html-inline-code-parser.ts","../src/parser/inline-parsers/image-parser.ts","../src/parser/inline-parsers/inline-code-parser.ts","../src/parser/inline-parsers/insert-parser.ts","../src/parser/inline-parsers/link-parser.ts","../src/parser/inline-parsers/math-inline-parser.ts","../src/parser/inline-parsers/reference-parser.ts","../src/parser/inline-parsers/strikethrough-parser.ts","../src/parser/inline-parsers/strong-parser.ts","../src/parser/inline-parsers/subscript-parser.ts","../src/parser/inline-parsers/superscript-parser.ts","../src/parser/inline-parsers/text-parser.ts","../src/parser/inline-parsers/index.ts","../src/parser/node-parsers/blockquote-parser.ts","../src/parser/node-parsers/code-block-parser.ts","../src/parser/node-parsers/definition-list-parser.ts","../src/parser/node-parsers/footnote-parser.ts","../src/parser/node-parsers/heading-parser.ts","../src/parser/node-parsers/math-block-parser.ts","../src/parser/node-parsers/table-parser.ts","../src/parser/node-parsers/thematic-break-parser.ts","../src/parser/node-parsers/list-parser.ts","../src/parser/node-parsers/admonition-parser.ts","../src/parser/node-parsers/container-parser.ts","../src/parser/node-parsers/hardbreak-parser.ts","../src/parser/node-parsers/html-block-parser.ts","../src/parser/node-parsers/paragraph-parser.ts","../src/parser/index.ts","../src/i
ndex.ts"],"sourcesContent":["/**\n * MathOptions control how the math plugin normalizes content before\n * handing it to KaTeX (or other math renderers).\n *\n * - commands: list of command words that should be auto-prefixed with a\n * backslash if not already escaped (e.g. 'infty' -> '\\\\infty'). Use a\n * conservative list to avoid false positives in prose.\n * - escapeExclamation: whether to escape standalone '!' to '\\\\!' (default true).\n */\nexport interface MathOptions {\n /** List of command words to auto-escape. */\n commands?: readonly string[]\n /** Whether to escape standalone '!' (default: true). */\n escapeExclamation?: boolean\n}\n\nlet defaultMathOptions: MathOptions | undefined\n\nexport function setDefaultMathOptions(opts: MathOptions | undefined) {\n defaultMathOptions = opts\n}\n\nexport function getDefaultMathOptions(): MathOptions | undefined {\n return defaultMathOptions\n}\n","import type { MarkdownIt } from 'markdown-it-ts'\nimport markdownItContainer from 'markdown-it-container'\n\nexport function applyContainers(md: MarkdownIt) {\n ;[\n 'admonition',\n 'info',\n 'warning',\n 'error',\n 'tip',\n 'danger',\n 'note',\n 'caution',\n ].forEach((name) => {\n md.use(markdownItContainer, name, {\n render(tokens: unknown, idx: number) {\n const tokensAny = tokens as unknown as import('../types').MarkdownToken[]\n const token = tokensAny[idx]\n // `nesting` is a runtime-only property present on MarkdownIt tokens.\n // Narrow the shape with `unknown` -> specific minimal interface to avoid `as any`.\n const tokenShape = token as unknown as { nesting?: number }\n if (tokenShape.nesting === 1) {\n return `<div class=\"vmr-container vmr-container-${name}\">`\n }\n else {\n return '</div>\\n'\n }\n },\n })\n })\n\n // fallback for simple ::: blocks (kept for backwards compat)\n md.block.ruler.before(\n 'fence',\n 'vmr_container_fallback',\n (state: unknown, startLine: number, endLine: number, silent: boolean) => {\n interface ParserState {\n bMarks: number[]\n tShift: number[]\n eMarks: number[]\n src: string\n push: (type: string, tag?: string, nesting?: number) => any\n md: any\n line: number\n }\n const s = state as unknown as ParserState\n const startPos = s.bMarks[startLine] + s.tShift[startLine]\n const lineMax = s.eMarks[startLine]\n const markerMatch = s.src\n .slice(startPos, lineMax)\n .match(/^:::\\s*(\\w+)/)\n if (!markerMatch)\n return false\n if (silent)\n return true\n\n const name = markerMatch[1]\n let nextLine = startLine + 1\n let found = false\n while (nextLine <= endLine) {\n const sPos = s.bMarks[nextLine] + s.tShift[nextLine]\n const ePos = s.eMarks[nextLine]\n if (s.src.slice(sPos, ePos).trim() === ':::') {\n found = true\n break\n }\n nextLine++\n }\n if (!found)\n return false\n\n const tokenOpen = s.push('vmr_container_open', 'div', 1)\n // `tokenOpen` is runtime token object; keep using runtime helpers but avoid casting `s` to `any`.\n tokenOpen.attrSet('class', `vmr-container vmr-container-${name}`)\n\n const contentLines: string[] = []\n for (let i = startLine + 1; i < nextLine; i++) {\n const sPos = s.bMarks[i] + s.tShift[i]\n const ePos = s.eMarks[i]\n contentLines.push(s.src.slice(sPos, ePos))\n }\n\n // Open a paragraph, push inline content and then close paragraph\n s.push('paragraph_open', 'p', 1)\n const inlineToken = s.push('inline', '', 0)\n inlineToken.content = contentLines.join('\\n')\n inlineToken.map = [startLine + 1, nextLine]\n // Ensure children exist and parse the inline content into them so the renderer\n // won't encounter a 
null children array (which causes .length read errors).\n inlineToken.children = []\n s.md.inline.parse(inlineToken.content, s.md, (s as any).env, inlineToken.children)\n s.push('paragraph_close', 'p', -1)\n\n s.push('vmr_container_close', 'div', -1)\n\n s.line = nextLine + 1\n return true\n },\n )\n}\n","import type { MarkdownIt, Token } from 'markdown-it-ts'\n\nconst VOID_TAGS = new Set([\n 'area',\n 'base',\n 'br',\n 'col',\n 'embed',\n 'hr',\n 'img',\n 'input',\n 'link',\n 'meta',\n 'param',\n 'source',\n 'track',\n 'wbr',\n])\n\nexport function applyFixHtmlInlineTokens(md: MarkdownIt) {\n // Fix certain single-token inline HTML cases by expanding into [openTag, text, closeTag]\n // This helps downstream inline parsers (e.g., <a>text</a>) to recognize inner text reliably.\n md.core.ruler.push('fix_html_inline_tokens', (state: unknown) => {\n const s = state as unknown as { tokens?: Token[] }\n const toks = s.tokens ?? []\n\n // 有一些很特殊的场景,比如 html_block 开始 <thinking>,但是后面跟着很多段落,如果没匹配到</thinking>,中间的都应该合并为html_block的 content\n const tagStack: [string, number][] = []\n for (let i = 0; i < toks.length; i++) {\n const t = toks[i] as Token & { content?: string, children: any[] }\n if (t.type === 'html_block') {\n const tag = t.content?.match(/<([^\\s>/]+)/)?.[1] ?? ''\n const isClosingTag = /<\\s*\\/\\s*[^\\s>]+\\s*>/.test(t.content || '')\n if (!isClosingTag) {\n // 开始标签,入栈\n tagStack.push([tag, i])\n }\n else {\n // 结束标签,出栈\n if (tagStack.length > 0 && tagStack[tagStack.length - 1][0] === tag) {\n tagStack.pop()\n }\n }\n continue\n }\n else if (tagStack.length > 0) {\n // 如果在标签栈中,说明是未闭合标签的内容,合并到上一个 html_block\n if (t.type === 'paragraph_open' || t.type === 'paragraph_close') {\n // 应该删除这些标签\n toks.splice(i, 1)\n i-- // 调整索引\n continue\n }\n const content = t.content || ''\n const CLOSING_TAG_REGEX = new RegExp(`<\\\\s*\\\\/\\\\s*${tagStack[tagStack.length - 1][0]}\\\\s*>`)\n const isClosingTag = CLOSING_TAG_REGEX.test(content)\n\n if (content) {\n // 插入到栈顶标签对应的 html_block 中\n const [, openIndex] = tagStack[tagStack.length - 1]\n const openToken = toks[openIndex] as Token & { content?: string, loading: boolean }\n openToken.content = `${openToken.content || ''}\\n${content}`\n if (openToken.loading !== false)\n openToken.loading = !isClosingTag\n }\n if (isClosingTag) {\n tagStack.pop()\n }\n // 删除当前 token\n toks.splice(i, 1)\n i-- // 调整索引\n }\n else {\n continue\n }\n }\n\n for (let i = 0; i < toks.length; i++) {\n const t = toks[i] as Token & { content?: string, children: any[], loading?: boolean }\n if (t.type === 'html_block') {\n const tag = t.content?.match(/<([^\\s>/]+)/)?.[1] ?? ''\n // 如果是常见的 block 标签,则跳过,否则转换成 inline 处理\n if (['br', 'hr', 'img', 'input', 'link', 'meta', 'div', 'p', 'ul', 'li'].includes(tag))\n continue\n t.type = 'inline'\n const loading = t.content?.includes(`</${tag}>`) ? false : t.loading !== undefined ? t.loading : true\n t.children = [\n {\n type: 'html_block',\n content: t.content,\n tag: t.content?.match(/<([^\\s>/]+)/)?.[1] ?? '',\n loading,\n },\n ] as any[]\n continue\n }\n if (!t || t.type !== 'inline')\n continue\n\n // 修复children 是单个 html_inline的场景\n if (t.children.length === 2 && t.children[0].type === 'html_inline') {\n // 补充一个闭合标签\n const tag = t.children[0].content?.match(/<([^\\s>/]+)/)?.[1] ?? 
''\n // 如果是常见的 inline标签,则只追加结尾标签,否则转换成 html_block\n if (['a', 'span', 'strong', 'em', 'b', 'i', 'u'].includes(tag)) {\n t.children[0].loading = true\n t.children[0].tag = tag\n t.children.push({\n type: 'html_inline',\n tag,\n loading: true,\n content: `</${tag}>`,\n } as any)\n }\n else {\n t.children = [\n {\n type: 'html_block',\n loading: true,\n tag,\n content: t.children[0].content + t.children[1].content,\n } as any,\n ]\n }\n continue\n }\n else if (t.children.length === 3 && t.children[0].type === 'html_inline' && t.children[2].type === 'html_inline') {\n const tag = t.children[0].content?.match(/<([^\\s>/]+)/)?.[1] ?? ''\n // 如果是常见的 inline标签,则不处理,否则转换成 html_block\n if (['a', 'span', 'strong', 'em', 'b', 'i', 'u'].includes(tag))\n continue\n t.children = [\n {\n type: 'html_block',\n loading: false,\n tag,\n content: t.children.map(ct => ct.content).join(''),\n } as any,\n ]\n continue\n }\n // Only handle pathological cases where inline content is a single HTML-ish chunk\n if (!t.content?.startsWith('<') || (t as any).children?.length !== 1)\n continue\n\n const raw = String(t.content)\n const tagName = raw.match(/<([^\\s>/]+)/)?.[1]?.toLowerCase() ?? ''\n if (!tagName)\n continue\n\n const selfClosing = /\\/\\s*>\\s*$/.test(raw)\n const isVoid = selfClosing || VOID_TAGS.has(tagName)\n\n const htmlToken = t as unknown as { children: Array<{ type: string, content: string }> }\n\n if (isVoid) {\n // For void/self-closing tags, keep a single html_inline token\n htmlToken.children = [\n { type: 'html_inline', content: raw },\n ] as any\n continue\n }\n htmlToken.children.length = 0\n }\n })\n}\n","import type { MarkdownIt } from 'markdown-it-ts'\n\n// Match link prefix: \"[text](href\" without requiring a closing ')'.\n// The href part may be empty, so use '*' (no extra '?').\nconst LINK_PREFIX_RE = /^\\[([^\\]]*)\\]\\(([^)\\s]*)/\n\nexport function applyFixLinkInline(md: MarkdownIt) {\n // Inline tokenizer that tries to recognize [text](href) and loading\n // link forms like \"[x](http://a\" earlier, producing link_open/text/link_close\n // tokens so downstream code sees them as links during the inline pass.\n const rule = (state: unknown, silent: boolean) => {\n const s = state as unknown as { src: string, pos: number, push: (type: string, tag?: string, nesting?: number) => any }\n const start = s.pos\n if (s.src[start] !== '[')\n return false\n\n // Don't handle image syntax here\n if (start > 0 && s.src[start - 1] === '!')\n return false\n\n // Look for closing ']' and opening '(' after it\n const rest = s.src.slice(start)\n\n const m = LINK_PREFIX_RE.exec(rest)\n if (!m)\n return false\n\n if (silent)\n return true\n\n const text = m[1] ?? ''\n const href = m[2] ?? ''\n // Be conservative: if the link text contains characters that indicate\n // emphasis or emoji shortcodes (e.g. '*' or ':'), don't pre-tokenize\n // here — let the core inline parser handle these ambiguous mid-states.\n if (text.includes('*') || text.includes(':'))\n return false\n const idxClose = rest.indexOf(')')\n const hasClosingParen = idxClose !== -1\n\n // push link_open\n const open = s.push('link_open', 'a', 1)\n open.attrs = [['href', href]]\n // push inner text\n const txt = s.push('text', '', 0)\n txt.content = text\n\n // only emit link_close if the source actually contained a closing paren\n if (hasClosingParen) {\n s.push('link_close', 'a', -1)\n // consume through the closing paren\n s.pos += idxClose + 1\n }\n else {\n // consume the matched prefix (e.g. 
\"[x](http://a\") but do not\n // emit a link_close so downstream logic treats this as a loading link\n s.pos += m[0].length\n }\n return true\n }\n\n // Insert before default 'link' rule to take precedence\n md.inline.ruler.before('link', 'fix_link_inline', rule)\n}\n","import type { MarkdownIt } from 'markdown-it-ts'\nimport type { MarkdownToken } from '../types'\n\n// todo: The code below has been refactored because it involves a lot of repetitive data transformations and needs to accommodate different scenarios, such as plain text. It should now be correctly converted to a link.\nexport function applyFixLinkTokens(md: MarkdownIt) {\n // Run after the inline rule so markdown-it has produced inline tokens\n // for block-level tokens; we then adjust each inline token's children\n // so downstream code receives corrected token arrays during the same\n // parsing pass.\n md.core.ruler.after('inline', 'fix_link_tokens', (state: unknown) => {\n const s = state as unknown as { tokens?: Array<{ type?: string, children?: any[] }> }\n const toks = s.tokens ?? []\n for (let i = 0; i < toks.length; i++) {\n const t = toks[i]\n if (t && t.type === 'inline' && Array.isArray(t.children)) {\n try {\n t.children = fixLinkToken(t.children)\n }\n catch (e) {\n // Swallow errors to avoid breaking parsing; keep original children\n // so parse still succeeds even if our fix fails for an unexpected shape.\n // Errors should be rare and indicate malformed token arrays.\n\n console.error('[applyFixLinkTokens] failed to fix inline children', e)\n }\n }\n }\n })\n}\n\nfunction fixLinkToken(tokens: MarkdownToken[]): MarkdownToken[] {\n if (tokens.length < 4)\n return tokens\n\n for (let i = 0; i <= tokens.length - 1; i++) {\n if (!tokens[i])\n break\n if (tokens[i]?.type === 'text' && tokens[i].content?.endsWith('(') && tokens[i + 1]?.type === 'link_open') {\n const match = tokens[i].content!.match(/\\[([^\\]]+)\\]/)\n if (match) {\n let beforeText = tokens[i].content!.slice(0, match.index)\n const emphasisMatch = beforeText.match(/(\\*+)$/)\n const replacerTokens = []\n if (emphasisMatch) {\n beforeText = beforeText.slice(0, emphasisMatch.index)\n if (beforeText) {\n replacerTokens.push({\n type: 'text',\n content: beforeText,\n raw: beforeText,\n })\n }\n const text = match[1]\n const type = emphasisMatch[1].length\n if (type === 1) {\n replacerTokens.push({ type: 'em_open', tag: 'em', nesting: 1 })\n }\n else if (type === 2) {\n replacerTokens.push({ type: 'strong_open', tag: 'strong', nesting: 1 })\n }\n else if (type === 3) {\n replacerTokens.push({ type: 'strong_open', tag: 'strong', nesting: 1 })\n replacerTokens.push({ type: 'em_open', tag: 'em', nesting: 1 })\n }\n let href = tokens[i + 2]?.content || ''\n if (tokens[i + 4]?.type === 'text' && !tokens[i + 4].content?.startsWith(')')) {\n href += tokens[i + 4]?.content || ''\n tokens[i + 4].content = ''\n }\n replacerTokens.push(\n {\n type: 'link',\n loading: !tokens[i + 4]?.content?.startsWith(')'),\n href,\n title: '',\n text,\n children: [\n {\n type: 'text',\n content: text,\n raw: text,\n },\n ],\n raw: String(`[${text}](${href})`),\n },\n )\n if (type === 1) {\n replacerTokens.push({ type: 'em_close', tag: 'em', nesting: -1 })\n }\n else if (type === 2) {\n replacerTokens.push({ type: 'strong_close', tag: 'strong', nesting: -1 })\n }\n else if (type === 3) {\n replacerTokens.push({ type: 'em_close', tag: 'em', nesting: -1 })\n replacerTokens.push({ type: 'strong_close', tag: 'strong', nesting: -1 })\n }\n if (tokens[i + 4]?.type === 'text') {\n 
const afterText = tokens[i + 4].content?.replace(/^\\)\\**/, '')\n if (afterText) {\n replacerTokens.push({\n type: 'text',\n content: afterText,\n raw: afterText,\n })\n }\n tokens.splice(i, 5, ...replacerTokens)\n }\n else {\n tokens.splice(i, 4, ...replacerTokens)\n }\n }\n else {\n if (beforeText) {\n replacerTokens.push({\n type: 'text',\n content: beforeText,\n raw: beforeText,\n })\n }\n const text = match[1]\n let href = tokens[i + 2]?.content || ''\n if (tokens[i + 4]?.type === 'text' && !tokens[i + 4].content?.startsWith(')')) {\n href += tokens[i + 4]?.content || ''\n tokens[i + 4].content = ''\n }\n replacerTokens.push(...[\n {\n type: 'link',\n loading: !tokens[i + 4]?.content?.startsWith(')'),\n href,\n title: '',\n text,\n children: [\n {\n type: 'text',\n content: text,\n raw: text,\n },\n ],\n raw: String(`[${text}](${href})`),\n },\n ])\n if (tokens[i + 4]?.type === 'text') {\n const afterText = tokens[i + 4].content?.replace(/^\\)/, '')\n if (afterText) {\n replacerTokens.push({\n type: 'text',\n content: afterText,\n raw: afterText,\n })\n }\n tokens.splice(i, 5, ...replacerTokens)\n }\n else {\n tokens.splice(i, 4, ...replacerTokens)\n }\n }\n i -= (replacerTokens.length - 1)\n continue\n }\n }\n else if (tokens[i].type === 'link_open' && tokens[i].markup === 'linkify' && tokens[i - 1]?.type === 'text' && tokens[i - 1].content?.endsWith('(')) {\n if (tokens[i - 2]?.type === 'link_close') {\n // 合并link\n const replacerTokens = []\n const text = (tokens[i - 3].content || '')\n let href = tokens[i].attrs?.find(attr => attr[0] === 'href')?.[1] || ''\n\n if (tokens[i + 3]?.type === 'text') {\n const m = (tokens[i + 3]?.content ?? '').indexOf(')')\n const loading = m === -1\n if (m === -1) {\n href += (tokens[i + 3]?.content?.slice(0, m) || '')\n tokens[i + 3].content = ''\n }\n\n replacerTokens.push({\n type: 'link',\n loading,\n href,\n title: '',\n text,\n children: [\n {\n type: 'text',\n content: text,\n raw: text,\n },\n ],\n raw: String(`[${text}](${href})`),\n })\n const afterText = tokens[i + 3].content?.replace(/^\\)\\**/, '')\n if (afterText) {\n replacerTokens.push({\n type: 'text',\n content: afterText,\n raw: afterText,\n })\n }\n tokens.splice(i - 4, 8, ...replacerTokens)\n }\n else {\n replacerTokens.push({\n type: 'link',\n loading: true,\n href,\n title: '',\n text,\n children: [\n {\n type: 'text',\n content: href,\n raw: href,\n },\n ],\n raw: String(`[${text}](${href})`),\n })\n tokens.splice(i - 4, 7, ...replacerTokens)\n }\n continue\n }\n }\n if (tokens[i].type === 'link_close' && tokens[i].nesting === -1 && tokens[i + 1]?.type === 'text' && tokens[i - 1]?.type === 'text' && tokens[i + 2]?.type !== 'link_open') {\n // 修复链接后多余文本被包含在链接内的问题\n tokens[i - 2].loading = true\n const text = tokens[i - 1].content || ''\n let href = tokens[i - 2].attrs?.[0]?.[1] || ''\n let count = 3\n if (tokens[i].markup === 'linkify' && tokens[i + 1]?.type === 'text') {\n const m = (tokens[i + 1]?.content ?? 
'').indexOf(')')\n if (m === -1) {\n href += (tokens[i + 1]?.content?.slice(0, m) || '')\n tokens[i + 1].content = ''\n }\n count += 1\n }\n tokens.splice(i - 2, count, {\n type: 'link',\n loading: false,\n href,\n title: '',\n text,\n children: [\n {\n type: 'text',\n content: text,\n raw: text,\n },\n ],\n raw: String(`[${text}](${href})`),\n } as any)\n }\n else if (tokens[i].content?.startsWith('](') && tokens[i - 1].markup?.includes('*') && tokens[i - 4].type === 'text' && tokens[i - 4].content?.endsWith('[')) {\n const type = tokens[i - 1].markup!.length\n const replacerTokens = []\n const beforeText = tokens[i - 4].content!.slice(0, tokens[i - 4].content!.length - 1 - type)\n if (beforeText) {\n replacerTokens.push({\n type: 'text',\n content: beforeText,\n raw: beforeText,\n })\n }\n if (type === 1) {\n replacerTokens.push({ type: 'em_open', tag: 'em', nesting: 1 })\n }\n else if (type === 2) {\n replacerTokens.push({ type: 'strong_open', tag: 'strong', nesting: 1 })\n }\n else if (type === 3) {\n replacerTokens.push({ type: 'strong_open', tag: 'strong', nesting: 1 })\n replacerTokens.push({ type: 'em_open', tag: 'em', nesting: 1 })\n }\n const text = tokens[i - 2].content || ''\n let href = tokens[i].content!.slice(2)\n let loading = true\n if (tokens[i + 1]?.type === 'text') {\n const m = (tokens[i + 1]?.content ?? '').indexOf(')')\n loading = m === -1\n if (m === -1) {\n href += (tokens[i + 1]?.content?.slice(0, m) || '')\n tokens[i + 1].content = ''\n }\n }\n replacerTokens.push({\n type: 'link',\n loading,\n href,\n title: '',\n text,\n children: [\n {\n type: 'text',\n content: text,\n raw: text,\n },\n ],\n raw: String(`[${text}](${href})`),\n })\n if (type === 1) {\n replacerTokens.push({ type: 'em_close', tag: 'em', nesting: -1 })\n }\n else if (type === 2) {\n replacerTokens.push({ type: 'strong_close', tag: 'strong', nesting: -1 })\n }\n else if (type === 3) {\n replacerTokens.push({ type: 'em_close', tag: 'em', nesting: -1 })\n replacerTokens.push({ type: 'strong_close', tag: 'strong', nesting: -1 })\n }\n if (tokens[i + 1]?.type === 'text') {\n const afterText = tokens[i + 1].content?.replace(/^\\)\\**/, '')\n if (afterText) {\n replacerTokens.push({\n type: 'text',\n content: afterText,\n raw: afterText,\n })\n }\n tokens.splice(i - 4, 8, ...replacerTokens)\n }\n else if (tokens[i + 1]?.type === 'link_open') {\n // 特殊情况其实要把href也处理,这里可以直接跳过\n tokens.splice(i - 4, 10, ...replacerTokens)\n }\n else {\n tokens.splice(i - 4, 7, ...replacerTokens)\n }\n i -= (replacerTokens.length - 1)\n continue\n }\n }\n return tokens\n}\n","import type { MarkdownIt } from 'markdown-it-ts'\nimport type { MarkdownToken } from '../types'\n\nexport function applyFixListItem(md: MarkdownIt) {\n // Normalize list-item related inline tokens after inline tokenization\n // so downstream parsers see corrected children.\n md.core.ruler.after('inline', 'fix_list_item_tokens', (state: unknown) => {\n const s = state as unknown as { tokens?: Array<{ type?: string, children?: any[] }> }\n const toks = s.tokens ?? 
[]\n for (let i = 0; i < toks.length; i++) {\n const t = toks[i]\n if (t && t.type === 'inline' && Array.isArray(t.children)) {\n try {\n t.children = fixListItem(t.children)\n }\n catch (e) {\n // Keep original children on error to avoid breaking parsing\n\n console.error('[applyFixListItem] failed to fix inline children', e)\n }\n }\n }\n })\n}\n\nfunction fixListItem(tokens: MarkdownToken[]): MarkdownToken[] {\n const last = tokens[tokens.length - 1]\n const lastContent = String(last?.content ?? '')\n\n if (last?.type === 'text' && (/^\\s*\\d+\\.\\s*$/.test(lastContent) && tokens[tokens.length - 2]?.tag === 'br')) {\n tokens.splice(tokens.length - 1, 1)\n }\n\n return tokens\n}\n","import type { MarkdownIt } from 'markdown-it-ts'\nimport type { MarkdownToken } from 'stream-markdown-parser'\n\nexport function applyFixStrongTokens(md: MarkdownIt) {\n // Run after inline tokenization to normalize strong/em tokens in\n // each inline token's children. This ensures downstream inline\n // parsers receive a normalized token list.\n md.core.ruler.after('inline', 'fix_strong_tokens', (state: unknown) => {\n const s = state as unknown as { tokens?: Array<{ type?: string, children?: any[] }> }\n const toks = s.tokens ?? []\n for (let i = 0; i < toks.length; i++) {\n const t = toks[i]\n if (t && t.type === 'inline' && Array.isArray(t.children)) {\n try {\n t.children = fixStrongTokens(t.children)\n }\n catch (e) {\n // don't break parsing on plugin error\n\n console.error('[applyFixStrongTokens] failed to fix inline children', e)\n }\n }\n }\n })\n}\n\nfunction fixStrongTokens(tokens: MarkdownToken[]): MarkdownToken[] {\n const fixedTokens = [...tokens]\n if (tokens.length < 4)\n return fixedTokens\n const i = tokens.length - 4\n const token = tokens[i]\n if (!token)\n return fixedTokens\n const nextToken = tokens[i + 1]\n const tokenContent = String(token.content ?? '')\n if (token.type === 'link_open' && tokens[i - 1]?.type === 'em_open' && tokens[i - 2]?.type === 'text' && tokens[i - 2].content?.endsWith('*')) {\n const textContent = String(tokens[i - 2].content ?? '').slice(0, -1)\n\n const replaceTokens = [\n {\n type: 'strong_open',\n tag: 'strong',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '**',\n info: '',\n meta: null,\n },\n tokens[i],\n tokens[i + 1],\n tokens[i + 2],\n {\n type: 'strong_close',\n tag: 'strong',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '**',\n info: '',\n meta: null,\n },\n ]\n if (textContent) {\n replaceTokens.unshift({\n type: 'text',\n content: textContent,\n raw: textContent,\n })\n }\n fixedTokens.splice(i - 2, 6, ...replaceTokens)\n }\n else if (token.type === 'text' && tokenContent.endsWith('*') && nextToken.type === 'em_open') {\n // 解析有问题,要合并 emphasis 和 前面的 * 为 strong\n const _nextToken = tokens[i + 2]\n const count = _nextToken?.type === 'text' ? 4 : 3\n const insert = [\n {\n type: 'strong_open',\n tag: 'strong',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '**',\n info: '',\n meta: null,\n },\n {\n type: 'text',\n content: _nextToken?.type === 'text' ? String(_nextToken.content ?? 
'') : '',\n },\n {\n type: 'strong_close',\n tag: 'strong',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '**',\n info: '',\n meta: null,\n },\n ] as MarkdownToken[]\n const beforeText = tokenContent.slice(0, -1)\n if (beforeText) {\n insert.unshift({\n type: 'text',\n content: beforeText,\n raw: beforeText,\n })\n }\n fixedTokens.splice(i, count, ...insert)\n return fixedTokens\n }\n\n return fixedTokens\n}\n","import type { MarkdownIt } from 'markdown-it-ts'\nimport type { MarkdownToken } from '../types'\n\nexport function applyFixTableTokens(md: MarkdownIt) {\n // Run after block parsing so block-level tokens (including inline\n // children) are present. We replace the token array with the\n // fixed version returned by `fixTableTokens`.\n md.core.ruler.after('block', 'fix_table_tokens', (state: unknown) => {\n const s = state as unknown as { tokens?: any[] }\n try {\n const toks = s.tokens ?? []\n const fixed = fixTableTokens(toks)\n if (Array.isArray(fixed))\n s.tokens = fixed\n }\n catch (e) {\n // swallow errors to avoid breaking parsing; keep original tokens\n console.error('[applyFixTableTokens] failed to fix table tokens', e)\n }\n })\n}\n\nfunction createStart() {\n return [\n {\n type: 'table_open',\n tag: 'table',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '',\n info: '',\n level: 0,\n loading: true,\n meta: null,\n },\n {\n type: 'thead_open',\n tag: 'thead',\n attrs: null,\n block: true,\n level: 1,\n children: null,\n },\n {\n type: 'tr_open',\n tag: 'tr',\n attrs: null,\n block: true,\n level: 2,\n children: null,\n },\n\n ]\n}\nfunction createEnd() {\n return [\n {\n type: 'tr_close',\n tag: 'tr',\n attrs: null,\n block: true,\n level: 2,\n children: null,\n },\n {\n type: 'thead_close',\n tag: 'thead',\n attrs: null,\n block: true,\n level: 1,\n children: null,\n },\n {\n type: 'table_close',\n tag: 'table',\n attrs: null,\n map: null,\n children: null,\n content: '',\n markup: '',\n info: '',\n level: 0,\n meta: null,\n },\n ]\n}\nfunction createTh(text: string) {\n return [{\n type: 'th_open',\n tag: 'th',\n attrs: null,\n block: true,\n level: 3,\n children: null,\n }, {\n type: 'inline',\n tag: '',\n children: [\n {\n tag: '',\n type: 'text',\n block: false,\n content: text,\n children: null,\n },\n ],\n content: text,\n level: 4,\n attrs: null,\n block: true,\n }, {\n type: 'th_close',\n tag: 'th',\n attrs: null,\n block: true,\n level: 3,\n children: null,\n }]\n}\nexport function fixTableTokens(tokens: MarkdownToken[]): MarkdownToken[] {\n const fixedTokens = [...tokens]\n if (tokens.length < 3)\n return fixedTokens\n const i = tokens.length - 2\n const token = tokens[i]\n if (token.type === 'inline') {\n const tcontent = String(token.content ?? '')\n const childContent = String(token.children?.[0]?.content ?? 
'')\n\n if (/^\\|(?:[^|\\n]+\\|?)+/.test(tcontent)) {\n // 解析 table\n const body = childContent.slice(1).split('|').map(i => i.trim()).filter(Boolean).flatMap(i => createTh(i))\n const insert = ([\n ...createStart(),\n ...body,\n ...createEnd(),\n ] as unknown) as MarkdownToken[]\n fixedTokens.splice(i - 1, 3, ...insert)\n }\n else if (/^\\|(?:[^|\\n]+\\|)+\\n\\|:?-/.test(tcontent)) {\n // 解析 table\n const body = childContent.slice(1, -1).split('|').map(i => i.trim()).flatMap(i => createTh(i))\n const insert = ([\n ...createStart(),\n ...body,\n ...createEnd(),\n ] as unknown) as MarkdownToken[]\n fixedTokens.splice(i - 1, 3, ...insert)\n }\n else if (/^\\|(?:[^|\\n:]+\\|)+\\n\\|:?$/.test(tcontent)) {\n token.content = tcontent.slice(0, -2)\n token.children!.splice(2, 1)\n }\n }\n\n return fixedTokens\n}\n","export function findMatchingClose(src: string, startIdx: number, open: string, close: string) {\n const len = src.length\n // Special-case $$ since it's a two-char delimiter that shouldn't\n // be interpreted as nested parentheses.\n if (open === '$$' && close === '$$') {\n let i = startIdx\n while (i < len - 1) {\n if (src[i] === '$' && src[i + 1] === '$') {\n // ensure not escaped\n let k = i - 1\n let backslashes = 0\n while (k >= 0 && src[k] === '\\\\') {\n backslashes++\n k--\n }\n if (backslashes % 2 === 0)\n return i\n }\n i++\n }\n return -1\n }\n\n const openChar = open[open.length - 1]\n const closeSeq = close\n let depth = 0\n let i = startIdx\n while (i < len) {\n // If there's an unescaped close sequence here\n if (src.slice(i, i + closeSeq.length) === closeSeq) {\n let k = i - 1\n let backslashes = 0\n while (k >= 0 && src[k] === '\\\\') {\n backslashes++\n k--\n }\n if (backslashes % 2 === 0) {\n if (depth === 0)\n return i\n depth--\n i += closeSeq.length\n continue\n }\n }\n\n const ch = src[i]\n // skip escaped characters\n if (ch === '\\\\') {\n i += 2\n continue\n }\n\n if (ch === openChar) {\n depth++\n }\n else if (ch === closeSeq[closeSeq.length - 1]) {\n if (depth > 0)\n depth--\n }\n i++\n }\n return -1\n}\n\nexport default findMatchingClose\n","export const TEX_BRACE_COMMANDS = [\n 'mathbf',\n 'boldsymbol',\n 'mathbb',\n 'mathcal',\n 'mathfrak',\n 'mathrm',\n 'mathit',\n 'mathsf',\n 'vec',\n 'hat',\n 'bar',\n 'tilde',\n 'overline',\n 'underline',\n 'mathscr',\n 'mathnormal',\n 'operatorname',\n 'mathbf*',\n]\n\nexport const ESCAPED_TEX_BRACE_COMMANDS = TEX_BRACE_COMMANDS.map(c => c.replace(/[.*+?^${}()|[\\\\]\"\\]/g, '\\\\$&')).join('|')\n\nconst TEX_CMD_RE = /\\\\[a-z]+/i\nconst PREFIX_CLASS = '(?:\\\\\\\\|\\\\u0008)'\nconst TEX_CMD_WITH_BRACES_RE = new RegExp(`${PREFIX_CLASS}(?:${ESCAPED_TEX_BRACE_COMMANDS})\\\\s*\\\\{[^}]+\\\\}`, 'i')\n// Detect brace-taking TeX commands even when the leading backslash or the\n// closing brace/content is missing (e.g. 
\"operatorname{\" or \"operatorname{span\").\n// This helps the heuristic treat incomplete but clearly TeX-like fragments\n// as math-like instead of plain text.\nconst TEX_BRACE_CMD_START_RE = new RegExp(`(?:${PREFIX_CLASS})?(?:${ESCAPED_TEX_BRACE_COMMANDS})\\s*\\{`, 'i')\nconst TEX_SPECIFIC_RE = /\\\\(?:text|frac|left|right|times)/\n// Match common math operator symbols or named commands.\n// Avoid treating the C/C++ increment operator (\"++\") as a math operator by\n// ensuring a lone '+' isn't matched when it's part of a '++' sequence.\n// Use a RegExp constructed from a string to avoid issues escaping '/' in a\n// regex literal on some platforms/linters.\n// eslint-disable-next-line prefer-regex-literals\nconst OPS_RE = new RegExp('(?<!\\\\+)\\\\+(?!\\\\+)|[=\\\\-*/^<>]|\\\\\\\\times|\\\\\\\\pm|\\\\\\\\cdot|\\\\\\\\le|\\\\\\\\ge|\\\\\\\\neq')\nconst FUNC_CALL_RE = /[A-Z]+\\s*\\([^)]+\\)/i\nconst WORDS_RE = /\\b(?:sin|cos|tan|log|ln|exp|sqrt|frac|sum|lim|int|prod)\\b/\n// Heuristic to detect common date/time patterns like 2025/9/30 21:37:24 and\n// avoid classifying them as math merely because they contain '/' or ':'\nconst DATE_TIME_RE = /\\b\\d{4}\\/\\d{1,2}\\/\\d{1,2}(?:[ T]\\d{1,2}:\\d{2}(?::\\d{2})?)?\\b/\nexport function isMathLike(s: string) {\n if (!s)\n return false\n\n // Normalize accidental control characters that may appear if a single\n // backslash sequence was interpreted in a JS string literal (for example\n // '\\\\b' becoming a backspace U+0008). Convert such control characters\n // back into their two-character escaped forms so our regexes can match\n // TeX commands reliably.\n // eslint-disable-next-line no-control-regex\n const norm = s.replace(/\\u0008/g, '\\\\b')\n const stripped = norm.trim()\n\n // quick bailouts\n // If the content looks like a timestamp or date, it's not math.\n if (DATE_TIME_RE.test(stripped))\n return false\n if (stripped.includes('**'))\n return false\n if (stripped.length > 2000)\n return true // very long blocks likely math\n\n if (/[./]\\s*\\D|\\D\\s*[./]/.test(s)) {\n return false\n }\n\n // TeX commands e.g. \\frac, \\alpha\n const texCmd = TEX_CMD_RE.test(norm)\n const texCmdWithBraces = TEX_CMD_WITH_BRACES_RE.test(norm)\n const texBraceStart = TEX_BRACE_CMD_START_RE.test(norm)\n\n // Explicit common TeX tokens (keeps compatibility with previous heuristic)\n const texSpecific = TEX_SPECIFIC_RE.test(norm)\n const subscriptPattern = /(?:^|[^\\w\\\\])(?:[A-Z]|\\\\[A-Z]+)_(?:\\{[^}]+\\}|[A-Z0-9\\\\])/i\n const superscriptPattern = /(?:^|[^\\w\\\\])(?:[A-Z]|\\\\[A-Z]+)\\^(?:\\{[^}]+\\}|[A-Z0-9\\\\])/i\n const superSub = subscriptPattern.test(norm) || superscriptPattern.test(norm)\n // common math operator symbols or named commands\n const ops = OPS_RE.test(norm)\n // function-like patterns: f(x), sin(x)\n const funcCall = FUNC_CALL_RE.test(norm)\n // common math words\n const words = WORDS_RE.test(norm)\n // 纯单个英文字命,也渲染成数学公式\n // e.g. 
(w) (x) (y) (z)\n // const pureWord = /^\\([a-zA-Z]\\)$/i.test(stripped)\n\n return texCmd || texCmdWithBraces || texBraceStart || texSpecific || superSub || ops || funcCall || words\n}\n","import type { MarkdownIt } from 'markdown-it-ts'\nimport type { MathOptions } from '../config'\n\nimport findMatchingClose from '../findMatchingClose'\nimport { ESCAPED_TEX_BRACE_COMMANDS, isMathLike } from './isMathLike'\n\n// Heuristic to decide whether a piece of text is likely math.\n// Matches common TeX commands, math operators, function-call patterns like f(x),\n// superscripts/subscripts, and common math words.\n// Common TeX formatting commands that take a brace argument, e.g. \\boldsymbol{...}\n// Keep this list in a single constant so it's easy to extend/test.\n\n// Precompute an escaped, |-joined string of TEX brace commands so we don't\n// rebuild it on every call to `isMathLike`.\n\n// Common KaTeX/TeX command names that might lose their leading backslash.\n// Keep this list conservative to avoid false-positives in normal text.\nexport const KATEX_COMMANDS = [\n 'ldots',\n 'cdots',\n 'quad',\n 'in',\n 'end',\n 'infty',\n 'perp',\n 'mid',\n 'operatorname',\n 'to',\n 'rightarrow',\n 'leftarrow',\n 'math',\n 'mathrm',\n 'mathbf',\n 'mathit',\n 'mathbb',\n 'mathcal',\n 'mathfrak',\n 'alpha',\n 'beta',\n 'gamma',\n 'delta',\n 'epsilon',\n 'lambda',\n 'sum',\n 'prod',\n 'int',\n 'sqrt',\n 'fbox',\n 'boxed',\n 'color',\n 'rule',\n 'edef',\n 'fcolorbox',\n 'hline',\n 'hdashline',\n 'cdot',\n 'times',\n 'pm',\n 'le',\n 'ge',\n 'neq',\n 'sin',\n 'cos',\n 'tan',\n 'log',\n 'ln',\n 'exp',\n 'lim',\n 'frac',\n 'text',\n 'left',\n 'right',\n]\n\n// Precompute escaped KATEX commands and default regex used by\n// `normalizeStandaloneBackslashT` when no custom commands are provided.\n// Sort commands by length (desc) before joining so longer commands like\n// 'operatorname' are preferred over shorter substrings like 'to'. This\n// avoids accidental partial matches when building the regex.\nexport const ESCAPED_KATEX_COMMANDS = KATEX_COMMANDS\n .slice()\n .sort((a, b) => b.length - a.length)\n .map(c => c.replace(/[.*+?^${}()|[\\\\]\\\\\\]/g, '\\\\$&'))\n .join('|')\nconst CONTROL_CHARS_CLASS = '[\\t\\r\\b\\f\\v]'\n\n// Precompiled helpers reused by normalization\nconst SPAN_CURLY_RE = /span\\{([^}]+)\\}/\nconst OPERATORNAME_SPAN_RE = /\\\\operatorname\\{span\\}\\{((?:[^{}]|\\{[^}]*\\})+)\\}/\nconst SINGLE_BACKSLASH_NEWLINE_RE = /(^|[^\\\\])\\\\\\r?\\n/g\nconst ENDING_SINGLE_BACKSLASH_RE = /(^|[^\\\\])\\\\$/g\n\n// Cache for dynamically built regexes depending on commands list\nconst DEFAULT_MATH_RE = new RegExp(`${CONTROL_CHARS_CLASS}|(?<!\\\\\\\\|\\\\w)(${ESCAPED_KATEX_COMMANDS})\\\\b`, 'g')\nconst MATH_RE_CACHE = new Map<string, RegExp>()\nconst BRACE_CMD_RE_CACHE = new Map<string, RegExp>()\n\nfunction getMathRegex(commands: ReadonlyArray<string> | undefined) {\n if (!commands)\n return DEFAULT_MATH_RE\n const arr = [...commands]\n arr.sort((a, b) => b.length - a.length)\n const key = arr.join('\\u0001')\n const cached = MATH_RE_CACHE.get(key)\n if (cached)\n return cached\n const commandPattern = `(?:${arr.map(c => c.replace(/[.*+?^${}()|[\\\\]\\\\\"\\]/g, '\\\\$&')).join('|')})`\n const re = new RegExp(`${CONTROL_CHARS_CLASS}|(?<!\\\\\\\\|\\\\w)(${commandPattern})\\\\b`, 'g')\n MATH_RE_CACHE.set(key, re)\n return re\n}\n\nfunction getBraceCmdRegex(useDefault: boolean, commands: ReadonlyArray<string> | undefined) {\n const arr = useDefault ? [] : [...(commands ?? 
[])]\n if (!useDefault)\n arr.sort((a, b) => b.length - a.length)\n const key = useDefault ? '__default__' : arr.join('\\u0001')\n const cached = BRACE_CMD_RE_CACHE.get(key)\n if (cached)\n return cached\n const braceEscaped = useDefault\n ? [ESCAPED_TEX_BRACE_COMMANDS, ESCAPED_KATEX_COMMANDS].filter(Boolean).join('|')\n : [\n arr.map(c => c.replace(/[.*+?^${}()|[\\\\]\\\\\\]/g, '\\\\$&')).join('|'),\n ESCAPED_TEX_BRACE_COMMANDS,\n ].filter(Boolean).join('|')\n const re = new RegExp(`(^|[^\\\\\\\\\\\\w])(${braceEscaped})\\\\s*\\\\{`, 'g')\n BRACE_CMD_RE_CACHE.set(key, re)\n return re\n}\n\n// Hoisted map of control characters -> escaped letter (e.g. '\\t' -> 't').\n// Kept at module scope to avoid recreating on every normalization call.\nconst CONTROL_MAP: Record<string, string> = {\n '\\t': 't',\n '\\r': 'r',\n '\\b': 'b',\n '\\f': 'f',\n '\\v': 'v',\n}\n\nfunction countUnescapedStrong(s: string) {\n const re = /(^|[^\\\\])(__|\\*\\*)/g\n let m: RegExpExecArray | null\n let c = 0\n // eslint-disable-next-line unused-imports/no-unused-vars\n while ((m = re.exec(s)) !== null) {\n c++\n }\n return c\n}\n\nexport function normalizeStandaloneBackslashT(s: string, opts?: MathOptions) {\n const commands = opts?.commands ?? KATEX_COMMANDS\n const escapeExclamation = opts?.escapeExclamation ?? true\n\n const useDefault = opts?.commands == null\n\n // Build or reuse regex: match control chars or unescaped command words.\n const re = getMathRegex(useDefault ? undefined : commands)\n\n let out = s.replace(re, (m: string, cmd?: string) => {\n if (CONTROL_MAP[m] !== undefined)\n return `\\\\${CONTROL_MAP[m]}`\n if (cmd && commands.includes(cmd))\n return `\\\\${cmd}`\n return m\n })\n\n // Escape standalone '!' but don't double-escape already escaped ones.\n if (escapeExclamation)\n out = out.replace(/(^|[^\\\\])!/g, '$1\\\\!')\n\n // Final pass: some TeX command names take a brace argument and may have\n // lost their leading backslash, e.g. \"operatorname{span}\". Ensure we\n // restore a backslash before known brace-taking commands when they are\n // followed by '{' and are not already escaped.\n // Use default escaped list when possible. Include TEX_BRACE_COMMANDS so\n // known brace-taking TeX commands (e.g. `text`, `boldsymbol`) are also\n // restored when their leading backslash was lost.\n let result = out\n const braceCmdRe = getBraceCmdRegex(useDefault, useDefault ? undefined : commands)\n result = result.replace(braceCmdRe, (_m: string, p1: string, p2: string) => `${p1}\\\\${p2}{`)\n result = result.replace(SPAN_CURLY_RE, 'span\\\\{$1\\\\}')\n .replace(OPERATORNAME_SPAN_RE, '\\\\operatorname{span}\\\\{$1\\\\}')\n\n // If a single backslash appears immediately before a newline (e.g. \"... 8 \\n5...\"),\n // it's likely intended as a LaTeX linebreak (`\\\\`). 
Double it, but avoid\n // changing already escaped `\\\\` sequences.\n // Match a single backslash not preceded by another backslash, followed by an optional CR and a LF.\n result = result.replace(SINGLE_BACKSLASH_NEWLINE_RE, '$1\\\\\\\\\\n')\n\n // If the string ends with a single backslash (no trailing newline), double it.\n result = result.replace(ENDING_SINGLE_BACKSLASH_RE, '$1\\\\\\\\')\n return result\n}\nexport function applyMath(md: MarkdownIt, mathOpts?: MathOptions) {\n // Inline rule for \\(...\\) and $$...$$ and $...$\n const mathInline = (state: unknown, silent: boolean) => {\n const s = state as any\n\n if (/^\\*[^*]+/.test(s.src)) {\n return false\n }\n const delimiters: [string, string][] = [\n ['$$', '$$'],\n ['\\\\(', '\\\\)'],\n ['\\(', '\\)'],\n ]\n\n let searchPos = 0\n let preMathPos = 0\n // use findMatchingClose from util\n for (const [open, close] of delimiters) {\n // We'll scan the entire inline source and tokenize all occurrences\n const src = s.src\n let foundAny = false\n const pushText = (text: string) => {\n // sanitize unexpected values\n if (text === 'undefined' || text == null) {\n text = ''\n }\n if (text === '\\\\') {\n s.pos = s.pos + text.length\n searchPos = s.pos\n return\n }\n if (text === '\\\\)' || text === '\\\\(') {\n const t = s.push('text_special', '', 0)\n t.content = text === '\\\\)' ? ')' : '('\n t.markup = text\n s.pos = s.pos + text.length\n searchPos = s.pos\n return\n }\n\n if (!text)\n return\n\n const t = s.push('text', '', 0)\n t.content = text\n s.pos = s.pos + text.length\n searchPos = s.pos\n }\n\n while (true) {\n if (searchPos >= src.length)\n break\n const index = src.indexOf(open, searchPos)\n if (index === -1)\n break\n\n // If the delimiter is immediately preceded by a ']' (possibly with\n // intervening spaces), it's likely part of a markdown link like\n // `[text](...)`, so we should not treat this '(' as the start of\n // an inline math span. Also guard the index to avoid OOB access.\n if (index > 0) {\n let i = index - 1\n // skip spaces between ']' and the delimiter\n while (i >= 0 && src[i] === ' ')\n i--\n if (i >= 0 && src[i] === ']')\n return false\n }\n // 有可能遇到 \\((\\operatorname{span}\\\\{\\boldsymbol{\\alpha}\\\\})^\\perp\\)\n // 这种情况,前面的 \\( 是数学公式的开始,后面的 ( 是普通括号\n // endIndex 需要找到与 open 对应的 close\n // 不能简单地用 indexOf 找到第一个 close — 需要处理嵌套与转义字符\n const endIdx = findMatchingClose(src, index + open.length, open, close)\n\n if (endIdx === -1) {\n // no matching close for this opener; skip forward\n const content = src.slice(index + open.length)\n if (content.includes(open)) {\n searchPos = src.indexOf(open, index + open.length)\n continue\n }\n if (endIdx === -1) {\n if (isMathLike(content)) {\n searchPos = index + open.length\n foundAny = true\n if (!silent) {\n s.pending = ''\n const toPushBefore = preMathPos ? src.slice(preMathPos, searchPos) : src.slice(0, searchPos)\n const isStrongPrefix = countUnescapedStrong(toPushBefore) % 2 === 1\n\n if (preMathPos) {\n pushText(src.slice(preMathPos, searchPos))\n }\n else {\n let text = src.slice(0, searchPos)\n if (text.endsWith(open))\n text = text.slice(0, text.length - open.length)\n pushText(text)\n }\n if (isStrongPrefix) {\n const strongToken = s.push('strong_open', '', 0)\n strongToken.markup = src.slice(0, index + 2)\n const token = s.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? 
'$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = true\n strongToken.content = content\n s.push('strong_close', '', 0)\n }\n else {\n const token = s.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? '$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = true\n }\n // consume the full inline source\n s.pos = src.length\n }\n searchPos = src.length\n preMathPos = searchPos\n }\n break\n }\n }\n const content = src.slice(index + open.length, endIdx)\n if (!isMathLike(content)) {\n // push remaining text after last match\n // not math-like; skip this match and continue scanning\n searchPos = endIdx + close.length\n const text = src.slice(s.pos, searchPos)\n if (!s.pending)\n pushText(text)\n continue\n }\n foundAny = true\n\n if (!silent) {\n // push text before this math\n const before = src.slice(0, index)\n // If we already consumed some content, avoid duplicating the prefix\n // Only push the portion from previous search position\n const prevConsumed = src.slice(0, searchPos)\n let toPushBefore = prevConsumed ? src.slice(preMathPos, index) : before\n const isStrongPrefix = countUnescapedStrong(toPushBefore) % 2 === 1\n if (index !== s.pos && isStrongPrefix) {\n toPushBefore = s.pending + src.slice(s.pos, index)\n }\n\n // strong prefix handling (preserve previous behavior)\n if (s.pending !== toPushBefore) {\n s.pending = ''\n if (isStrongPrefix) {\n const _match = toPushBefore.match(/(\\*+)/)\n const after = toPushBefore.slice(_match!.index! + _match![0].length)\n pushText(toPushBefore.slice(0, _match!.index!))\n const strongToken = s.push('strong_open', '', 0)\n strongToken.markup = _match![0]\n const textToken = s.push('text', '', 0)\n textToken.content = after\n s.push('strong_close', '', 0)\n }\n else {\n pushText(toPushBefore)\n }\n }\n if (isStrongPrefix) {\n const strongToken = s.push('strong_open', '', 0)\n strongToken.markup = '**'\n const token = s.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? '$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = false\n const raw = src.slice(endIdx + close.length)\n const isBeforeClose = raw.startsWith('*')\n if (isBeforeClose) {\n s.push('strong_close', '', 0)\n }\n if (raw) {\n const textContentToken = s.push('text', '', 0)\n textContentToken.content = (raw == null ? '' : String(raw)).replace(/^\\*+/, '')\n }\n if (!isBeforeClose)\n s.push('strong_close', '', 0)\n s.pos = src.length\n searchPos = src.length\n preMathPos = searchPos\n continue\n }\n else {\n const token = s.push('math_inline', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content, mathOpts)\n token.markup = open === '$$' ? '$$' : open === '\\\\(' ? '\\\\(\\\\)' : open === '$' ? 
'$' : '()'\n token.raw = `${open}${content}${close}`\n token.loading = false\n }\n }\n\n searchPos = endIdx + close.length\n preMathPos = searchPos\n s.pos = searchPos\n }\n\n if (foundAny) {\n if (!silent) {\n // push remaining text after last match\n if (searchPos < src.length)\n pushText(src.slice(searchPos))\n // consume the full inline source\n s.pos = src.length\n }\n else {\n // in silent mode, advance position past what we scanned\n s.pos = searchPos\n }\n\n return true\n }\n }\n\n return false\n }\n\n // Block math rule similar to previous implementation\n const mathBlock = (\n state: unknown,\n startLine: number,\n endLine: number,\n silent: boolean,\n ) => {\n const s = state as any\n const delimiters: [string, string][] = [\n ['\\\\[', '\\\\]'],\n ['\\[', '\\]'],\n ['$$', '$$'],\n ]\n const startPos = s.bMarks[startLine] + s.tShift[startLine]\n const lineText = s.src.slice(startPos, s.eMarks[startLine]).trim()\n let matched = false\n let openDelim = ''\n let closeDelim = ''\n for (const [open, close] of delimiters) {\n if (lineText === open || lineText.startsWith(open)) {\n if (open.includes('[')) {\n if (lineText.replace('\\\\', '') === '[') {\n if (startLine + 1 < endLine) {\n matched = true\n openDelim = open\n closeDelim = close\n break\n }\n continue\n }\n }\n else {\n matched = true\n openDelim = open\n closeDelim = close\n break\n }\n }\n }\n\n if (!matched)\n return false\n if (silent)\n return true\n\n if (\n lineText.includes(closeDelim)\n && lineText.indexOf(closeDelim) > openDelim.length\n ) {\n const startDelimIndex = lineText.indexOf(openDelim)\n const endDelimIndex = lineText.indexOf(\n closeDelim,\n startDelimIndex + openDelim.length,\n )\n const content = lineText.slice(\n startDelimIndex + openDelim.length,\n endDelimIndex,\n )\n const token: any = s.push('math_block', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content)\n token.markup\n = openDelim === '$$' ? '$$' : openDelim === '[' ? '[]' : '\\\\[\\\\]'\n token.map = [startLine, startLine + 1]\n token.raw = `${openDelim}${content}${closeDelim}`\n token.block = true\n token.loading = false\n s.line = startLine + 1\n return true\n }\n\n let nextLine = startLine\n let content = ''\n let found = false\n\n const firstLineContent\n = lineText === openDelim ? '' : lineText.slice(openDelim.length)\n\n if (firstLineContent.includes(closeDelim)) {\n const endIndex = firstLineContent.indexOf(closeDelim)\n content = firstLineContent.slice(0, endIndex)\n found = true\n nextLine = startLine\n }\n else {\n if (firstLineContent)\n content = firstLineContent\n\n for (nextLine = startLine + 1; nextLine < endLine; nextLine++) {\n const lineStart = s.bMarks[nextLine] + s.tShift[nextLine]\n const lineEnd = s.eMarks[nextLine]\n const currentLine = s.src.slice(lineStart - 1, lineEnd)\n if (currentLine.trim() === closeDelim) {\n found = true\n break\n }\n else if (currentLine.includes(closeDelim)) {\n found = true\n const endIndex = currentLine.indexOf(closeDelim)\n content += (content ? '\\n' : '') + currentLine.slice(0, endIndex)\n break\n }\n content += (content ? '\\n' : '') + currentLine\n }\n }\n\n const token: any = s.push('math_block', 'math', 0)\n token.content = normalizeStandaloneBackslashT(content)\n token.markup\n = openDelim === '$$' ? '$$' : openDelim === '[' ? '[]' : '\\\\[\\\\]'\n token.raw = `${openDelim}${content}${content.startsWith('\\n') ? 
'\\n' : ''}${closeDelim}`\n token.map = [startLine, nextLine + 1]\n token.block = true\n token.loading = !found\n s.line = nextLine + 1\n return true\n }\n\n // Register math before the escape rule so inline math is tokenized\n // before markdown-it processes backslash escapes. This preserves\n // backslashes inside math content (e.g. \"\\\\{\") instead of having\n // the escape rule remove them from the token content.\n md.inline.ruler.before('escape', 'math', mathInline)\n md.block.ruler.before('paragraph', 'math_block', mathBlock, {\n alt: ['paragraph', 'reference', 'blockquote', 'list'],\n })\n}\n","import type { MarkdownIt } from 'markdown-it-ts'\n\nexport function applyRenderRules(md: MarkdownIt) {\n // Narrow external `any` surface to `unknown` and use local casts where\n // needed to interact with markdown-it runtime objects. This reduces the\n // exported `any` footprint while preserving runtime behavior.\n const defaultImage\n = md.renderer.rules.image\n || function (tokens: unknown, idx: number, options: unknown, env: unknown, self: unknown) {\n const tokensAny = tokens as unknown as import('../types').MarkdownToken[]\n const selfShape = self as unknown as { renderToken?: (tokens: import('../types').MarkdownToken[], idx: number, options?: unknown) => string }\n return selfShape.renderToken ? selfShape.renderToken(tokensAny, idx, options) : ''\n }\n\n md.renderer.rules.image = (\n tokens: unknown,\n idx: number,\n options: unknown,\n env: unknown,\n self: unknown,\n ) => {\n const tokensAny = tokens as unknown as import('../types').MarkdownToken[]\n const token = tokensAny[idx]\n // Narrow token shape to only the runtime helpers we need to call.\n const tokenShape = token as unknown as { attrSet?: (name: string, val: string) => void }\n tokenShape.attrSet?.('loading', 'lazy')\n const defaultImageFn = defaultImage as unknown as (tokens: import('../types').MarkdownToken[], idx: number, options: unknown, env?: unknown, self?: unknown) => string\n return defaultImageFn(tokensAny, idx, options, env, self)\n }\n\n md.renderer.rules.fence\n = md.renderer.rules.fence\n || ((tokens: unknown, idx: number) => {\n const tokensAny = tokens as unknown as import('../types').MarkdownToken[]\n const token = tokensAny[idx]\n const tokenShape = token as unknown as { info?: string, content?: string }\n const info = String(tokenShape.info ?? '').trim()\n const langClass = info\n ? `language-${md.utils.escapeHtml((info as string).split(/\\s+/g)[0])}`\n : ''\n const code = md.utils.escapeHtml(String(tokenShape.content ?? 
''))\n return `<pre class=\"${langClass}\"><code>${code}</code></pre>`\n })\n}\n","import type { MathOptions } from './config'\nimport MarkdownIt from 'markdown-it-ts'\nimport { getDefaultMathOptions } from './config'\nimport { applyContainers } from './plugins/containers'\nimport { applyFixHtmlInlineTokens } from './plugins/fixHtmlInline'\nimport { applyFixLinkInline } from './plugins/fixLinkInline'\nimport { applyFixLinkTokens } from './plugins/fixLinkTokens'\nimport { applyFixListItem } from './plugins/fixListItem'\nimport { applyFixStrongTokens } from './plugins/fixStrongTokens'\nimport { applyFixTableTokens } from './plugins/fixTableTokens'\nimport { applyMath } from './plugins/math'\nimport { applyRenderRules } from './renderers'\n\nexport interface FactoryOptions extends Record<string, unknown> {\n markdownItOptions?: Record<string, unknown>\n enableMath?: boolean\n enableContainers?: boolean\n mathOptions?: { commands?: string[], escapeExclamation?: boolean }\n}\n\nexport function factory(opts: FactoryOptions = {}) {\n const md = new MarkdownIt({\n html: true,\n linkify: true,\n typographer: true,\n stream: true,\n ...(opts.markdownItOptions ?? {}),\n })\n\n if (opts.enableMath ?? true) {\n const mergedMathOptions: MathOptions = { ...(getDefaultMathOptions() ?? {}), ...(opts.mathOptions ?? {}) }\n applyMath(md, mergedMathOptions)\n }\n if (opts.enableContainers ?? true)\n applyContainers(md)\n // Apply link-fixing plugin early so tokens produced during parsing\n // have corrected inline children. This runs during markdown-it's\n // core stage (after inline tokenization) instead of after parse.\n // Install inline-level link tokenizer before the built-in 'link' rule\n applyFixLinkInline(md)\n // Retain the core-stage fix as a fallback for any cases the inline\n // tokenizer does not handle.\n applyFixLinkTokens(md)\n // Also apply strong-token normalization at the same stage.\n applyFixStrongTokens(md)\n // Apply list-item inline normalization as well.\n applyFixListItem(md)\n // Apply table token normalization at block stage.\n applyFixTableTokens(md)\n applyRenderRules(md)\n applyFixHtmlInlineTokens(md)\n\n return md\n}\n","import type { CheckboxInputNode, CheckboxNode, MarkdownToken } from '../../types'\n\nexport function parseCheckboxToken(token: MarkdownToken): CheckboxNode {\n const tokenMeta = (token.meta ?? {}) as unknown as { checked?: boolean }\n return {\n type: 'checkbox',\n checked: tokenMeta.checked === true,\n raw: tokenMeta.checked ? '[x]' : '[ ]',\n }\n}\n\nexport function parseCheckboxInputToken(token: any): CheckboxInputNode {\n const tokenAny = token as unknown as { attrGet?: (name: string) => string | undefined }\n const rawAttr = tokenAny.attrGet ? tokenAny.attrGet('checked') : undefined\n const checked = rawAttr === '' || rawAttr === 'true'\n return {\n type: 'checkbox_input',\n checked,\n raw: checked ? '[x]' : '[ ]',\n }\n}\n","import type { EmojiNode, MarkdownToken } from '../../types'\n\nexport function parseEmojiToken(token: MarkdownToken): EmojiNode {\n const name = String(token.content ?? '')\n const markup = String(token.markup ?? 
'')\n return {\n type: 'emoji',\n name,\n markup,\n raw: `:${name}:`,\n }\n}\n","import type { EmphasisNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseEmphasisToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: EmphasisNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let emText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between em_open and em_close\n while (i < tokens.length && tokens[i].type !== 'em_close') {\n emText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: EmphasisNode = {\n type: 'emphasis',\n children,\n raw: `*${emText}*`,\n }\n\n // Skip to after em_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { CodeBlockNode, MarkdownToken } from '../../types'\n\n// Strip a final line that looks like a fence marker (``` etc.)\nconst TRAILING_FENCE_LINE_RE = /\\r?\\n[ \\t]*`+\\s*$/\n// Unified diff metadata/header line prefixes to skip when splitting a diff\nconst DIFF_HEADER_PREFIXES = ['diff ', 'index ', '--- ', '+++ ', '@@ ']\n// Newline splitter reused in this module\nconst NEWLINE_RE = /\\r?\\n/\n\nfunction splitUnifiedDiff(content: string) {\n const orig: string[] = []\n const updated: string[] = []\n for (const rawLine of content.split(NEWLINE_RE)) {\n const line = rawLine\n // skip diff metadata lines\n if (DIFF_HEADER_PREFIXES.some(p => line.startsWith(p)))\n continue\n\n if (line.length >= 2 && line[0] === '-' && line[1] === ' ') {\n orig.push(` ${line.slice(1)}`)\n }\n else if (line.length >= 2 && line[0] === '+' && line[1] === ' ') {\n updated.push(` ${line.slice(1)}`)\n }\n else {\n // fallback: treat as context (no prefix)\n orig.push(line)\n updated.push(line)\n }\n }\n return {\n original: orig.join('\\n'),\n updated: updated.join('\\n'),\n }\n}\n\nexport function parseFenceToken(token: MarkdownToken): CodeBlockNode {\n const hasMap = Array.isArray(token.map) && token.map.length === 2\n const tokenMeta = (token.meta ?? {}) as unknown as { closed?: boolean }\n const closed = typeof tokenMeta.closed === 'boolean' ? tokenMeta.closed : undefined\n const info = String(token.info ?? '')\n const diff = info.startsWith('diff')\n const language = diff\n ? (() => {\n const s = info\n const sp = s.indexOf(' ')\n return sp === -1\n ? ''\n : String(s.slice(sp + 1) ?? '')\n })()\n : info\n\n // Defensive sanitization: sometimes a closing fence line (e.g. ``` or ``)\n // can accidentally end up inside `token.content` (for example when\n // the parser/mapping is confused). Remove a trailing line that only\n // contains backticks and optional whitespace so we don't render stray\n // ` or `` characters at the end of the code output. This is a\n // conservative cleanup and only strips a final line that looks like a\n // fence marker (starts with optional spaces then one or more ` and\n // only whitespace until end-of-string).\n let content = String(token.content ?? '')\n if (TRAILING_FENCE_LINE_RE.test(content))\n content = content.replace(TRAILING_FENCE_LINE_RE, '')\n\n if (diff) {\n const { original, updated } = splitUnifiedDiff(content)\n // 返回时保留原来的 code 字段为 updated(编辑后代码),并额外附加原始与更新的文本\n return {\n type: 'code_block',\n language,\n code: String(updated ?? ''),\n raw: String(content ?? 
''),\n diff,\n loading: closed === true ? false : closed === false ? true : !hasMap,\n originalCode: original,\n updatedCode: updated,\n }\n }\n\n return {\n type: 'code_block',\n language,\n code: String(content ?? ''),\n raw: String(content ?? ''),\n diff,\n loading: closed === true ? false : closed === false ? true : !hasMap,\n }\n}\n","import type { FootnoteReferenceNode, MarkdownToken } from '../../types'\n\nexport function parseFootnoteRefToken(\n token: MarkdownToken,\n): FootnoteReferenceNode {\n const tokenMeta = (token.meta ?? {}) as unknown as { label?: string }\n return {\n type: 'footnote_reference',\n id: String(tokenMeta.label ?? ''),\n raw: `[^${String(tokenMeta.label ?? '')}]`,\n }\n}\n","import type { HardBreakNode } from '../../types'\n\nexport function parseHardbreakToken(): HardBreakNode {\n return {\n type: 'hardbreak',\n raw: '\\\\\\n',\n }\n}\n","import type { HighlightNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseHighlightToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: HighlightNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let markText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between mark_open and mark_close\n while (i < tokens.length && tokens[i].type !== 'mark_close') {\n markText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: HighlightNode = {\n type: 'highlight',\n children,\n raw: `==${markText}==`,\n }\n\n // Skip to after mark_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { InlineCodeNode, MarkdownToken, ParsedNode } from '../../types'\n\n// Parse inline HTML and return an appropriate ParsedNode depending on tag.\nexport function parseHtmlInlineCodeToken(token: MarkdownToken, tokens: MarkdownToken[], i: number): [ParsedNode, number] {\n let code = String(token.content ?? '').trim()\n const nextToken = tokens[i + 1]\n const nnextToken = tokens[i + 2]\n\n // Quick tag detection\n const tagMatch = code.match(/^<\\s*([\\w-]+)/)\n const tag = tagMatch ? tagMatch[1].toLowerCase() : ''\n\n // Helper to extract inner text for tags like <a>...</a>, <p>...</p>, <div>...</div>\n function extractInner(html: string) {\n // Match the first closing sequence like >...< /tag>\n const m = html.match(/>([\\s\\S]*?)<\\s*\\/\\s*[\\w-]+>/)\n return m ? m[1] : ''\n }\n\n if (tag === 'a') {\n let loading = false\n if (!nextToken || (nextToken?.type === 'text' && (!nnextToken || nnextToken.type !== 'html_inline')) || !nextToken) {\n loading = true\n }\n if (nextToken?.type === 'text' && (nnextToken?.type === 'html_inline' || !nnextToken)) {\n // Try to extract href and inner text\n const hrefMatch = code.match(/href\\s*=\\s*\"([^\"]+)\"|href\\s*=\\s*'([^']+)'|href\\s*=\\s*([^\\s>]+)/i)\n const href = hrefMatch ? (hrefMatch[1] || hrefMatch[2] || hrefMatch[3]) : ''\n let index = i + 1\n if (nextToken.type === 'text') {\n code = nextToken.content?.replace(/<[^>]*$/, '') ?? ''\n\n index = i + 2\n }\n if (nnextToken?.type === 'html_inline' && nextToken.type === 'text') {\n index = i + 3\n }\n const inner = code || href || ''\n return [\n {\n type: 'link',\n href: String(href ?? 
''),\n title: null,\n text: code,\n children: [\n { type: 'text', content: inner, raw: inner },\n ],\n loading,\n raw: code,\n } as ParsedNode,\n index,\n ]\n }\n }\n\n if (tag === 'p' || tag === 'div') {\n const inner = extractInner(code) || ''\n return [\n {\n type: 'paragraph',\n children: [\n { type: 'text', content: inner, raw: inner },\n ],\n raw: code,\n } as ParsedNode,\n i + 1,\n ]\n }\n // Fallback: treat as inline code (preserve previous behavior)\n return [\n {\n type: 'inline_code',\n code,\n raw: code,\n } as InlineCodeNode,\n i + 1,\n ]\n}\n","import type { ImageNode, MarkdownToken } from '../../types'\n\nexport function parseImageToken(token: MarkdownToken, loading = false): ImageNode {\n // Some call-sites pass an outer/inline token whose children contain the\n // actual image token (with attrs). Prefer token.attrs when present; when\n // absent, search children for the first child that carries attrs.\n let attrs = token.attrs ?? []\n // If the parent token has no attrs, prefer attrs from the inner child image\n // token. Remember which child provided attrs so we can prefer its content\n // over the parent's `token.content` (the parent may contain the raw\n // markdown string like ``).\n let childWithAttrs: any = null\n if ((!attrs || attrs.length === 0) && Array.isArray(token.children)) {\n for (const child of token.children) {\n // child.attrs may be null in markdown-it; check and use if populated\n const childAttrs = (child as any)?.attrs\n if (Array.isArray(childAttrs) && childAttrs.length > 0) {\n attrs = childAttrs\n childWithAttrs = child\n break\n }\n }\n }\n const src = String(attrs.find(attr => attr[0] === 'src')?.[1] ?? '')\n const altAttr = attrs.find(attr => attr[0] === 'alt')?.[1]\n // Prefer a non-empty alt attribute. If attrs were sourced from an inner\n // child token prefer that child's `content` over the parent's `token.content`\n // because the parent may contain the raw markdown instead of the plain alt\n // text.\n let alt = ''\n if (altAttr != null && String(altAttr).length > 0) {\n alt = String(altAttr)\n }\n else if (childWithAttrs?.content != null && String(childWithAttrs.content).length > 0) {\n alt = String(childWithAttrs.content)\n }\n else if (Array.isArray(childWithAttrs?.children) && childWithAttrs.children[0]?.content) {\n // If the inner image token has children (e.g. a text token) prefer that\n // child's content when the child token's own `content` is empty.\n alt = String(childWithAttrs.children[0].content)\n }\n else if (Array.isArray(token.children) && token.children[0]?.content) {\n alt = String(token.children[0].content)\n }\n else if (token.content != null && String(token.content).length > 0) {\n alt = String(token.content)\n }\n\n const _title = attrs.find(attr => attr[0] === 'title')?.[1] ?? null\n const title = _title === null ? null : String(_title)\n const raw = String(token.content ?? '')\n\n return {\n type: 'image',\n src,\n alt,\n title,\n raw,\n loading,\n }\n}\n","import type { InlineCodeNode, MarkdownToken } from '../../types'\n\nexport function parseInlineCodeToken(token: MarkdownToken): InlineCodeNode {\n const code = String(token.content ?? 
'')\n return {\n type: 'inline_code',\n code,\n raw: code,\n }\n}\n","import type { InsertNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseInsertToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: InsertNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let insText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between ins_open and ins_close\n while (i < tokens.length && tokens[i].type !== 'ins_close') {\n insText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: InsertNode = {\n type: 'insert',\n children,\n raw: `++${String(insText)}++`,\n }\n\n // Skip to after ins_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { LinkNode, MarkdownToken } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseLinkToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: LinkNode\n nextIndex: number\n} {\n const openToken = tokens[startIndex]\n const attrs = openToken.attrs ?? []\n const href = String(attrs.find(attr => attr[0] === 'href')?.[1] ?? '')\n const _title = attrs.find(attr => attr[0] === 'title')?.[1] ?? null\n const title = _title === null ? null : String(_title)\n\n let i = startIndex + 1\n const linkTokens: MarkdownToken[] = []\n let loading = true\n\n // Collect all tokens between link_open and link_close\n while (i < tokens.length && tokens[i].type !== 'link_close') {\n linkTokens.push(tokens[i])\n i++\n }\n\n if (tokens[i]?.type === 'link_close') {\n loading = false\n }\n\n // Parse the collected tokens as inline content\n const children = parseInlineTokens(linkTokens)\n const linkText = children\n .map((node) => {\n const nodeAny = node as unknown as { content?: string, raw?: string }\n if ('content' in node)\n return String(nodeAny.content ?? '')\n return String(nodeAny.raw ?? '')\n })\n .join('')\n\n const node: LinkNode = {\n type: 'link',\n href,\n title,\n text: linkText,\n children,\n raw: String(`[${linkText}](${href}${title ? ` \"${title}\"` : ''})`),\n loading,\n }\n\n // Skip to after link_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, MathInlineNode } from '../../types'\n\n// Parse a math_inline token (inline math expressions)\nexport function parseMathInlineToken(token: MarkdownToken): MathInlineNode {\n return {\n type: 'math_inline',\n content: String(token.content ?? ''),\n loading: !!token.loading,\n raw: token.raw!,\n }\n}\n","import type { MarkdownToken, ReferenceNode } from '../../types'\n\n// Parse a reference token from markdown-it\nexport function parseReferenceToken(token: MarkdownToken): ReferenceNode {\n const id = String(token.content ?? '')\n const raw = String(token.markup ?? `[${token.content ?? 
''}]`)\n return {\n type: 'reference',\n id,\n raw,\n }\n}\n","import type {\n MarkdownToken,\n ParsedNode,\n StrikethroughNode,\n} from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseStrikethroughToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: StrikethroughNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let sText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between s_open and s_close\n while (i < tokens.length && tokens[i].type !== 's_close') {\n sText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: StrikethroughNode = {\n type: 'strikethrough',\n children,\n raw: `~~${sText}~~`,\n }\n\n // Skip to after s_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, StrongNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseStrongToken(\n tokens: MarkdownToken[],\n startIndex: number,\n raw?: string,\n): {\n node: StrongNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let strongText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between strong_open and strong_close\n while (i < tokens.length && tokens[i].type !== 'strong_close') {\n strongText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens, raw))\n\n const node: StrongNode = {\n type: 'strong',\n children,\n raw: `**${String(strongText)}**`,\n }\n\n // Skip to after strong_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, SubscriptNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseSubscriptToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: SubscriptNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let subText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between sub_open and sub_close (if applicable)\n while (i < tokens.length && tokens[i].type !== 'sub_close') {\n subText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const startContent = String(tokens[startIndex].content ?? '')\n const display = subText || startContent\n const node: SubscriptNode = {\n type: 'subscript',\n children: children.length > 0\n ? children\n : [\n {\n type: 'text',\n // Fallback to the collected inner text (e.g., \"2\" in H~2~O)\n content: display,\n raw: display,\n },\n ],\n raw: `~${display}~`,\n }\n\n // Skip to after sub_close\n const nextIndex = i < tokens.length ? 
i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, ParsedNode, SuperscriptNode } from '../../types'\nimport { parseInlineTokens } from '../index'\n\nexport function parseSuperscriptToken(\n tokens: MarkdownToken[],\n startIndex: number,\n): {\n node: SuperscriptNode\n nextIndex: number\n} {\n const children: ParsedNode[] = []\n let supText = ''\n let i = startIndex + 1\n const innerTokens: MarkdownToken[] = []\n\n // Process tokens between sup_open and sup_close (if applicable)\n while (i < tokens.length && tokens[i].type !== 'sup_close') {\n supText += String(tokens[i].content ?? '')\n innerTokens.push(tokens[i])\n i++\n }\n\n // Parse inner tokens to handle nested elements\n children.push(...parseInlineTokens(innerTokens))\n\n const node: SuperscriptNode = {\n type: 'superscript',\n children:\n children.length > 0\n ? children\n : [\n {\n type: 'text',\n // Fallback to the collected inner text (e.g., \"2\" in x^2^)\n content: supText || String(tokens[startIndex].content ?? ''),\n raw: supText || String(tokens[startIndex].content ?? ''),\n },\n ],\n raw: `^${supText || String(tokens[startIndex].content ?? '')}^`,\n }\n\n // Skip to after sup_close\n const nextIndex = i < tokens.length ? i + 1 : tokens.length\n\n return { node, nextIndex }\n}\n","import type { MarkdownToken, TextNode } from '../../types'\n\nexport function parseTextToken(token: MarkdownToken): TextNode {\n const content = String(token.content ?? '')\n return {\n type: 'text',\n content,\n raw: content,\n }\n}\n","import type { LinkNode, MarkdownToken, ParsedNode, TextNode } from '../../types'\nimport { parseCheckboxInputToken, parseCheckboxToken } from './checkbox-parser'\nimport { parseEmojiToken } from './emoji-parser'\nimport { parseEmphasisToken } from './emphasis-parser'\nimport { parseFenceToken } from './fence-parser'\nimport { parseFootnoteRefToken } from './footnote-ref-parser'\nimport { parseHardbreakToken } from './hardbreak-parser'\nimport { parseHighlightToken } from './highlight-parser'\nimport { parseHtmlInlineCodeToken } from './html-inline-code-parser'\nimport { parseImageToken } from './image-parser'\nimport { parseInlineCodeToken } from './inline-code-parser'\nimport { parseInsertToken } from './insert-parser'\nimport { parseLinkToken } from './link-parser'\nimport { parseMathInlineToken } from './math-inline-parser'\nimport { parseReferenceToken } from './reference-parser'\nimport { parseStrikethroughToken } from './strikethrough-parser'\nimport { parseStrongToken } from './strong-parser'\nimport { parseSubscriptToken } from './subscript-parser'\nimport { parseSuperscriptToken } from './superscript-parser'\nimport { parseTextToken } from './text-parser'\n\n// Precompiled regexes used frequently in inline parsing\nconst STRONG_PAIR_RE = /\\*\\*([\\s\\S]*?)\\*\\*/\n\n// Shared helper for building safe dynamic regex parts\nfunction escapeRegExp(str: string) {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n}\n\n// Helper: detect likely URLs/hrefs (autolinks). 
Extracted so the\n// detection logic is easy to tweak and test.\nconst AUTOLINK_PROTOCOL_RE = /^(?:https?:\\/\\/|mailto:|ftp:\\/\\/)/i\nconst AUTOLINK_GENERIC_RE = /:\\/\\//\n\nexport function isLikelyUrl(href?: string) {\n if (!href)\n return false\n return AUTOLINK_PROTOCOL_RE.test(href) || AUTOLINK_GENERIC_RE.test(href)\n}\n\n// Process inline tokens (for text inside paragraphs, headings, etc.)\nexport function parseInlineTokens(tokens: MarkdownToken[], raw?: string, pPreToken?: MarkdownToken): ParsedNode[] {\n if (!tokens || tokens.length === 0)\n return []\n\n const result: ParsedNode[] = []\n let currentTextNode: TextNode | null = null\n\n let i = 0\n // Note: strong-token normalization and list-item normalization are\n // applied during markdown-it parsing via core rules (plugins that\n // run after 'inline'). Inline parsers should receive normalized\n // children and only focus on parsing.\n\n // Helpers to manage text node merging and pushing parsed nodes\n function resetCurrentTextNode() {\n currentTextNode = null\n }\n\n function handleEmphasisAndStrikethrough(content: string, token: MarkdownToken): boolean {\n // strikethrough (~~)\n if (/[^~]*~{2,}[^~]+/.test(content)) {\n let idx = content.indexOf('~~')\n if (idx === -1)\n idx = 0\n const _text = content.slice(0, idx)\n if (_text) {\n if (currentTextNode) {\n currentTextNode.content += _text\n currentTextNode.raw += _text\n }\n else {\n currentTextNode = {\n type: 'text',\n content: String(_text ?? ''),\n raw: String(token.content ?? ''),\n }\n result.push(currentTextNode)\n }\n }\n const strikethroughContent = content.slice(idx)\n const { node } = parseStrikethroughToken([\n { type: 's_open', tag: 's', content: '', markup: '*', info: '', meta: null },\n { type: 'text', tag: '', content: strikethroughContent.replace(/~/g, ''), markup: '', info: '', meta: null },\n { type: 's_close', tag: 's', content: '', markup: '*', info: '', meta: null },\n ], 0)\n resetCurrentTextNode()\n pushNode(node)\n i++\n return true\n }\n\n // strong (**)\n if (/\\*\\*/.test(content)) {\n const openIdx = content.indexOf('**')\n const beforeText = openIdx > -1 ? content.slice(0, openIdx) : ''\n if (beforeText) {\n pushText(beforeText, beforeText)\n }\n\n if (openIdx === -1) {\n i++\n return true\n }\n\n // find the first matching closing ** pair in the content\n const exec = STRONG_PAIR_RE.exec(content)\n let inner = ''\n let after = ''\n if (exec && typeof exec.index === 'number') {\n inner = exec[1]\n after = content.slice(exec.index + exec[0].length)\n }\n else {\n // no closing pair found: mid-state, take rest as inner\n inner = content.slice(openIdx + 2)\n after = ''\n }\n\n const { node } = parseStrongToken([\n { type: 'strong_open', tag: 'strong', content: '', markup: '*', info: '', meta: null },\n { type: 'text', tag: '', content: inner, markup: '', info: '', meta: null },\n { type: 'strong_close', tag: 'strong', content: '', markup: '*', info: '', meta: null },\n ], 0, raw)\n\n resetCurrentTextNode()\n pushNode(node)\n\n if (after) {\n handleToken({\n type: 'text',\n content: after,\n raw: after,\n })\n i--\n }\n\n i++\n return true\n }\n\n // emphasis (*)\n if (/[^*]*\\*[^*]+/.test(content)) {\n let idx = content.indexOf('*')\n if (idx === -1)\n idx = 0\n const _text = content.slice(0, idx)\n if (_text) {\n if (currentTextNode) {\n currentTextNode.content += _text\n currentTextNode.raw += _text\n }\n else {\n currentTextNode = { type: 'text', content: String(_text ?? ''), raw: String(token.content ?? 
'') }\n result.push(currentTextNode)\n }\n }\n const emphasisContent = content.slice(idx)\n const { node } = parseEmphasisToken([\n { type: 'em_open', tag: 'em', content: '', markup: '*', info: '', meta: null },\n { type: 'text', tag: '', content: emphasisContent.replace(/\\*/g, ''), markup: '', info: '', meta: null },\n { type: 'em_close', tag: 'em', content: '', markup: '*', info: '', meta: null },\n ], 0)\n resetCurrentTextNode()\n pushNode(node)\n i++\n return true\n }\n\n return false\n }\n\n function handleInlineCodeContent(content: string, _token: MarkdownToken): boolean {\n if (!/`[^`]*/.test(content))\n return false\n\n // Close any current text node and handle inline code\n resetCurrentTextNode()\n const code_start = content.indexOf('`')\n const code_end = content.indexOf('`', code_start + 1)\n const _text = content.slice(0, code_start)\n const codeContent = code_end === -1 ? content.slice(code_start) : content.slice(code_start, code_end)\n const after = code_end === -1 ? '' : content.slice(code_end + 1)\n if (_text) {\n // Try to re-run emphasis/strong parsing on the fragment before the code span\n // but avoid mutating the outer token index `i` (handlers sometimes increment it).\n const handled = handleEmphasisAndStrikethrough(_text, _token)\n // restore index so we don't skip tokens in the outer loop\n if (!handled) {\n pushText(_text, _text)\n }\n else {\n i--\n }\n }\n\n const code = codeContent.replace(/`/g, '')\n pushParsed({\n type: 'inline_code',\n code,\n raw: String(code ?? ''),\n } as ParsedNode)\n\n // afterCode 可能也存在很多情况包括多个 code,我们递归处理 --- IGNORE ---\n if (after) {\n handleToken({\n type: 'text',\n content: after,\n raw: String(after ?? ''),\n })\n i--\n }\n else if (code_end === -1) {\n // 要把下一个 token 也合并进来,把类型变成 text\n const nextToken = tokens[i + 1]\n if (nextToken) {\n let fixedAfter = after\n for (let j = i + 1; j < tokens.length; j++) {\n fixedAfter += String(((tokens[j].content ?? '') + (tokens[j].markup ?? '')))\n }\n i = tokens.length - 1\n handleToken({\n type: 'text',\n content: fixedAfter,\n raw: String(fixedAfter ?? ''),\n })\n }\n }\n i++\n return true\n }\n\n function pushParsed(node: ParsedNode) {\n // ensure any ongoing text node is closed when pushing non-text nodes\n resetCurrentTextNode()\n result.push(node)\n }\n\n function pushToken(token: MarkdownToken) {\n // push a raw token into result as a ParsedNode (best effort cast)\n resetCurrentTextNode()\n result.push(token as ParsedNode)\n }\n\n // backward-compatible alias used by existing call sites that pass parsed nodes\n function pushNode(node: ParsedNode) {\n pushParsed(node)\n }\n\n function pushText(content: string, raw?: string) {\n if (currentTextNode) {\n currentTextNode.content += content\n currentTextNode.raw += raw ?? content\n }\n else {\n currentTextNode = {\n type: 'text',\n content: String(content ?? ''),\n raw: String(raw ?? content ?? 
''),\n } as TextNode\n result.push(currentTextNode)\n }\n }\n\n while (i < tokens.length) {\n const token = tokens[i] as MarkdownToken\n handleToken(token)\n }\n\n function handleToken(token: MarkdownToken) {\n switch (token.type) {\n case 'text': {\n handleTextToken(token)\n break\n }\n\n case 'softbreak':\n if (currentTextNode) {\n // Append newline to the current text node\n currentTextNode.content += '\\n'\n currentTextNode.raw += '\\n' // Assuming raw should also reflect the newline\n }\n // Don't create a node for softbreak itself, just modify text\n i++\n break\n\n case 'code_inline':\n pushNode(parseInlineCodeToken(token))\n i++\n break\n case 'html_inline': {\n const [node, index] = parseHtmlInlineCodeToken(token, tokens, i)\n pushNode(node)\n i = index\n break\n }\n\n case 'link_open': {\n handleLinkOpen(token)\n break\n }\n\n case 'image':\n resetCurrentTextNode()\n pushNode(parseImageToken(token))\n i++\n break\n\n case 'strong_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseStrongToken(tokens, i, token.content)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'em_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseEmphasisToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 's_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseStrikethroughToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'mark_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseHighlightToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'ins_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseInsertToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'sub_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseSubscriptToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'sup_open': {\n resetCurrentTextNode()\n const { node, nextIndex } = parseSuperscriptToken(tokens, i)\n pushNode(node)\n i = nextIndex\n break\n }\n\n case 'sub':\n resetCurrentTextNode()\n pushNode({\n type: 'subscript',\n children: [\n {\n type: 'text',\n content: String(token.content ?? ''),\n raw: String(token.content ?? ''),\n },\n ],\n raw: `~${String(token.content ?? '')}~`,\n })\n i++\n break\n\n case 'sup':\n resetCurrentTextNode()\n pushNode({\n type: 'superscript',\n children: [\n {\n type: 'text',\n content: String(token.content ?? ''),\n raw: String(token.content ?? ''),\n },\n ],\n raw: `^${String(token.content ?? '')}^`,\n })\n i++\n break\n\n case 'emoji': {\n resetCurrentTextNode()\n const preToken = tokens[i - 1]\n if (preToken?.type === 'text' && /\\|:-+/.test(String(preToken.content ?? 
''))) {\n // 处理表格中的 emoji,跳过\n pushText('', '')\n }\n else {\n pushNode(parseEmojiToken(token))\n }\n i++\n break\n }\n case 'checkbox':\n resetCurrentTextNode()\n pushNode(parseCheckboxToken(token))\n i++\n break\n case 'checkbox_input':\n resetCurrentTextNode()\n pushNode(parseCheckboxInputToken(token))\n i++\n break\n case 'footnote_ref':\n resetCurrentTextNode()\n pushNode(parseFootnoteRefToken(token))\n i++\n break\n\n case 'hardbreak':\n resetCurrentTextNode()\n pushNode(parseHardbreakToken())\n i++\n break\n\n case 'fence': {\n resetCurrentTextNode()\n // Handle fenced code blocks with language specifications\n pushNode(parseFenceToken(tokens[i]))\n i++\n break\n }\n\n case 'math_inline': {\n resetCurrentTextNode()\n pushNode(parseMathInlineToken(token))\n i++\n break\n }\n\n case 'reference': {\n handleReference(token)\n break\n }\n\n default:\n // Skip unknown token types, ensure text merging stops\n pushToken(token)\n i++\n break\n }\n }\n\n function handleTextToken(token: MarkdownToken) {\n // 合并连续的 text 节点\n let index = result.length - 1\n let content = String(token.content ?? '').replace(/\\\\/g, '')\n if (content.startsWith(')') && result[result.length - 1]?.type === 'link') {\n content = content.slice(1)\n }\n\n if (content.endsWith('undefined') && !raw?.endsWith('undefined')) {\n content = content.slice(0, -9)\n }\n for (index; index >= 0; index--) {\n const item = result[index]\n if (item.type === 'text') {\n currentTextNode = null\n content = item.content + content\n continue\n }\n break\n }\n\n if (index < result.length - 1)\n result.splice(index + 1)\n\n const nextToken = tokens[i + 1]\n if (pPreToken?.type === 'list_item_open' && /^\\d$/.test(content)) {\n i++\n return\n }\n if (content === '`' || content === '|' || content === '$' || /^\\*+$/.test(content)) {\n i++\n return\n }\n if (!nextToken && /[^\\]]\\s*\\(\\s*$/.test(content)) {\n content = content.replace(/\\(\\s*$/, '')\n }\n if (handleCheckboxLike(content))\n return\n const preToken = tokens[i - 1]\n if ((content === '[' && !nextToken?.markup?.includes('*')) || (content === ']' && !preToken.markup?.includes('*'))) {\n i++\n return\n }\n if (handleInlineCodeContent(content, token))\n return\n if (handleEmphasisAndStrikethrough(content, token))\n return\n if (handleInlineImageContent(content, token))\n return\n\n const textNode = parseTextToken({ ...token, content })\n\n if (handleInlineLinkContent(content, token))\n return\n if (currentTextNode) {\n // Merge with the previous text node\n currentTextNode.content += textNode.content.replace(/(\\*+|\\(|\\\\)$/, '')\n currentTextNode.raw += textNode.raw\n }\n else {\n const maybeMath = preToken?.tag === 'br' && tokens[i - 2]?.content === '['\n // Start a new text node\n const nextToken = tokens[i + 1]\n if (!nextToken)\n textNode.content = textNode.content.replace(/(\\*+|\\(|\\\\)$/, '')\n\n currentTextNode = textNode\n currentTextNode.center = maybeMath\n result.push(currentTextNode)\n }\n i++\n }\n\n function handleLinkOpen(token: MarkdownToken) {\n // mirror logic previously in the switch-case for 'link_open'\n resetCurrentTextNode()\n const href = token.attrs?.find(([name]) => name === 'href')?.[1]\n // 如果 text 不在[]里说明,它不是一个link, 当 text 处理\n\n if (raw && tokens[i + 1].type === 'text') {\n const text = String(tokens[i + 1]?.content ?? '')\n const escText = escapeRegExp(text)\n const reg = new RegExp(`\\\\[${escText}\\\\s*\\\\]`)\n if (!reg.test(raw)) {\n // If this link_open comes from an autolinkified URL (e.g. 
http://...)\n // treat it as a real link node rather than plain text. Otherwise\n // fall back to pushing plain text.\n const hrefAttr = token.attrs?.find(([name]) => name === 'href')?.[1] ?? ''\n // Only treat as autolink when the original raw source does not contain\n // any square-bracket link text (i.e. it was not written as [text](...)).\n const isAutolink = (!raw.includes('[')) && isLikelyUrl(String(hrefAttr))\n if (isAutolink) {\n resetCurrentTextNode()\n const node = {\n type: 'link',\n href: String(hrefAttr),\n title: null,\n text,\n children: [\n { type: 'text', content: text, raw: text },\n ],\n loading: false,\n } as ParsedNode\n pushParsed(node)\n i += 3\n return\n }\n\n pushText(text, text)\n i += 3\n return\n }\n }\n if (raw && href) {\n const loadingMath = new RegExp(`\\\\(\\\\s*${escapeRegExp(href)}\\\\s*\\\\)`)\n const pre = result.length > 0 ? result[result.length - 1] : undefined as ParsedNode | undefined\n const loading = !loadingMath.test(raw)\n if (loading && pre) {\n let preText = ''\n if (pre) {\n if (pre.type === 'link')\n preText = String((pre as LinkNode).text ?? '')\n else if (pre.type === 'text')\n preText = String((pre as TextNode).content ?? '')\n else if (((pre as { content?: unknown }).content) && typeof (pre as { content?: unknown }).content === 'string')\n preText = String((pre as { content?: string }).content ?? '').slice(1, -1)\n }\n const isLinkMatch = new RegExp(`\\\\[${escapeRegExp(preText)}\\\\s*\\\\]\\\\(`)\n if (isLinkMatch.test(raw)) {\n const text = String(preText ?? '')\n resetCurrentTextNode()\n const node = {\n type: 'link',\n href: '',\n title: null,\n text,\n children: [\n { type: 'text', content: text, raw: text },\n ],\n loading,\n } as ParsedNode\n result.splice(result.length - 1, 1, node) // remove the pre node\n i += 3\n if (String(tokens[i]?.content ?? '') === '.')\n i++\n return\n }\n }\n }\n const { node, nextIndex } = parseLinkToken(tokens, i)\n i = nextIndex\n // Determine loading state conservatively: if the link token parser\n // marked it as loading already, keep it; otherwise compute from raw\n // and href as a fallback so unclosed links remain marked as loading.\n const hrefAttr = token.attrs?.find(([name]) => name === 'href')?.[1]\n const hrefStr = String(hrefAttr ?? '')\n // Only override the link parser's default loading state when we\n // actually have an href to check against the raw source. If the\n // tokenizer emitted a link_open without an href (partial tokenizers\n // may do this), prefer the parseLinkToken's initial loading value\n // (which defaults to true for mid-state links).\n if (raw && hrefStr) {\n // More robust: locate the first \"](\" after the link text and see if\n // there's a matching ')' that closes the href. 
This avoids false\n // positives when other parentheses appear elsewhere in the source.\n const openIdx = raw.indexOf('](')\n if (openIdx === -1) {\n // No explicit link start found in raw — be conservative and keep\n // the parser's default loading value.\n }\n else {\n const closeIdx = raw.indexOf(')', openIdx + 2)\n if (closeIdx === -1) {\n node.loading = true\n }\n else {\n // Check that the href inside the parens corresponds to this token\n const inside = raw.slice(openIdx + 2, closeIdx)\n if (inside.includes(hrefStr))\n node.loading = false\n else\n node.loading = true\n }\n }\n }\n pushParsed(node)\n }\n\n function handleReference(token: MarkdownToken) {\n // mirror previous in-switch 'reference' handling\n resetCurrentTextNode()\n const nextToken = tokens[i + 1]\n const preToken = tokens[i - 1]\n const preResult = result[result.length - 1]\n\n const nextIsTextNotStartingParens = nextToken?.type === 'text' && !((String(nextToken.content ?? '')).startsWith('('))\n const preIsTextEndingBracketOrOnlySpace = preToken?.type === 'text' && /\\]$|^\\s*$/.test(String(preToken.content ?? ''))\n\n if (nextIsTextNotStartingParens || preIsTextEndingBracketOrOnlySpace) {\n pushNode(parseReferenceToken(token))\n }\n else if (nextToken && nextToken.type === 'text') {\n nextToken.content = String(token.markup ?? '') + String(nextToken.content ?? '')\n }\n else if (preResult && preResult.type === 'text') {\n preResult.content = String(preResult.content ?? '') + String(token.markup ?? '')\n preResult.raw = String(preResult.raw ?? '') + String(token.markup ?? '')\n }\n i++\n }\n\n function handleInlineLinkContent(content: string, _token: MarkdownToken): boolean {\n const linkStart = content.indexOf('[')\n if (linkStart === -1)\n return false\n\n let textNodeContent = content.slice(0, linkStart)\n const linkEnd = content.indexOf('](', linkStart)\n if (linkEnd !== -1) {\n const textToken = tokens[i + 2]\n let text = content.slice(linkStart + 1, linkEnd)\n if (text.includes('[')) {\n const secondLinkStart = text.indexOf('[')\n // adjust original linkStart and text\n textNodeContent += content.slice(0, linkStart + secondLinkStart + 1)\n const newLinkStart = linkStart + secondLinkStart + 1\n text = content.slice(newLinkStart + 1, linkEnd)\n }\n const nextToken = tokens[i + 1]\n if (content.endsWith('](') && nextToken?.type === 'link_open' && textToken) {\n const last = tokens[i + 4]\n let index = 4\n let loading = true\n if (last?.type === 'text' && last.content === ')') {\n index++\n loading = false\n }\n else if (last?.type === 'text' && last.content === '.') {\n i++\n }\n\n if (textNodeContent) {\n pushText(textNodeContent, textNodeContent)\n }\n pushParsed({\n type: 'link',\n href: String(textToken.content ?? ''),\n title: null,\n text,\n children: [{ type: 'text', content: text, raw: text }],\n loading,\n } as ParsedNode)\n i += index\n return true\n }\n\n const linkContentEnd = content.indexOf(')', linkEnd)\n const href = linkContentEnd !== -1 ? content.slice(linkEnd + 2, linkContentEnd) : ''\n const loading = linkContentEnd === -1\n\n if (textNodeContent) {\n pushText(textNodeContent, textNodeContent)\n }\n pushParsed({\n type: 'link',\n href,\n title: null,\n text,\n children: [{ type: 'text', content: text, raw: text }],\n loading,\n } as ParsedNode)\n\n const afterText = linkContentEnd !== -1 ? 
content.slice(linkContentEnd + 1) : ''\n if (afterText) {\n handleToken({ type: 'text', content: afterText, raw: afterText } as unknown as MarkdownToken)\n i--\n }\n i++\n return true\n }\n\n return false\n }\n\n function handleInlineImageContent(content: string, token: MarkdownToken): boolean {\n const imageStart = content.indexOf('![')\n if (imageStart === -1)\n return false\n\n const textNodeContent = content.slice(0, imageStart)\n if (!currentTextNode) {\n currentTextNode = {\n type: 'text',\n content: textNodeContent,\n raw: textNodeContent,\n }\n }\n else {\n currentTextNode.content += textNodeContent\n }\n result.push(currentTextNode)\n currentTextNode = null // Reset current text node\n pushParsed(parseImageToken(token, true) as ParsedNode)\n i++\n return true\n }\n\n function handleCheckboxLike(content: string): boolean {\n // Detect checkbox-like syntax at the start of a list item e.g. [x] or [ ]\n if (!(content?.startsWith('[') && pPreToken?.type === 'list_item_open'))\n return false\n\n const _content = content.slice(1)\n const w = _content.match(/[^\\s\\]]/)\n if (w === null) {\n i++\n return true\n }\n // If the first non-space/']' char is x/X treat as a checkbox input\n if (w && /x/i.test(w[0])) {\n const checked = w[0] === 'x' || w[0] === 'X'\n pushParsed({\n type: 'checkbox_input',\n checked,\n raw: checked ? '[x]' : '[ ]',\n } as ParsedNode)\n i++\n return true\n }\n\n return false\n }\n\n return result\n}\n","import type { BlockquoteNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseBlockquote(\n tokens: MarkdownToken[],\n index: number,\n): [BlockquoteNode, number] {\n const blockquoteChildren: ParsedNode[] = []\n let j = index + 1\n\n // Process blockquote content until closing tag is found\n while (j < tokens.length && tokens[j].type !== 'blockquote_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n blockquoteChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: String(contentToken.content ?? ''),\n })\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n // Handle nested lists - use parseList directly for proper nested list support\n const [listNode, newIndex] = parseList(tokens, j)\n blockquoteChildren.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const blockquoteNode: BlockquoteNode = {\n type: 'blockquote',\n children: blockquoteChildren,\n raw: blockquoteChildren.map(child => child.raw).join('\\n'),\n }\n\n return [blockquoteNode, j + 1] // Skip blockquote_close\n}\n","import type { CodeBlockNode, MarkdownToken } from '../../types'\nimport { parseFenceToken } from '../inline-parsers/fence-parser'\n\nexport function parseCodeBlock(token: MarkdownToken): CodeBlockNode {\n // If this code block is actually a diff (some markdown-it backends\n // classify fences vs code_block differently), delegate to the\n // fence parser to preserve original/updated fields.\n if (token.info?.startsWith('diff')) {\n return parseFenceToken(token)\n }\n\n const contentStr = String(token.content ?? 
'')\n const match = contentStr.match(/ type=\"application\\/vnd\\.ant\\.([^\"]+)\"/)\n if (match?.[1]) {\n // 需要把 <antArtifact> 标签去掉\n // mutate token.content safely by assigning the cleaned string\n token.content = contentStr\n .replace(/<antArtifact[^>]*>/g, '')\n .replace(/<\\/antArtifact>/g, '')\n }\n const hasMap = Array.isArray(token.map) && token.map.length === 2\n return {\n type: 'code_block',\n language: match ? match[1] : String(token.info ?? ''),\n code: String(token.content ?? ''),\n raw: String(token.content ?? ''),\n loading: !hasMap,\n }\n}\n","import type {\n DefinitionItemNode,\n DefinitionListNode,\n MarkdownToken,\n ParsedNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseDefinitionList(\n tokens: MarkdownToken[],\n index: number,\n): [DefinitionListNode, number] {\n const items: DefinitionItemNode[] = []\n let j = index + 1\n let termNodes: ParsedNode[] = []\n let definitionNodes: ParsedNode[] = []\n\n while (j < tokens.length && tokens[j].type !== 'dl_close') {\n if (tokens[j].type === 'dt_open') {\n // Process term\n const termToken = tokens[j + 1]\n termNodes = parseInlineTokens(termToken.children || [])\n j += 3 // Skip dt_open, inline, dt_close\n }\n else if (tokens[j].type === 'dd_open') {\n // Process definition\n let k = j + 1\n definitionNodes = []\n\n while (k < tokens.length && tokens[k].type !== 'dd_close') {\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n definitionNodes.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], String(contentToken.content ?? '')),\n raw: String(contentToken.content ?? ''),\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else {\n k++\n }\n }\n\n // Add definition item\n if (termNodes.length > 0) {\n items.push({\n type: 'definition_item',\n term: termNodes,\n definition: definitionNodes,\n raw: `${termNodes.map(term => term.raw).join('')}: ${definitionNodes\n .map(def => def.raw)\n .join('\\n')}`,\n })\n\n // Reset term nodes\n termNodes = []\n }\n\n j = k + 1 // Skip dd_close\n }\n else {\n j++\n }\n }\n\n const definitionListNode: DefinitionListNode = {\n type: 'definition_list',\n items,\n raw: items.map(item => item.raw).join('\\n'),\n }\n\n return [definitionListNode, j + 1] // Skip dl_close\n}\n","import type { FootnoteNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseFootnote(\n tokens: MarkdownToken[],\n index: number,\n): [FootnoteNode, number] {\n const token = tokens[index]\n const meta = (token.meta ?? {}) as unknown as { label?: number | string }\n const id = String(meta?.label ?? '0')\n const footnoteChildren: ParsedNode[] = []\n let j = index + 1\n\n while (j < tokens.length && tokens[j].type !== 'footnote_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n footnoteChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: String(contentToken.content ?? 
''),\n })\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else {\n j++\n }\n }\n\n const footnoteNode: FootnoteNode = {\n type: 'footnote',\n id,\n children: footnoteChildren,\n raw: `[^${id}]: ${footnoteChildren.map(child => child.raw).join('\\n')}`,\n }\n\n return [footnoteNode, j + 1] // Skip footnote_close\n}\n","import type { HeadingNode, MarkdownToken } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseHeading(\n tokens: MarkdownToken[],\n index: number,\n): HeadingNode {\n const token = tokens[index]\n const levelStr = String(token.tag?.substring(1) ?? '1')\n const headingLevel = Number.parseInt(levelStr, 10)\n const headingContentToken = tokens[index + 1]\n const headingContent = String(headingContentToken.content ?? '')\n\n return {\n type: 'heading',\n level: headingLevel,\n text: headingContent,\n children: parseInlineTokens(headingContentToken.children || []),\n raw: headingContent,\n }\n}\n","import type { MarkdownToken, MathBlockNode } from '../../types'\n\n// Parse a math_block token (block/display math expressions)\nexport function parseMathBlock(token: MarkdownToken): MathBlockNode {\n return {\n type: 'math_block',\n content: String(token.content ?? ''),\n loading: !!token.loading,\n raw: String(token.raw ?? ''),\n }\n}\n","import type {\n MarkdownToken,\n TableCellNode,\n TableNode,\n TableRowNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseTable(\n tokens: MarkdownToken[],\n index: number,\n): [TableNode, number] {\n let j = index + 1\n let headerRow: TableRowNode | null = null\n const rows: TableRowNode[] = []\n let isHeader = false\n\n while (j < tokens.length && tokens[j].type !== 'table_close') {\n if (tokens[j].type === 'thead_open') {\n isHeader = true\n j++\n }\n else if (tokens[j].type === 'thead_close') {\n isHeader = false\n j++\n }\n else if (\n tokens[j].type === 'tbody_open'\n || tokens[j].type === 'tbody_close'\n ) {\n j++\n }\n else if (tokens[j].type === 'tr_open') {\n const cells: TableCellNode[] = []\n let k = j + 1\n\n while (k < tokens.length && tokens[k].type !== 'tr_close') {\n if (tokens[k].type === 'th_open' || tokens[k].type === 'td_open') {\n const isHeaderCell = tokens[k].type === 'th_open'\n const contentToken = tokens[k + 1]\n const content = String(contentToken.content ?? '')\n\n cells.push({\n type: 'table_cell',\n header: isHeaderCell || isHeader,\n children: parseInlineTokens(contentToken.children || [], content),\n raw: content,\n })\n\n k += 3 // Skip th_open/td_open, inline, th_close/td_close\n }\n else {\n k++\n }\n }\n\n const rowNode: TableRowNode = {\n type: 'table_row',\n cells,\n raw: cells.map(cell => cell.raw).join('|'),\n }\n\n if (isHeader) {\n headerRow = rowNode\n }\n else {\n rows.push(rowNode)\n }\n\n j = k + 1 // Skip tr_close\n }\n else {\n j++\n }\n }\n\n if (!headerRow) {\n // Default empty header if none found\n headerRow = {\n type: 'table_row',\n cells: [],\n raw: '',\n }\n }\n\n const tableNode: TableNode = {\n type: 'table',\n header: headerRow,\n rows,\n loading: tokens[index].loading ?? 
false,\n raw: [headerRow, ...rows].map(row => row.raw).join('\\n'),\n }\n\n return [tableNode, j + 1] // Skip table_close\n}\n","import type { ThematicBreakNode } from '../../types'\n\nexport function parseThematicBreak(): ThematicBreakNode {\n return {\n type: 'thematic_break',\n raw: '---',\n }\n}\n","import type {\n ListItemNode,\n ListNode,\n MarkdownToken,\n ParsedNode,\n} from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseFenceToken } from '../inline-parsers/fence-parser'\nimport { parseAdmonition } from './admonition-parser'\nimport { parseBlockquote } from './blockquote-parser'\nimport { parseCodeBlock } from './code-block-parser'\nimport { parseDefinitionList } from './definition-list-parser'\nimport { parseFootnote } from './footnote-parser'\nimport { parseHeading } from './heading-parser'\nimport { parseMathBlock } from './math-block-parser'\nimport { parseTable } from './table-parser'\nimport { parseThematicBreak } from './thematic-break-parser'\n\nexport function parseList(\n tokens: MarkdownToken[],\n index: number,\n): [ListNode, number] {\n const token = tokens[index]\n const listItems: ListItemNode[] = []\n let j = index + 1\n\n while (\n j < tokens.length\n && tokens[j].type !== 'bullet_list_close'\n && tokens[j].type !== 'ordered_list_close'\n ) {\n if (tokens[j].type === 'list_item_open') {\n // if (tokens[j].markup === '*') {\n // j++\n // continue\n // }\n const itemChildren: ParsedNode[] = []\n let k = j + 1\n while (k < tokens.length && tokens[k].type !== 'list_item_close') {\n // Handle different block types inside list items\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n const preToken = tokens[k - 1]\n const contentStr = String(contentToken.content ?? '')\n if (/\\n\\d+$/.test(contentStr)) {\n contentToken.content = contentStr.replace(/\\n\\d+$/, '')\n contentToken.children?.splice(-1, 1)\n }\n itemChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], String(contentToken.content ?? ''), preToken),\n raw: String(contentToken.content ?? 
''),\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (tokens[k].type === 'blockquote_open') {\n // Parse blockquote within list item\n const [blockquoteNode, newIndex] = parseBlockquote(tokens, k)\n itemChildren.push(blockquoteNode)\n k = newIndex\n }\n else if (\n tokens[k].type === 'bullet_list_open'\n || tokens[k].type === 'ordered_list_open'\n ) {\n if (tokens[k].markup === '*') {\n k++\n continue\n }\n // Parse nested list\n const [nestedListNode, newIndex] = parseNestedList(tokens, k)\n itemChildren.push(nestedListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'code_block') {\n // Parse code block\n itemChildren.push(parseCodeBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'fence') {\n // Parse fenced code block\n itemChildren.push(parseFenceToken(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'math_block') {\n // Parse math block\n itemChildren.push(parseMathBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'table_open') {\n // Parse table\n const [tableNode, newIndex] = parseTable(tokens, k)\n itemChildren.push(tableNode)\n k = newIndex\n }\n else if (tokens[k].type === 'dl_open') {\n // Parse definition list\n const [defListNode, newIndex] = parseDefinitionList(tokens, k)\n itemChildren.push(defListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'footnote_open') {\n // Parse footnote\n const [footnoteNode, newIndex] = parseFootnote(tokens, k)\n itemChildren.push(footnoteNode)\n k = newIndex\n }\n else if (tokens[k].type === 'heading_open') {\n // Parse heading (though headings in lists are unusual)\n const headingNode = parseHeading(tokens, k)\n itemChildren.push(headingNode)\n k += 3 // Skip heading_open, inline, heading_close\n }\n else if (tokens[k].type === 'hr') {\n // Parse thematic break\n itemChildren.push(parseThematicBreak())\n k += 1\n }\n else if (tokens[k].type === 'container_open') {\n // Handle admonition containers (warning, info, note, tip, danger, caution)\n const match\n = /^::: ?(warning|info|note|tip|danger|caution) ?(.*)$/.exec(\n String(tokens[k].info ?? ''),\n )\n if (match) {\n const [admonitionNode, newIndex] = parseAdmonition(tokens, k, match)\n itemChildren.push(admonitionNode)\n k = newIndex\n }\n else {\n k += 1 // Skip unknown container types\n }\n }\n else {\n k += 1\n }\n }\n\n listItems.push({\n type: 'list_item',\n children: itemChildren,\n raw: itemChildren.map(child => child.raw).join(''),\n })\n\n j = k + 1 // Move past list_item_close\n }\n else {\n j += 1\n }\n }\n\n const listNode: ListNode = {\n type: 'list',\n ordered: token.type === 'ordered_list_open',\n // markdown-it may include attrs like [['start','2']] on ordered_list_open\n start: (() => {\n if (token.attrs && token.attrs.length) {\n const found = token.attrs.find(a => a[0] === 'start')\n if (found) {\n const parsed = Number(found[1])\n return Number.isFinite(parsed) && parsed !== 0 ? 
parsed : 1\n }\n }\n return undefined\n })(),\n items: listItems,\n raw: listItems.map(item => item.raw).join('\\n'),\n }\n\n return [listNode, j + 1] // Move past list_close\n}\n\n// Enhanced function to handle nested lists properly\nfunction parseNestedList(\n tokens: MarkdownToken[],\n index: number,\n): [ListNode, number] {\n // We can directly use parseList since we're in the same file\n // This avoids circular dependency issues\n const nestedToken = tokens[index]\n const nestedItems: ListItemNode[] = []\n let j = index + 1\n\n while (\n j < tokens.length\n && tokens[j].type !== 'bullet_list_close'\n && tokens[j].type !== 'ordered_list_close'\n ) {\n if (tokens[j].type === 'list_item_open') {\n const itemChildren: ParsedNode[] = []\n let k = j + 1\n\n while (k < tokens.length && tokens[k].type !== 'list_item_close') {\n // Handle different block types inside list items\n if (tokens[k].type === 'paragraph_open') {\n const contentToken = tokens[k + 1]\n const preToken = tokens[k - 1]\n itemChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || [], String(contentToken.content ?? ''), preToken),\n raw: String(contentToken.content ?? ''),\n })\n k += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[k].type === 'bullet_list_open'\n || tokens[k].type === 'ordered_list_open'\n ) {\n if (tokens[k].markup === '*') {\n k++\n continue\n }\n\n // Handle deeper nested lists\n const [deeperNestedListNode, newIndex] = parseNestedList(tokens, k)\n itemChildren.push(deeperNestedListNode)\n k = newIndex\n }\n else if (tokens[k].type === 'code_block') {\n itemChildren.push(parseCodeBlock(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'fence') {\n itemChildren.push(parseFenceToken(tokens[k]))\n k += 1\n }\n else if (tokens[k].type === 'math_block') {\n // Parse math block in nested lists\n itemChildren.push(parseMathBlock(tokens[k]))\n k += 1\n }\n else {\n // Skip other token types in nested lists for simplicity\n k += 1\n }\n }\n\n nestedItems.push({\n type: 'list_item',\n children: itemChildren,\n raw: itemChildren.map(child => child.raw).join(''),\n })\n\n j = k + 1 // Move past list_item_close\n }\n else {\n j += 1\n }\n }\n\n const nestedListNode: ListNode = {\n type: 'list',\n ordered: nestedToken.type === 'ordered_list_open',\n start: (() => {\n if (nestedToken.attrs && nestedToken.attrs.length) {\n const found = nestedToken.attrs.find(a => a[0] === 'start')\n if (found) {\n const parsed = Number(found[1])\n return Number.isFinite(parsed) && parsed !== 0 ? parsed : 1\n }\n }\n return undefined\n })(),\n items: nestedItems,\n raw: nestedItems.map(item => item.raw).join('\\n'),\n }\n\n return [nestedListNode, j + 1] // Move past list_close\n}\n","import type { AdmonitionNode, MarkdownToken, ParsedNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseAdmonition(\n tokens: MarkdownToken[],\n index: number,\n match: RegExpExecArray,\n): [AdmonitionNode, number] {\n const kind = String(match[1] ?? 'note')\n const title = String(match[2] ?? 
(kind.charAt(0).toUpperCase() + kind.slice(1)))\n const admonitionChildren: ParsedNode[] = []\n let j = index + 1\n\n while (j < tokens.length && tokens[j].type !== 'container_close') {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n if (contentToken) {\n admonitionChildren.push({\n type: 'paragraph',\n children: parseInlineTokens(contentToken.children || []),\n raw: String(contentToken.content ?? ''),\n })\n }\n j += 3 // Skip paragraph_open, inline, paragraph_close\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n // Handle nested lists - use parseList directly for proper nested list support\n const [listNode, newIndex] = parseList(tokens, j)\n admonitionChildren.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const admonitionNode: AdmonitionNode = {\n type: 'admonition',\n kind,\n title,\n children: admonitionChildren,\n raw: `:::${kind} ${title}\\n${admonitionChildren\n .map(child => child.raw)\n .join('\\n')}\\n:::`,\n }\n\n return [admonitionNode, j + 1] // Skip container_close\n}\n","import type { AdmonitionNode, MarkdownToken, ParsedNode, TextNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\nimport { parseList } from './list-parser'\n\nexport function parseContainer(\n tokens: MarkdownToken[],\n index: number,\n): [AdmonitionNode, number] {\n const openToken = tokens[index]\n\n // Determine kind and optional title\n let kind = 'note'\n let title = ''\n\n const typeMatch = openToken.type.match(/^container_(\\w+)_open$/)\n if (typeMatch) {\n kind = typeMatch[1]\n // some implementations set info to remaining title text\n const info = String(openToken.info ?? '').trim()\n if (info && !info.startsWith(':::')) {\n // if info looks like 'warning title', drop leading kind token\n const maybe = info.replace(new RegExp(`^${kind}`), '').trim()\n if (maybe)\n title = maybe\n }\n }\n else {\n // container_open: info usually contains the marker like ' warning Title'\n const info = String(openToken.info ?? '').trim()\n\n const match\n // eslint-disable-next-line regexp/no-super-linear-backtracking\n = /^:{1,3}\\s*(warning|info|note|tip|danger|caution)\\s*(.*)$/i.exec(info)\n if (match) {\n kind = match[1]\n title = String(match[2] ?? '')\n }\n }\n\n if (!title)\n title = kind.charAt(0).toUpperCase() + kind.slice(1)\n\n const children: ParsedNode[] = []\n let j = index + 1\n\n // Accept closing tokens: 'container_close' or 'container_<kind>_close'\n const closeType = new RegExp(`^container_${kind}_close$`)\n\n while (\n j < tokens.length\n && tokens[j].type !== 'container_close'\n && !closeType.test(tokens[j].type)\n ) {\n if (tokens[j].type === 'paragraph_open') {\n const contentToken = tokens[j + 1]\n if (contentToken) {\n const childrenArr = (contentToken.children as MarkdownToken[]) || []\n let i = -1\n for (let k = childrenArr.length - 1; k >= 0; k--) {\n const t = childrenArr[k] as TextNode\n if (t.type === 'text' && /:+/.test(t.content)) {\n i = k\n break\n }\n }\n const _children = i !== -1 ? childrenArr.slice(0, i) : childrenArr\n children.push({\n type: 'paragraph',\n children: parseInlineTokens(_children || []),\n raw: String(contentToken.content ?? 
'').replace(/\\n:+$/, '').replace(/\\n\\s*:::\\s*$/, ''),\n })\n }\n j += 3\n }\n else if (\n tokens[j].type === 'bullet_list_open'\n || tokens[j].type === 'ordered_list_open'\n ) {\n const [listNode, newIndex] = parseList(tokens, j)\n children.push(listNode)\n j = newIndex\n }\n else {\n j++\n }\n }\n\n const admonitionNode: AdmonitionNode = {\n type: 'admonition',\n kind,\n title,\n children,\n raw: `:::${kind} ${title}\\n${children.map(c => c.raw).join('\\n')}\\n:::`,\n }\n\n // Skip the closing token\n const closingIndex = j\n return [admonitionNode, closingIndex + 1]\n}\n","import type { HardBreakNode } from '../../types'\n\nexport function parseHardBreak(): HardBreakNode {\n return {\n type: 'hardbreak',\n raw: '\\\\\\n',\n }\n}\n","import type { HtmlBlockNode, MarkdownToken } from '../../types'\n\n// Common void tags that don't require a closing tag\nconst VOID_TAGS = new Set([\n 'area',\n 'base',\n 'br',\n 'col',\n 'embed',\n 'hr',\n 'img',\n 'input',\n 'link',\n 'meta',\n 'param',\n 'source',\n 'track',\n 'wbr',\n])\n\n// Cache for dynamic closing-tag regexes per tag name\nconst CLOSE_TAG_RE_CACHE = new Map<string, RegExp>()\n\nexport function parseHtmlBlock(token: MarkdownToken): HtmlBlockNode {\n const raw = String(token.content ?? '')\n\n // Non-element html blocks (comments, doctypes, processing instructions) are non-closable\n if (/^\\s*<!--/.test(raw) || /^\\s*<!/.test(raw) || /^\\s*<\\?/.test(raw)) {\n return {\n type: 'html_block',\n content: raw,\n raw,\n tag: '',\n loading: false,\n }\n }\n\n // Extract first tag name (lowercased) like div, p, section, etc.\n const tagMatch = raw.match(/^\\s*<([A-Z][\\w:-]*)/i)\n const tag = (tagMatch?.[1] || '').toLowerCase()\n\n // Handle unknown or malformed tag gracefully\n if (!tag) {\n return {\n type: 'html_block',\n content: raw,\n raw,\n tag: '',\n loading: false,\n }\n }\n\n // Self-closing first tag like <img ... />\n const selfClosing = /^\\s*<[^>]*\\/\\s*>/.test(raw)\n const isVoid = VOID_TAGS.has(tag)\n\n // Already closed somewhere in the block (case-insensitive)\n let closeRe = CLOSE_TAG_RE_CACHE.get(tag)\n if (!closeRe) {\n closeRe = new RegExp(`<\\\\/\\\\s*${tag}\\\\b`, 'i')\n CLOSE_TAG_RE_CACHE.set(tag, closeRe)\n }\n const hasClosing = closeRe.test(raw)\n\n const loading = !(isVoid || selfClosing || hasClosing)\n\n const content = loading\n ? `${raw.replace(/<[^>]*$/, '')}\\n</${tag}>`\n : raw\n\n return {\n type: 'html_block',\n content,\n raw,\n tag,\n loading,\n }\n}\n","import type { MarkdownToken, ParagraphNode } from '../../types'\nimport { parseInlineTokens } from '../inline-parsers'\n\nexport function parseParagraph(\n tokens: MarkdownToken[],\n index: number,\n): ParagraphNode {\n const paragraphContentToken = tokens[index + 1]\n const paragraphContent = String(paragraphContentToken.content ?? 
'')\n\n return {\n type: 'paragraph',\n children: parseInlineTokens(paragraphContentToken.children || [], paragraphContent),\n raw: paragraphContent,\n }\n}\n","import type { MarkdownIt } from 'markdown-it-ts'\nimport type { MarkdownToken, ParsedNode, ParseOptions } from '../types'\nimport { parseInlineTokens } from './inline-parsers'\nimport { parseFenceToken } from './inline-parsers/fence-parser'\nimport { parseAdmonition } from './node-parsers/admonition-parser'\nimport { parseBlockquote } from './node-parsers/blockquote-parser'\nimport { parseCodeBlock } from './node-parsers/code-block-parser'\nimport { parseContainer } from './node-parsers/container-parser'\nimport { parseDefinitionList } from './node-parsers/definition-list-parser'\nimport { parseFootnote } from './node-parsers/footnote-parser'\nimport { parseHardBreak } from './node-parsers/hardbreak-parser'\nimport { parseHeading } from './node-parsers/heading-parser'\nimport { parseHtmlBlock } from './node-parsers/html-block-parser'\nimport { parseList } from './node-parsers/list-parser'\nimport { parseMathBlock } from './node-parsers/math-block-parser'\nimport { parseParagraph } from './node-parsers/paragraph-parser'\nimport { parseTable } from './node-parsers/table-parser'\nimport { parseThematicBreak } from './node-parsers/thematic-break-parser'\n\nexport function parseMarkdownToStructure(\n markdown: string,\n md: MarkdownIt,\n options: ParseOptions = {},\n): ParsedNode[] {\n // Ensure markdown is a string — guard against null/undefined inputs from callers\n let safeMarkdown = (markdown ?? '').toString().replace(/([^\\\\])\\right/g, '$1\\\\right')\n if (safeMarkdown.endsWith('- *')) {\n // 放置markdown 解析 - * 会被处理成多个 ul >li 嵌套列表\n safeMarkdown = safeMarkdown.replace(/- \\*$/, '- \\\\*')\n }\n if (/\\n\\s*-\\s*$/.test(safeMarkdown)) {\n // 此时 markdown 解析会出错要跳过\n safeMarkdown = safeMarkdown.replace(/\\n\\s*-\\s*$/, '\\n')\n }\n else if (/\\n[[(]\\n*$/.test(safeMarkdown)) {\n // 此时 markdown 解析会出错要跳过\n safeMarkdown = safeMarkdown.replace(/(\\n\\[|\\n\\()+\\n*$/g, '\\n')\n }\n\n // Get tokens from markdown-it\n const tokens = md.parse(safeMarkdown, {})\n // Defensive: ensure tokens is an array\n if (!tokens || !Array.isArray(tokens))\n return []\n\n // Allow consumers to transform tokens before processing\n const pre = options.preTransformTokens\n const post = options.postTransformTokens\n let transformedTokens = tokens as unknown as MarkdownToken[]\n if (pre && typeof pre === 'function') {\n transformedTokens = pre(transformedTokens) || transformedTokens\n }\n\n // Process the tokens into our structured format\n let result = processTokens(transformedTokens)\n\n // Backwards compatible token-level post hook: if provided and returns\n // a modified token array, re-process tokens and override node-level result.\n if (post && typeof post === 'function') {\n const postResult = post(transformedTokens)\n if (Array.isArray(postResult)) {\n // Backwards compatibility: if the hook returns an array of tokens\n // (they have a `type` string property), re-process them into nodes.\n const first = (postResult as unknown[])[0] as unknown\n const firstType = (first as Record<string, unknown>)?.type\n if (first && typeof firstType === 'string') {\n result = processTokens(postResult as unknown as MarkdownToken[])\n }\n else {\n // Otherwise assume it returned ParsedNode[] and use it as-is\n result = postResult as unknown as ParsedNode[]\n }\n }\n }\n return result\n}\n\n// Process markdown-it tokens into our structured format\nexport function 
processTokens(tokens: MarkdownToken[]): ParsedNode[] {\n // Defensive: ensure tokens is an array\n if (!tokens || !Array.isArray(tokens))\n return []\n\n const result: ParsedNode[] = []\n let i = 0\n // Note: table token normalization is applied during markdown-it parsing\n // via the `applyFixTableTokens` plugin (core.ruler.after('block')).\n // Link/strong/list-item fixes are applied during the inline stage by\n // their respective plugins. That keeps parsing-time fixes centralized\n // and avoids ad-hoc post-processing here.\n while (i < tokens.length) {\n const token = tokens[i]\n switch (token.type) {\n case 'container_warning_open':\n case 'container_info_open':\n case 'container_note_open':\n case 'container_tip_open':\n case 'container_danger_open':\n case 'container_caution_open':\n case 'container_error_open': {\n const [warningNode, newIndex] = parseContainer(tokens, i)\n result.push(warningNode)\n i = newIndex\n break\n }\n\n case 'heading_open':\n result.push(parseHeading(tokens, i))\n i += 3 // Skip heading_open, inline, heading_close\n break\n\n case 'paragraph_open':\n result.push(parseParagraph(tokens, i))\n i += 3 // Skip paragraph_open, inline, paragraph_close\n break\n\n case 'html_block':\n result.push(parseHtmlBlock(token))\n i += 1\n break\n case 'code_block':\n result.push(parseCodeBlock(tokens[i]))\n i += 1\n break\n\n case 'fence':\n result.push(parseFenceToken(tokens[i]))\n i += 1\n break\n\n case 'bullet_list_open':\n case 'ordered_list_open': {\n const [listNode, newIndex] = parseList(tokens, i)\n result.push(listNode)\n i = newIndex\n break\n }\n\n case 'hr':\n result.push(parseThematicBreak())\n i += 1\n break\n\n case 'blockquote_open': {\n const [blockquoteNode, newIndex] = parseBlockquote(tokens, i)\n result.push(blockquoteNode)\n i = newIndex\n break\n }\n\n case 'table_open': {\n const [tableNode, newIndex] = parseTable(tokens, i)\n result.push(tableNode)\n i = newIndex\n break\n }\n\n case 'dl_open': {\n const [definitionListNode, newIndex] = parseDefinitionList(tokens, i)\n result.push(definitionListNode)\n i = newIndex\n break\n }\n\n case 'footnote_open': {\n const [footnoteNode, newIndex] = parseFootnote(tokens, i)\n result.push(footnoteNode)\n i = newIndex\n break\n }\n\n case 'container_open': {\n const match\n = /^::: ?(warning|info|note|tip|danger|caution|error) ?(.*)$/.exec(\n String(token.info ?? 
''),\n )\n if (match) {\n const [admonitionNode, newIndex] = parseAdmonition(tokens, i, match)\n result.push(admonitionNode)\n i = newIndex\n }\n else {\n i += 1 // Not a container type we handle, skip\n }\n break\n }\n\n case 'hardbreak':\n result.push(parseHardBreak())\n i++\n break\n\n case 'math_block':\n result.push(parseMathBlock(tokens[i]))\n i += 1\n break\n\n case 'inline':\n result.push(...parseInlineTokens(token.children || []))\n i += 1\n break\n default:\n // Handle other token types or skip them\n i += 1\n break\n }\n }\n\n return result\n}\n\nexport { parseInlineTokens }\n","import type { MarkdownIt, MarkdownItPlugin } from 'markdown-it-ts'\nimport type { FactoryOptions } from './factory'\nimport { full as markdownItEmoji } from 'markdown-it-emoji'\nimport markdownItFootnote from 'markdown-it-footnote'\nimport markdownItIns from 'markdown-it-ins'\nimport markdownItMark from 'markdown-it-mark'\nimport markdownItSub from 'markdown-it-sub'\n\nimport markdownItSup from 'markdown-it-sup'\nimport * as markdownItCheckbox from 'markdown-it-task-checkbox'\nimport { factory } from './factory'\nimport {\n parseInlineTokens,\n parseMarkdownToStructure,\n processTokens,\n} from './parser'\n\n// Re-export config\nexport { setDefaultMathOptions } from './config'\n\n// Re-export parser functions\nexport { parseInlineTokens, parseMarkdownToStructure, processTokens }\nexport type { MathOptions } from './config'\n\n// Re-export utilities\nexport { findMatchingClose } from './findMatchingClose'\n\nexport { parseFenceToken } from './parser/inline-parsers/fence-parser'\n// Re-export plugins\nexport { applyContainers } from './plugins/containers'\n\nexport { ESCAPED_TEX_BRACE_COMMANDS, isMathLike, TEX_BRACE_COMMANDS } from './plugins/isMathLike'\nexport { applyMath, KATEX_COMMANDS, normalizeStandaloneBackslashT } from './plugins/math'\n// Re-export the node types for backward compatibility\nexport * from './types'\n\nexport interface GetMarkdownOptions extends FactoryOptions {\n plugin?: Array<unknown>\n apply?: Array<(md: MarkdownIt) => void>\n /**\n * Custom translation function or translation map for UI texts\n * @default { 'common.copy': 'Copy' }\n */\n i18n?: ((key: string) => string) | Record<string, string>\n}\n\nexport function getMarkdown(msgId: string = `editor-${Date.now()}`, options: GetMarkdownOptions = {}) {\n // keep legacy behaviour but delegate to new factory and reapply project-specific rules\n const md = factory(options)\n\n // Setup i18n translator function\n const defaultTranslations: Record<string, string> = {\n 'common.copy': 'Copy',\n }\n\n let t: (key: string) => string\n if (typeof options.i18n === 'function') {\n t = options.i18n\n }\n else if (options.i18n && typeof options.i18n === 'object') {\n const i18nMap = options.i18n as Record<string, string>\n t = (key: string) => i18nMap[key] ?? defaultTranslations[key] ?? key\n }\n else {\n t = (key: string) => defaultTranslations[key] ?? key\n }\n\n // apply user supplied plugins (md.use)\n if (Array.isArray(options.plugin)) {\n for (const p of options.plugin) {\n // allow both [plugin, opts] tuple or plugin function\n const pluginItem = p as unknown\n if (Array.isArray(pluginItem)) {\n const fn = pluginItem[0]\n const opts = pluginItem[1]\n if (typeof fn === 'function')\n md.use(fn, opts)\n }\n else if (typeof pluginItem === 'function') {\n md.use(pluginItem as MarkdownItPlugin)\n }\n // otherwise ignore non-callable plugins\n }\n }\n\n // apply user supplied apply functions to mutate the md instance (e.g. 
md.block.ruler.before(...))\n if (Array.isArray(options.apply)) {\n for (const fn of options.apply) {\n try {\n fn(md)\n }\n catch (e) {\n // swallow errors to preserve legacy behaviour; developers can see stack in console\n\n console.error('[getMarkdown] apply function threw an error', e)\n }\n }\n }\n\n // Re-apply a few project specific plugins that were previously always enabled\n md.use(markdownItSub)\n md.use(markdownItSup)\n md.use(markdownItMark)\n md.use(markdownItEmoji)\n // Safely resolve default export or the module itself for checkbox plugin\n type CheckboxPluginFn = (md: MarkdownIt, opts?: unknown) => void\n const markdownItCheckboxPlugin = ((markdownItCheckbox as unknown) as {\n default?: CheckboxPluginFn\n }).default ?? markdownItCheckbox\n md.use(markdownItCheckboxPlugin)\n md.use(markdownItIns)\n md.use(markdownItFootnote)\n\n // Annotate fence tokens with unclosed meta using a lightweight line check\n md.core.ruler.after('block', 'mark_fence_closed', (state: unknown) => {\n const s = state as unknown as {\n src: string\n tokens: Array<{ type?: string, map?: number[], markup?: string, meta?: Record<string, unknown> }>\n }\n const src: string = s.src\n const lines = src.split(/\\r?\\n/)\n for (const token of s.tokens) {\n if (token.type !== 'fence' || !token.map || !token.markup)\n continue\n const openLine: number = token.map[0]\n const endLine: number = token.map[1]\n const markup: string = token.markup\n const marker = markup[0]\n const minLen = markup.length\n // The closing line, if exists, should be the last line consumed by the block\n const lineIdx = Math.max(0, endLine - 1)\n const line = lines[lineIdx] ?? ''\n let i = 0\n while (i < line.length && (line[i] === ' ' || line[i] === '\\t')) i++\n let count = 0\n while (i + count < line.length && line[i + count] === marker) count++\n let j = i + count\n while (j < line.length && (line[j] === ' ' || line[j] === '\\t')) j++\n const closed = endLine > openLine + 1 && count >= minLen && j === line.length\n const tokenShape = token as unknown as { meta?: Record<string, unknown> }\n tokenShape.meta = tokenShape.meta ?? {}\n ;(tokenShape.meta as Record<string, unknown>).unclosed = !closed\n // also set a explicit `closed` boolean for compatibility with plugins/tests\n ;(tokenShape.meta as Record<string, unknown>).closed = !!closed\n }\n })\n\n // wave rule (legacy)\n const waveRule = (state: unknown, silent: boolean) => {\n const s = state as unknown as { pos: number, src: string, push: (type: string, tag?: string, nesting?: number) => any }\n const start = s.pos\n if (s.src[start] !== '~')\n return false\n const prevChar = s.src[start - 1]\n const nextChar = s.src[start + 1]\n if (/\\d/.test(prevChar) && /\\d/.test(nextChar)) {\n if (!silent) {\n const token = s.push('text', '', 0)\n token.content = '~'\n }\n s.pos += 1\n return true\n }\n return false\n }\n\n md.inline.ruler.before('sub', 'wave', waveRule)\n\n // custom fence that uses msgId for unique ids\n md.renderer.rules.fence = (tokens: unknown, idx: number) => {\n const tokensAny = tokens as unknown as import('./types').MarkdownToken[]\n const token = tokensAny[idx]\n const tokenShape = token as unknown as { info?: string, content?: string }\n const info = String(tokenShape.info ?? '').trim()\n const str = String(tokenShape.content ?? '')\n const encodedCode = btoa(unescape(encodeURIComponent(str)))\n const language = String(info ?? 
'text')\n const uniqueId = `editor-${msgId}-${idx}-${language}`\n\n return `<div class=\"code-block\" data-code=\"${encodedCode}\" data-lang=\"${language}\" id=\"${uniqueId}\">\n <div class=\"code-header\">\n <span class=\"code-lang\">${language.toUpperCase()}</span>\n <button class=\"copy-button\" data-code=\"${encodedCode}\">${t(\n 'common.copy',\n )}</button>\n </div>\n <div class=\"code-editor\"></div>\n </div>`\n }\n\n // reference rule (legacy)\n const RE_REFERENCE = /^\\[(\\d+)\\]/\n const referenceInline = (state: unknown, silent: boolean) => {\n const s = state as unknown as { src: string, pos: number, push: (type: string, tag?: string, nesting?: number) => any }\n if (s.src[s.pos] !== '[')\n return false\n const match = RE_REFERENCE.exec(s.src.slice(s.pos))\n if (!match)\n return false\n if (!silent) {\n const id = match[1]\n const token = s.push('reference', 'span', 0)\n token.content = id\n token.markup = match[0]\n }\n s.pos += match[0].length\n return true\n }\n\n md.inline.ruler.before('escape', 'reference', referenceInline)\n md.renderer.rules.reference = (tokens: unknown, idx: number) => {\n const tokensAny = tokens as unknown as import('./types').MarkdownToken[]\n const id = String(tokensAny[idx].content ?? '')\n return `<span class=\"reference-link\" data-reference-id=\"${id}\" role=\"button\" tabindex=\"0\" title=\"Click to view reference\">${id}</span>`\n }\n\n return md\n}\n"],"mappings":";;;;;;;;;;;AAgBA,IAAIA;AAEJ,SAAgB,sBAAsB,MAA+B;AACnE,sBAAqB;;AAGvB,SAAgB,wBAAiD;AAC/D,QAAO;;;;;ACpBT,SAAgB,gBAAgB,IAAgB;AAC7C;EACC;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD,CAAC,SAAS,SAAS;AAClB,KAAG,IAAI,qBAAqB,MAAM,EAChC,OAAO,QAAiB,KAAa;AAMnC,OALkB,OACM,KAIT,YAAY,EACzB,QAAO,2CAA2C,KAAK;OAGvD,QAAO;KAGZ,CAAC;GACF;AAGF,IAAG,MAAM,MAAM,OACb,SACA,2BACC,OAAgB,WAAmB,SAAiB,WAAoB;EAUvE,MAAM,IAAI;EACV,MAAM,WAAW,EAAE,OAAO,aAAa,EAAE,OAAO;EAChD,MAAM,UAAU,EAAE,OAAO;EACzB,MAAM,cAAc,EAAE,IACnB,MAAM,UAAU,QAAQ,CACxB,MAAM,eAAe;AACxB,MAAI,CAAC,YACH,QAAO;AACT,MAAI,OACF,QAAO;EAET,MAAM,OAAO,YAAY;EACzB,IAAI,WAAW,YAAY;EAC3B,IAAI,QAAQ;AACZ,SAAO,YAAY,SAAS;GAC1B,MAAM,OAAO,EAAE,OAAO,YAAY,EAAE,OAAO;GAC3C,MAAM,OAAO,EAAE,OAAO;AACtB,OAAI,EAAE,IAAI,MAAM,MAAM,KAAK,CAAC,MAAM,KAAK,OAAO;AAC5C,YAAQ;AACR;;AAEF;;AAEF,MAAI,CAAC,MACH,QAAO;AAIT,EAFkB,EAAE,KAAK,sBAAsB,OAAO,EAAE,CAE9C,QAAQ,SAAS,+BAA+B,OAAO;EAEjE,MAAMC,eAAyB,EAAE;AACjC,OAAK,IAAI,IAAI,YAAY,GAAG,IAAI,UAAU,KAAK;GAC7C,MAAM,OAAO,EAAE,OAAO,KAAK,EAAE,OAAO;GACpC,MAAM,OAAO,EAAE,OAAO;AACtB,gBAAa,KAAK,EAAE,IAAI,MAAM,MAAM,KAAK,CAAC;;AAI5C,IAAE,KAAK,kBAAkB,KAAK,EAAE;EAChC,MAAM,cAAc,EAAE,KAAK,UAAU,IAAI,EAAE;AAC3C,cAAY,UAAU,aAAa,KAAK,KAAK;AAC7C,cAAY,MAAM,CAAC,YAAY,GAAG,SAAS;AAG3C,cAAY,WAAW,EAAE;AACzB,IAAE,GAAG,OAAO,MAAM,YAAY,SAAS,EAAE,IAAK,EAAU,KAAK,YAAY,SAAS;AAClF,IAAE,KAAK,mBAAmB,KAAK,GAAG;AAElC,IAAE,KAAK,uBAAuB,OAAO,GAAG;AAExC,IAAE,OAAO,WAAW;AACpB,SAAO;GAEV;;;;;AChGH,MAAMC,cAAY,IAAI,IAAI;CACxB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,SAAgB,yBAAyB,IAAgB;AAGvD,IAAG,KAAK,MAAM,KAAK,2BAA2B,UAAmB;EAE/D,MAAM,OADI,MACK,UAAU,EAAE;EAG3B,MAAMC,WAA+B,EAAE;AACvC,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;GACpC,MAAM,IAAI,KAAK;AACf,OAAI,EAAE,SAAS,cAAc;IAC3B,MAAM,MAAM,EAAE,SAAS,MAAM,cAAc,GAAG,MAAM;AAEpD,QAAI,CADiB,uBAAuB,KAAK,EAAE,WAAW,GAAG,CAG/D,UAAS,KAAK,CAAC,KAAK,EAAE,CAAC;aAInB,SAAS,SAAS,KAAK,SAAS,SAAS,SAAS,GAAG,OAAO,IAC9D,UAAS,KAAK;AAGlB;cAEO,SAAS,SAAS,GAAG;AAE5B,QAAI,EAAE,SAAS,oBAAoB,EAAE,SAAS,mBAAmB;AAE/D,UAAK,OAAO,GAAG,EAAE;AACjB;AACA;;IAEF,MAAM,UAAU,EAAE,WAAW;IAE7B,MAAM,gCADoB,IAAI,OAAO,eAAe,SAAS,SAAS,SAAS,GAAG,GAAG,OAAO,EACrD,KAAK,QAAQ;AAEpD,QAAI,S
AAS;KAEX,MAAM,GAAG,aAAa,SAAS,SAAS,SAAS;KACjD,MAAM,YAAY,KAAK;AACvB,eAAU,UAAU,GAAG,UAAU,WAAW,GAAG,IAAI;AACnD,SAAI,UAAU,YAAY,MACxB,WAAU,UAAU,CAAC;;AAEzB,QAAI,aACF,UAAS,KAAK;AAGhB,SAAK,OAAO,GAAG,EAAE;AACjB;SAGA;;AAIJ,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;GACpC,MAAM,IAAI,KAAK;AACf,OAAI,EAAE,SAAS,cAAc;IAC3B,MAAM,MAAM,EAAE,SAAS,MAAM,cAAc,GAAG,MAAM;AAEpD,QAAI;KAAC;KAAM;KAAM;KAAO;KAAS;KAAQ;KAAQ;KAAO;KAAK;KAAM;KAAK,CAAC,SAAS,IAAI,CACpF;AACF,MAAE,OAAO;IACT,MAAM,UAAU,EAAE,SAAS,SAAS,KAAK,IAAI,GAAG,GAAG,QAAQ,EAAE,YAAY,SAAY,EAAE,UAAU;AACjG,MAAE,WAAW,CACX;KACE,MAAM;KACN,SAAS,EAAE;KACX,KAAK,EAAE,SAAS,MAAM,cAAc,GAAG,MAAM;KAC7C;KACD,CACF;AACD;;AAEF,OAAI,CAAC,KAAK,EAAE,SAAS,SACnB;AAGF,OAAI,EAAE,SAAS,WAAW,KAAK,EAAE,SAAS,GAAG,SAAS,eAAe;IAEnE,MAAM,MAAM,EAAE,SAAS,GAAG,SAAS,MAAM,cAAc,GAAG,MAAM;AAEhE,QAAI;KAAC;KAAK;KAAQ;KAAU;KAAM;KAAK;KAAK;KAAI,CAAC,SAAS,IAAI,EAAE;AAC9D,OAAE,SAAS,GAAG,UAAU;AACxB,OAAE,SAAS,GAAG,MAAM;AACpB,OAAE,SAAS,KAAK;MACd,MAAM;MACN;MACA,SAAS;MACT,SAAS,KAAK,IAAI;MACnB,CAAQ;UAGT,GAAE,WAAW,CACX;KACE,MAAM;KACN,SAAS;KACT;KACA,SAAS,EAAE,SAAS,GAAG,UAAU,EAAE,SAAS,GAAG;KAChD,CACF;AAEH;cAEO,EAAE,SAAS,WAAW,KAAK,EAAE,SAAS,GAAG,SAAS,iBAAiB,EAAE,SAAS,GAAG,SAAS,eAAe;IAChH,MAAM,MAAM,EAAE,SAAS,GAAG,SAAS,MAAM,cAAc,GAAG,MAAM;AAEhE,QAAI;KAAC;KAAK;KAAQ;KAAU;KAAM;KAAK;KAAK;KAAI,CAAC,SAAS,IAAI,CAC5D;AACF,MAAE,WAAW,CACX;KACE,MAAM;KACN,SAAS;KACT;KACA,SAAS,EAAE,SAAS,KAAI,OAAM,GAAG,QAAQ,CAAC,KAAK,GAAG;KACnD,CACF;AACD;;AAGF,OAAI,CAAC,EAAE,SAAS,WAAW,IAAI,IAAK,EAAU,UAAU,WAAW,EACjE;GAEF,MAAM,MAAM,OAAO,EAAE,QAAQ;GAC7B,MAAM,UAAU,IAAI,MAAM,cAAc,GAAG,IAAI,aAAa,IAAI;AAChE,OAAI,CAAC,QACH;GAGF,MAAM,SADc,aAAa,KAAK,IAAI,IACZD,YAAU,IAAI,QAAQ;GAEpD,MAAM,YAAY;AAElB,OAAI,QAAQ;AAEV,cAAU,WAAW,CACnB;KAAE,MAAM;KAAe,SAAS;KAAK,CACtC;AACD;;AAEF,aAAU,SAAS,SAAS;;GAE9B;;;;;AChKJ,MAAM,iBAAiB;AAEvB,SAAgB,mBAAmB,IAAgB;CAIjD,MAAM,QAAQ,OAAgB,WAAoB;EAChD,MAAM,IAAI;EACV,MAAM,QAAQ,EAAE;AAChB,MAAI,EAAE,IAAI,WAAW,IACnB,QAAO;AAGT,MAAI,QAAQ,KAAK,EAAE,IAAI,QAAQ,OAAO,IACpC,QAAO;EAGT,MAAM,OAAO,EAAE,IAAI,MAAM,MAAM;EAE/B,MAAM,IAAI,eAAe,KAAK,KAAK;AACnC,MAAI,CAAC,EACH,QAAO;AAET,MAAI,OACF,QAAO;EAET,MAAM,OAAO,EAAE,MAAM;EACrB,MAAM,OAAO,EAAE,MAAM;AAIrB,MAAI,KAAK,SAAS,IAAI,IAAI,KAAK,SAAS,IAAI,CAC1C,QAAO;EACT,MAAM,WAAW,KAAK,QAAQ,IAAI;EAClC,MAAM,kBAAkB,aAAa;EAGrC,MAAM,OAAO,EAAE,KAAK,aAAa,KAAK,EAAE;AACxC,OAAK,QAAQ,CAAC,CAAC,QAAQ,KAAK,CAAC;EAE7B,MAAM,MAAM,EAAE,KAAK,QAAQ,IAAI,EAAE;AACjC,MAAI,UAAU;AAGd,MAAI,iBAAiB;AACnB,KAAE,KAAK,cAAc,KAAK,GAAG;AAE7B,KAAE,OAAO,WAAW;QAKpB,GAAE,OAAO,EAAE,GAAG;AAEhB,SAAO;;AAIT,IAAG,OAAO,MAAM,OAAO,QAAQ,mBAAmB,KAAK;;;;;AC1DzD,SAAgB,mBAAmB,IAAgB;AAKjD,IAAG,KAAK,MAAM,MAAM,UAAU,oBAAoB,UAAmB;EAEnE,MAAM,OADI,MACK,UAAU,EAAE;AAC3B,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;GACpC,MAAM,IAAI,KAAK;AACf,OAAI,KAAK,EAAE,SAAS,YAAY,MAAM,QAAQ,EAAE,SAAS,CACvD,KAAI;AACF,MAAE,WAAW,aAAa,EAAE,SAAS;YAEhC,GAAG;AAKR,YAAQ,MAAM,sDAAsD,EAAE;;;GAI5E;;AAGJ,SAAS,aAAa,QAA0C;AAC9D,KAAI,OAAO,SAAS,EAClB,QAAO;AAET,MAAK,IAAI,IAAI,GAAG,KAAK,OAAO,SAAS,GAAG,KAAK;AAC3C,MAAI,CAAC,OAAO,GACV;AACF,MAAI,OAAO,IAAI,SAAS,UAAU,OAAO,GAAG,SAAS,SAAS,IAAI,IAAI,OAAO,IAAI,IAAI,SAAS,aAAa;GACzG,MAAM,QAAQ,OAAO,GAAG,QAAS,MAAM,eAAe;AACtD,OAAI,OAAO;IACT,IAAI,aAAa,OAAO,GAAG,QAAS,MAAM,GAAG,MAAM,MAAM;IACzD,MAAM,gBAAgB,WAAW,MAAM,SAAS;IAChD,MAAM,iBAAiB,EAAE;AACzB,QAAI,eAAe;AACjB,kBAAa,WAAW,MAAM,GAAG,cAAc,MAAM;AACrD,SAAI,WACF,gBAAe,KAAK;MAClB,MAAM;MACN,SAAS;MACT,KAAK;MACN,CAAC;KAEJ,MAAM,OAAO,MAAM;KACnB,MAAM,OAAO,cAAc,GAAG;AAC9B,SAAI,SAAS,EACX,gBAAe,KAAK;MAAE,MAAM;MAAW,KAAK;MAAM,SAAS;MAAG,CAAC;cAExD,SAAS,EAChB,gBAAe,KAAK;MAAE,MAAM;MAAe,KAAK;MAAU,SAAS;MAAG,CAAC;cAEhE,SAAS,GAAG;AACnB,qBAAe,KAAK;OAAE,MAAM;OAAe,KAAK;OAAU,SA
AS;OAAG,CAAC;AACvE,qBAAe,KAAK;OAAE,MAAM;OAAW,KAAK;OAAM,SAAS;OAAG,CAAC;;KAEjE,IAAI,OAAO,OAAO,IAAI,IAAI,WAAW;AACrC,SAAI,OAAO,IAAI,IAAI,SAAS,UAAU,CAAC,OAAO,IAAI,GAAG,SAAS,WAAW,IAAI,EAAE;AAC7E,cAAQ,OAAO,IAAI,IAAI,WAAW;AAClC,aAAO,IAAI,GAAG,UAAU;;AAE1B,oBAAe,KACb;MACE,MAAM;MACN,SAAS,CAAC,OAAO,IAAI,IAAI,SAAS,WAAW,IAAI;MACjD;MACA,OAAO;MACP;MACA,UAAU,CACR;OACE,MAAM;OACN,SAAS;OACT,KAAK;OACN,CACF;MACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;MAClC,CACF;AACD,SAAI,SAAS,EACX,gBAAe,KAAK;MAAE,MAAM;MAAY,KAAK;MAAM,SAAS;MAAI,CAAC;cAE1D,SAAS,EAChB,gBAAe,KAAK;MAAE,MAAM;MAAgB,KAAK;MAAU,SAAS;MAAI,CAAC;cAElE,SAAS,GAAG;AACnB,qBAAe,KAAK;OAAE,MAAM;OAAY,KAAK;OAAM,SAAS;OAAI,CAAC;AACjE,qBAAe,KAAK;OAAE,MAAM;OAAgB,KAAK;OAAU,SAAS;OAAI,CAAC;;AAE3E,SAAI,OAAO,IAAI,IAAI,SAAS,QAAQ;MAClC,MAAM,YAAY,OAAO,IAAI,GAAG,SAAS,QAAQ,UAAU,GAAG;AAC9D,UAAI,UACF,gBAAe,KAAK;OAClB,MAAM;OACN,SAAS;OACT,KAAK;OACN,CAAC;AAEJ,aAAO,OAAO,GAAG,GAAG,GAAG,eAAe;WAGtC,QAAO,OAAO,GAAG,GAAG,GAAG,eAAe;WAGrC;AACH,SAAI,WACF,gBAAe,KAAK;MAClB,MAAM;MACN,SAAS;MACT,KAAK;MACN,CAAC;KAEJ,MAAM,OAAO,MAAM;KACnB,IAAI,OAAO,OAAO,IAAI,IAAI,WAAW;AACrC,SAAI,OAAO,IAAI,IAAI,SAAS,UAAU,CAAC,OAAO,IAAI,GAAG,SAAS,WAAW,IAAI,EAAE;AAC7E,cAAQ,OAAO,IAAI,IAAI,WAAW;AAClC,aAAO,IAAI,GAAG,UAAU;;AAE1B,oBAAe,KAAK,GAAG,CACrB;MACE,MAAM;MACN,SAAS,CAAC,OAAO,IAAI,IAAI,SAAS,WAAW,IAAI;MACjD;MACA,OAAO;MACP;MACA,UAAU,CACR;OACE,MAAM;OACN,SAAS;OACT,KAAK;OACN,CACF;MACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;MAClC,CACF,CAAC;AACF,SAAI,OAAO,IAAI,IAAI,SAAS,QAAQ;MAClC,MAAM,YAAY,OAAO,IAAI,GAAG,SAAS,QAAQ,OAAO,GAAG;AAC3D,UAAI,UACF,gBAAe,KAAK;OAClB,MAAM;OACN,SAAS;OACT,KAAK;OACN,CAAC;AAEJ,aAAO,OAAO,GAAG,GAAG,GAAG,eAAe;WAGtC,QAAO,OAAO,GAAG,GAAG,GAAG,eAAe;;AAG1C,SAAM,eAAe,SAAS;AAC9B;;aAGK,OAAO,GAAG,SAAS,eAAe,OAAO,GAAG,WAAW,aAAa,OAAO,IAAI,IAAI,SAAS,UAAU,OAAO,IAAI,GAAG,SAAS,SAAS,IAAI,EACjJ;OAAI,OAAO,IAAI,IAAI,SAAS,cAAc;IAExC,MAAM,iBAAiB,EAAE;IACzB,MAAM,OAAQ,OAAO,IAAI,GAAG,WAAW;IACvC,IAAI,OAAO,OAAO,GAAG,OAAO,MAAK,SAAQ,KAAK,OAAO,OAAO,GAAG,MAAM;AAErE,QAAI,OAAO,IAAI,IAAI,SAAS,QAAQ;KAClC,MAAM,KAAK,OAAO,IAAI,IAAI,WAAW,IAAI,QAAQ,IAAI;KACrD,MAAM,UAAU,MAAM;AACtB,SAAI,MAAM,IAAI;AACZ,cAAS,OAAO,IAAI,IAAI,SAAS,MAAM,GAAG,EAAE,IAAI;AAChD,aAAO,IAAI,GAAG,UAAU;;AAG1B,oBAAe,KAAK;MAClB,MAAM;MACN;MACA;MACA,OAAO;MACP;MACA,UAAU,CACR;OACE,MAAM;OACN,SAAS;OACT,KAAK;OACN,CACF;MACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;MAClC,CAAC;KACF,MAAM,YAAY,OAAO,IAAI,GAAG,SAAS,QAAQ,UAAU,GAAG;AAC9D,SAAI,UACF,gBAAe,KAAK;MAClB,MAAM;MACN,SAAS;MACT,KAAK;MACN,CAAC;AAEJ,YAAO,OAAO,IAAI,GAAG,GAAG,GAAG,eAAe;WAEvC;AACH,oBAAe,KAAK;MAClB,MAAM;MACN,SAAS;MACT;MACA,OAAO;MACP;MACA,UAAU,CACR;OACE,MAAM;OACN,SAAS;OACT,KAAK;OACN,CACF;MACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;MAClC,CAAC;AACF,YAAO,OAAO,IAAI,GAAG,GAAG,GAAG,eAAe;;AAE5C;;;AAGJ,MAAI,OAAO,GAAG,SAAS,gBAAgB,OAAO,GAAG,YAAY,MAAM,OAAO,IAAI,IAAI,SAAS,UAAU,OAAO,IAAI,IAAI,SAAS,UAAU,OAAO,IAAI,IAAI,SAAS,aAAa;AAE1K,UAAO,IAAI,GAAG,UAAU;GACxB,MAAM,OAAO,OAAO,IAAI,GAAG,WAAW;GACtC,IAAI,OAAO,OAAO,IAAI,GAAG,QAAQ,KAAK,MAAM;GAC5C,IAAI,QAAQ;AACZ,OAAI,OAAO,GAAG,WAAW,aAAa,OAAO,IAAI,IAAI,SAAS,QAAQ;IACpE,MAAM,KAAK,OAAO,IAAI,IAAI,WAAW,IAAI,QAAQ,IAAI;AACrD,QAAI,MAAM,IAAI;AACZ,aAAS,OAAO,IAAI,IAAI,SAAS,MAAM,GAAG,EAAE,IAAI;AAChD,YAAO,IAAI,GAAG,UAAU;;AAE1B,aAAS;;AAEX,UAAO,OAAO,IAAI,GAAG,OAAO;IAC1B,MAAM;IACN,SAAS;IACT;IACA,OAAO;IACP;IACA,UAAU,CACR;KACE,MAAM;KACN,SAAS;KACT,KAAK;KACN,CACF;IACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;IAClC,CAAQ;aAEF,OAAO,GAAG,SAAS,WAAW,KAAK,IAAI,OAAO,IAAI,GAAG,QAAQ,SAAS,IAAI,IAAI,OAAO,IAAI,GAAG,SAAS,UAAU,OAAO,IAAI,GAAG,SAAS,SAAS,IAAI,EAAE;GAC5J,MAAM,OAAO,OAAO,IAAI,GAAG,OAAQ;GACnC,MAAM,iBAAiB,EAAE;GACzB,MAAM,aAAa,OAAO,IAAI,GAAG,QAAS,MAAM,GAAG,OAAO
,IAAI,GAAG,QAAS,SAAS,IAAI,KAAK;AAC5F,OAAI,WACF,gBAAe,KAAK;IAClB,MAAM;IACN,SAAS;IACT,KAAK;IACN,CAAC;AAEJ,OAAI,SAAS,EACX,gBAAe,KAAK;IAAE,MAAM;IAAW,KAAK;IAAM,SAAS;IAAG,CAAC;YAExD,SAAS,EAChB,gBAAe,KAAK;IAAE,MAAM;IAAe,KAAK;IAAU,SAAS;IAAG,CAAC;YAEhE,SAAS,GAAG;AACnB,mBAAe,KAAK;KAAE,MAAM;KAAe,KAAK;KAAU,SAAS;KAAG,CAAC;AACvE,mBAAe,KAAK;KAAE,MAAM;KAAW,KAAK;KAAM,SAAS;KAAG,CAAC;;GAEjE,MAAM,OAAO,OAAO,IAAI,GAAG,WAAW;GACtC,IAAI,OAAO,OAAO,GAAG,QAAS,MAAM,EAAE;GACtC,IAAI,UAAU;AACd,OAAI,OAAO,IAAI,IAAI,SAAS,QAAQ;IAClC,MAAM,KAAK,OAAO,IAAI,IAAI,WAAW,IAAI,QAAQ,IAAI;AACrD,cAAU,MAAM;AAChB,QAAI,MAAM,IAAI;AACZ,aAAS,OAAO,IAAI,IAAI,SAAS,MAAM,GAAG,EAAE,IAAI;AAChD,YAAO,IAAI,GAAG,UAAU;;;AAG5B,kBAAe,KAAK;IAClB,MAAM;IACN;IACA;IACA,OAAO;IACP;IACA,UAAU,CACR;KACE,MAAM;KACN,SAAS;KACT,KAAK;KACN,CACF;IACD,KAAK,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG;IAClC,CAAC;AACF,OAAI,SAAS,EACX,gBAAe,KAAK;IAAE,MAAM;IAAY,KAAK;IAAM,SAAS;IAAI,CAAC;YAE1D,SAAS,EAChB,gBAAe,KAAK;IAAE,MAAM;IAAgB,KAAK;IAAU,SAAS;IAAI,CAAC;YAElE,SAAS,GAAG;AACnB,mBAAe,KAAK;KAAE,MAAM;KAAY,KAAK;KAAM,SAAS;KAAI,CAAC;AACjE,mBAAe,KAAK;KAAE,MAAM;KAAgB,KAAK;KAAU,SAAS;KAAI,CAAC;;AAE3E,OAAI,OAAO,IAAI,IAAI,SAAS,QAAQ;IAClC,MAAM,YAAY,OAAO,IAAI,GAAG,SAAS,QAAQ,UAAU,GAAG;AAC9D,QAAI,UACF,gBAAe,KAAK;KAClB,MAAM;KACN,SAAS;KACT,KAAK;KACN,CAAC;AAEJ,WAAO,OAAO,IAAI,GAAG,GAAG,GAAG,eAAe;cAEnC,OAAO,IAAI,IAAI,SAAS,YAE/B,QAAO,OAAO,IAAI,GAAG,IAAI,GAAG,eAAe;OAG3C,QAAO,OAAO,IAAI,GAAG,GAAG,GAAG,eAAe;AAE5C,QAAM,eAAe,SAAS;AAC9B;;;AAGJ,QAAO;;;;;ACxUT,SAAgB,iBAAiB,IAAgB;AAG/C,IAAG,KAAK,MAAM,MAAM,UAAU,yBAAyB,UAAmB;EAExE,MAAM,OADI,MACK,UAAU,EAAE;AAC3B,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;GACpC,MAAM,IAAI,KAAK;AACf,OAAI,KAAK,EAAE,SAAS,YAAY,MAAM,QAAQ,EAAE,SAAS,CACvD,KAAI;AACF,MAAE,WAAW,YAAY,EAAE,SAAS;YAE/B,GAAG;AAGR,YAAQ,MAAM,oDAAoD,EAAE;;;GAI1E;;AAGJ,SAAS,YAAY,QAA0C;CAC7D,MAAM,OAAO,OAAO,OAAO,SAAS;CACpC,MAAM,cAAc,OAAO,MAAM,WAAW,GAAG;AAE/C,KAAI,MAAM,SAAS,UAAW,gBAAgB,KAAK,YAAY,IAAI,OAAO,OAAO,SAAS,IAAI,QAAQ,KACpG,QAAO,OAAO,OAAO,SAAS,GAAG,EAAE;AAGrC,QAAO;;;;;AC9BT,SAAgB,qBAAqB,IAAgB;AAInD,IAAG,KAAK,MAAM,MAAM,UAAU,sBAAsB,UAAmB;EAErE,MAAM,OADI,MACK,UAAU,EAAE;AAC3B,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;GACpC,MAAM,IAAI,KAAK;AACf,OAAI,KAAK,EAAE,SAAS,YAAY,MAAM,QAAQ,EAAE,SAAS,CACvD,KAAI;AACF,MAAE,WAAW,gBAAgB,EAAE,SAAS;YAEnC,GAAG;AAGR,YAAQ,MAAM,wDAAwD,EAAE;;;GAI9E;;AAGJ,SAAS,gBAAgB,QAA0C;CACjE,MAAM,cAAc,CAAC,GAAG,OAAO;AAC/B,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,MAAM,IAAI,OAAO,SAAS;CAC1B,MAAM,QAAQ,OAAO;AACrB,KAAI,CAAC,MACH,QAAO;CACT,MAAM,YAAY,OAAO,IAAI;CAC7B,MAAM,eAAe,OAAO,MAAM,WAAW,GAAG;AAChD,KAAI,MAAM,SAAS,eAAe,OAAO,IAAI,IAAI,SAAS,aAAa,OAAO,IAAI,IAAI,SAAS,UAAU,OAAO,IAAI,GAAG,SAAS,SAAS,IAAI,EAAE;EAC7I,MAAM,cAAc,OAAO,OAAO,IAAI,GAAG,WAAW,GAAG,CAAC,MAAM,GAAG,GAAG;EAEpE,MAAM,gBAAgB;GACpB;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACD,OAAO;GACP,OAAO,IAAI;GACX,OAAO,IAAI;GACX;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACF;AACD,MAAI,YACF,eAAc,QAAQ;GACpB,MAAM;GACN,SAAS;GACT,KAAK;GACN,CAAC;AAEJ,cAAY,OAAO,IAAI,GAAG,GAAG,GAAG,cAAc;YAEvC,MAAM,SAAS,UAAU,aAAa,SAAS,IAAI,IAAI,UAAU,SAAS,WAAW;EAE5F,MAAM,aAAa,OAAO,IAAI;EAC9B,MAAM,QAAQ,YAAY,SAAS,SAAS,IAAI;EAChD,MAAM,SAAS;GACb;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACD;IACE,MAAM;IACN,SAAS,YAAY,SAAS,SAAS,OAAO,WAAW,WAAW,GAAG,GAAG;IAC3E;GACD;IACE,MAAM;IACN,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;IACV,SAAS;IACT,QAAQ;IACR,MAAM;IACN,MAAM;IACP;GACF;EACD,MAAM,aAAa,aAAa,MAAM,GAAG,GAAG;AAC5C,MAAI,WACF,QAAO,QAAQ;GACb,MAAM;GACN,SAAS;GACT,KAAK;GACN,CAAC;AAEJ,cAAY,OAAO
,GAAG,OAAO,GAAG,OAAO;AACvC,SAAO;;AAGT,QAAO;;;;;ACpHT,SAAgB,oBAAoB,IAAgB;AAIlD,IAAG,KAAK,MAAM,MAAM,SAAS,qBAAqB,UAAmB;EACnE,MAAM,IAAI;AACV,MAAI;GAEF,MAAM,QAAQ,eADD,EAAE,UAAU,EAAE,CACO;AAClC,OAAI,MAAM,QAAQ,MAAM,CACtB,GAAE,SAAS;WAER,GAAG;AAER,WAAQ,MAAM,oDAAoD,EAAE;;GAEtE;;AAGJ,SAAS,cAAc;AACrB,QAAO;EACL;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,KAAK;GACL,UAAU;GACV,SAAS;GACT,QAAQ;GACR,MAAM;GACN,OAAO;GACP,SAAS;GACT,MAAM;GACP;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAEF;;AAEH,SAAS,YAAY;AACnB,QAAO;EACL;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EACD;GACE,MAAM;GACN,KAAK;GACL,OAAO;GACP,KAAK;GACL,UAAU;GACV,SAAS;GACT,QAAQ;GACR,MAAM;GACN,OAAO;GACP,MAAM;GACP;EACF;;AAEH,SAAS,SAAS,MAAc;AAC9B,QAAO;EAAC;GACN,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAAE;GACD,MAAM;GACN,KAAK;GACL,UAAU,CACR;IACE,KAAK;IACL,MAAM;IACN,OAAO;IACP,SAAS;IACT,UAAU;IACX,CACF;GACD,SAAS;GACT,OAAO;GACP,OAAO;GACP,OAAO;GACR;EAAE;GACD,MAAM;GACN,KAAK;GACL,OAAO;GACP,OAAO;GACP,OAAO;GACP,UAAU;GACX;EAAC;;AAEJ,SAAgB,eAAe,QAA0C;CACvE,MAAM,cAAc,CAAC,GAAG,OAAO;AAC/B,KAAI,OAAO,SAAS,EAClB,QAAO;CACT,MAAM,IAAI,OAAO,SAAS;CAC1B,MAAM,QAAQ,OAAO;AACrB,KAAI,MAAM,SAAS,UAAU;EAC3B,MAAM,WAAW,OAAO,MAAM,WAAW,GAAG;EAC5C,MAAM,eAAe,OAAO,MAAM,WAAW,IAAI,WAAW,GAAG;AAE/D,MAAI,qBAAqB,KAAK,SAAS,EAAE;GAEvC,MAAM,OAAO,aAAa,MAAM,EAAE,CAAC,MAAM,IAAI,CAAC,KAAI,QAAKE,IAAE,MAAM,CAAC,CAAC,OAAO,QAAQ,CAAC,SAAQ,QAAK,SAASA,IAAE,CAAC;GAC1G,MAAM,SAAU;IACd,GAAG,aAAa;IAChB,GAAG;IACH,GAAG,WAAW;IACf;AACD,eAAY,OAAO,IAAI,GAAG,GAAG,GAAG,OAAO;aAEhC,2BAA2B,KAAK,SAAS,EAAE;GAElD,MAAM,OAAO,aAAa,MAAM,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,KAAI,QAAKA,IAAE,MAAM,CAAC,CAAC,SAAQ,QAAK,SAASA,IAAE,CAAC;GAC9F,MAAM,SAAU;IACd,GAAG,aAAa;IAChB,GAAG;IACH,GAAG,WAAW;IACf;AACD,eAAY,OAAO,IAAI,GAAG,GAAG,GAAG,OAAO;aAEhC,4BAA4B,KAAK,SAAS,EAAE;AACnD,SAAM,UAAU,SAAS,MAAM,GAAG,GAAG;AACrC,SAAM,SAAU,OAAO,GAAG,EAAE;;;AAIhC,QAAO;;;;;AC7JT,SAAgB,kBAAkB,KAAa,UAAkB,MAAc,OAAe;CAC5F,MAAM,MAAM,IAAI;AAGhB,KAAI,SAAS,QAAQ,UAAU,MAAM;EACnC,IAAIC,MAAI;AACR,SAAOA,MAAI,MAAM,GAAG;AAClB,OAAI,IAAIA,SAAO,OAAO,IAAIA,MAAI,OAAO,KAAK;IAExC,IAAI,IAAIA,MAAI;IACZ,IAAI,cAAc;AAClB,WAAO,KAAK,KAAK,IAAI,OAAO,MAAM;AAChC;AACA;;AAEF,QAAI,cAAc,MAAM,EACtB,QAAOA;;AAEX;;AAEF,SAAO;;CAGT,MAAM,WAAW,KAAK,KAAK,SAAS;CACpC,MAAM,WAAW;CACjB,IAAI,QAAQ;CACZ,IAAI,IAAI;AACR,QAAO,IAAI,KAAK;AAEd,MAAI,IAAI,MAAM,GAAG,IAAI,SAAS,OAAO,KAAK,UAAU;GAClD,IAAI,IAAI,IAAI;GACZ,IAAI,cAAc;AAClB,UAAO,KAAK,KAAK,IAAI,OAAO,MAAM;AAChC;AACA;;AAEF,OAAI,cAAc,MAAM,GAAG;AACzB,QAAI,UAAU,EACZ,QAAO;AACT;AACA,SAAK,SAAS;AACd;;;EAIJ,MAAM,KAAK,IAAI;AAEf,MAAI,OAAO,MAAM;AACf,QAAK;AACL;;AAGF,MAAI,OAAO,SACT;WAEO,OAAO,SAAS,SAAS,SAAS,IACzC;OAAI,QAAQ,EACV;;AAEJ;;AAEF,QAAO;;AAGT,gCAAe;;;;AChEf,MAAa,qBAAqB;CAChC;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAa,6BAA6B,mBAAmB,KAAI,MAAK,EAAE,QAAQ,wBAAwB,OAAO,CAAC,CAAC,KAAK,IAAI;AAE1H,MAAM,aAAa;AACnB,MAAM,eAAe;AACrB,MAAM,yBAAyB,IAAI,OAAO,GAAG,aAAa,KAAK,2BAA2B,mBAAmB,IAAI;AAKjH,MAAM,yBAAyB,IAAI,OAAO,MAAM,aAAa,OAAO,2BAA2B,SAAS,IAAI;AAC5G,MAAM,kBAAkB;AAOxB,MAAM,yBAAS,IAAI,OAAO,iFAAiF;AAC3G,MAAM,eAAe;AACrB,MAAM,WAAW;AAGjB,MAAM,eAAe;AACrB,SAAgB,WAAW,GAAW;AACpC,KAAI,CAAC,EACH,QAAO;CAQT,MAAM,OAAO,EAAE,QAAQ,WAAW,MAAM;CACxC,MAAM,WAAW,KAAK,MAAM;AAI5B,KAAI,aAAa,KAAK,SAAS,CAC7B,QAAO;AACT,KAAI,SAAS,SAAS,KAAK,CACzB,QAAO;AACT,KAAI,SAAS,SAAS,IACpB,QAAO;AAET,KAAI,sBAAsB,KAAK,EAAE,CAC/B,QAAO;CAIT,MAAM,SAAS,WAAW,KAAK,KAAK;CACpC,MAAM,mBAAmB,uBAAuB,KAAK,KA
AK;CAC1D,MAAM,gBAAgB,uBAAuB,KAAK,KAAK;CAGvD,MAAM,cAAc,gBAAgB,KAAK,KAAK;CAG9C,MAAM,WAFmB,4DAES,KAAK,KAAK,IADjB,6DACwC,KAAK,KAAK;CAE7E,MAAM,MAAM,OAAO,KAAK,KAAK;CAE7B,MAAM,WAAW,aAAa,KAAK,KAAK;CAExC,MAAM,QAAQ,SAAS,KAAK,KAAK;AAKjC,QAAO,UAAU,oBAAoB,iBAAiB,eAAe,YAAY,OAAO,YAAY;;;;;ACzEtG,MAAa,iBAAiB;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAOD,MAAa,yBAAyB,eACnC,OAAO,CACP,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO,CACnC,KAAI,MAAK,EAAE,QAAQ,yBAAyB,OAAO,CAAC,CACpD,KAAK,IAAI;AACZ,MAAM,sBAAsB;AAG5B,MAAM,gBAAgB;AACtB,MAAM,uBAAuB;AAC7B,MAAM,8BAA8B;AACpC,MAAM,6BAA6B;AAGnC,MAAM,kBAAkB,IAAI,OAAO,GAAG,oBAAoB,iBAAiB,uBAAuB,OAAO,IAAI;AAC7G,MAAM,gCAAgB,IAAI,KAAqB;AAC/C,MAAM,qCAAqB,IAAI,KAAqB;AAEpD,SAAS,aAAa,UAA6C;AACjE,KAAI,CAAC,SACH,QAAO;CACT,MAAM,MAAM,CAAC,GAAG,SAAS;AACzB,KAAI,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO;CACvC,MAAM,MAAM,IAAI,KAAK,IAAS;CAC9B,MAAM,SAAS,cAAc,IAAI,IAAI;AACrC,KAAI,OACF,QAAO;CACT,MAAM,iBAAiB,MAAM,IAAI,KAAI,MAAK,EAAE,QAAQ,0BAA0B,OAAO,CAAC,CAAC,KAAK,IAAI,CAAC;CACjG,MAAM,KAAK,IAAI,OAAO,GAAG,oBAAoB,iBAAiB,eAAe,OAAO,IAAI;AACxF,eAAc,IAAI,KAAK,GAAG;AAC1B,QAAO;;AAGT,SAAS,iBAAiB,YAAqB,UAA6C;CAC1F,MAAM,MAAM,aAAa,EAAE,GAAG,CAAC,GAAI,YAAY,EAAE,CAAE;AACnD,KAAI,CAAC,WACH,KAAI,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,OAAO;CACzC,MAAM,MAAM,aAAa,gBAAgB,IAAI,KAAK,IAAS;CAC3D,MAAM,SAAS,mBAAmB,IAAI,IAAI;AAC1C,KAAI,OACF,QAAO;CACT,MAAM,eAAe,aACjB,CAAC,4BAA4B,uBAAuB,CAAC,OAAO,QAAQ,CAAC,KAAK,IAAI,GAC9E,CACE,IAAI,KAAI,MAAK,EAAE,QAAQ,yBAAyB,OAAO,CAAC,CAAC,KAAK,IAAI,EAClE,2BACD,CAAC,OAAO,QAAQ,CAAC,KAAK,IAAI;CAC/B,MAAM,KAAK,IAAI,OAAO,kBAAkB,aAAa,WAAW,IAAI;AACpE,oBAAmB,IAAI,KAAK,GAAG;AAC/B,QAAO;;AAKT,MAAMC,cAAsC;CAC1C,KAAM;CACN,MAAM;CACN,MAAM;CACN,MAAM;CACN,MAAM;CACP;AAED,SAAS,qBAAqB,GAAW;CACvC,MAAM,KAAK;CAEX,IAAI,IAAI;AAER,QAAY,GAAG,KAAK,EAAE,KAAM,KAC1B;AAEF,QAAO;;AAGT,SAAgB,8BAA8B,GAAW,MAAoB;CAC3E,MAAM,WAAW,MAAM,YAAY;CACnC,MAAM,oBAAoB,MAAM,qBAAqB;CAErD,MAAM,aAAa,MAAM,YAAY;CAGrC,MAAM,KAAK,aAAa,aAAa,SAAY,SAAS;CAE1D,IAAI,MAAM,EAAE,QAAQ,KAAK,GAAW,QAAiB;AACnD,MAAI,YAAY,OAAO,OACrB,QAAO,KAAK,YAAY;AAC1B,MAAI,OAAO,SAAS,SAAS,IAAI,CAC/B,QAAO,KAAK;AACd,SAAO;GACP;AAGF,KAAI,kBACF,OAAM,IAAI,QAAQ,eAAe,QAAQ;CAS3C,IAAI,SAAS;CACb,MAAM,aAAa,iBAAiB,YAAY,aAAa,SAAY,SAAS;AAClF,UAAS,OAAO,QAAQ,aAAa,IAAY,IAAY,OAAe,GAAG,GAAG,IAAI,GAAG,GAAG;AAC5F,UAAS,OAAO,QAAQ,eAAe,eAAe,CACnD,QAAQ,sBAAsB,+BAA+B;AAMhE,UAAS,OAAO,QAAQ,6BAA6B,WAAW;AAGhE,UAAS,OAAO,QAAQ,4BAA4B,SAAS;AAC7D,QAAO;;AAET,SAAgB,UAAU,IAAgB,UAAwB;CAEhE,MAAM,cAAc,OAAgB,WAAoB;EACtD,MAAM,IAAI;AAEV,MAAI,WAAW,KAAK,EAAE,IAAI,CACxB,QAAO;EAET,MAAMC,aAAiC;GACrC,CAAC,MAAM,KAAK;GACZ,CAAC,OAAO,MAAM;GACd,CAAC,KAAM,IAAK;GACb;EAED,IAAI,YAAY;EAChB,IAAI,aAAa;AAEjB,OAAK,MAAM,CAAC,MAAM,UAAU,YAAY;GAEtC,MAAM,MAAM,EAAE;GACd,IAAI,WAAW;GACf,MAAM,YAAY,SAAiB;AAEjC,QAAI,SAAS,eAAe,QAAQ,KAClC,QAAO;AAET,QAAI,SAAS,MAAM;AACjB,OAAE,MAAM,EAAE,MAAM,KAAK;AACrB,iBAAY,EAAE;AACd;;AAEF,QAAI,SAAS,SAAS,SAAS,OAAO;KACpC,MAAMC,MAAI,EAAE,KAAK,gBAAgB,IAAI,EAAE;AACvC,SAAE,UAAU,SAAS,QAAQ,MAAM;AACnC,SAAE,SAAS;AACX,OAAE,MAAM,EAAE,MAAM,KAAK;AACrB,iBAAY,EAAE;AACd;;AAGF,QAAI,CAAC,KACH;IAEF,MAAM,IAAI,EAAE,KAAK,QAAQ,IAAI,EAAE;AAC/B,MAAE,UAAU;AACZ,MAAE,MAAM,EAAE,MAAM,KAAK;AACrB,gBAAY,EAAE;;AAGhB,UAAO,MAAM;AACX,QAAI,aAAa,IAAI,OACnB;IACF,MAAM,QAAQ,IAAI,QAAQ,MAAM,UAAU;AAC1C,QAAI,UAAU,GACZ;AAMF,QAAI,QAAQ,GAAG;KACb,IAAI,IAAI,QAAQ;AAEhB,YAAO,KAAK,KAAK,IAAI,OAAO,IAC1B;AACF,SAAI,KAAK,KAAK,IAAI,OAAO,IACvB,QAAO;;IAMX,MAAM,SAASC,0BAAkB,KAAK,QAAQ,KAAK
,QAAQ,MAAM,MAAM;AAEvE,QAAI,WAAW,IAAI;KAEjB,MAAMC,YAAU,IAAI,MAAM,QAAQ,KAAK,OAAO;AAC9C,SAAIA,UAAQ,SAAS,KAAK,EAAE;AAC1B,kBAAY,IAAI,QAAQ,MAAM,QAAQ,KAAK,OAAO;AAClD;;AAEF,SAAI,WAAW,IAAI;AACjB,UAAI,WAAWA,UAAQ,EAAE;AACvB,mBAAY,QAAQ,KAAK;AACzB,kBAAW;AACX,WAAI,CAAC,QAAQ;AACX,UAAE,UAAU;QAEZ,MAAM,iBAAiB,qBADF,aAAa,IAAI,MAAM,YAAY,UAAU,GAAG,IAAI,MAAM,GAAG,UAAU,CACnC,GAAG,MAAM;AAElE,YAAI,WACF,UAAS,IAAI,MAAM,YAAY,UAAU,CAAC;aAEvC;SACH,IAAI,OAAO,IAAI,MAAM,GAAG,UAAU;AAClC,aAAI,KAAK,SAAS,KAAK,CACrB,QAAO,KAAK,MAAM,GAAG,KAAK,SAAS,KAAK,OAAO;AACjD,kBAAS,KAAK;;AAEhB,YAAI,gBAAgB;SAClB,MAAM,cAAc,EAAE,KAAK,eAAe,IAAI,EAAE;AAChD,qBAAY,SAAS,IAAI,MAAM,GAAG,QAAQ,EAAE;SAC5C,MAAM,QAAQ,EAAE,KAAK,eAAe,QAAQ,EAAE;AAC9C,eAAM,UAAU,8BAA8BA,WAAS,SAAS;AAChE,eAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,eAAM,MAAM,GAAG,OAAOA,YAAU;AAChC,eAAM,UAAU;AAChB,qBAAY,UAAUA;AACtB,WAAE,KAAK,gBAAgB,IAAI,EAAE;eAE1B;SACH,MAAM,QAAQ,EAAE,KAAK,eAAe,QAAQ,EAAE;AAC9C,eAAM,UAAU,8BAA8BA,WAAS,SAAS;AAChE,eAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,eAAM,MAAM,GAAG,OAAOA,YAAU;AAChC,eAAM,UAAU;;AAGlB,UAAE,MAAM,IAAI;;AAEd,mBAAY,IAAI;AAChB,oBAAa;;AAEf;;;IAGJ,MAAM,UAAU,IAAI,MAAM,QAAQ,KAAK,QAAQ,OAAO;AACtD,QAAI,CAAC,WAAW,QAAQ,EAAE;AAGxB,iBAAY,SAAS,MAAM;KAC3B,MAAM,OAAO,IAAI,MAAM,EAAE,KAAK,UAAU;AACxC,SAAI,CAAC,EAAE,QACL,UAAS,KAAK;AAChB;;AAEF,eAAW;AAEX,QAAI,CAAC,QAAQ;KAEX,MAAM,SAAS,IAAI,MAAM,GAAG,MAAM;KAIlC,IAAI,eADiB,IAAI,MAAM,GAAG,UAAU,GACV,IAAI,MAAM,YAAY,MAAM,GAAG;KACjE,MAAM,iBAAiB,qBAAqB,aAAa,GAAG,MAAM;AAClE,SAAI,UAAU,EAAE,OAAO,eACrB,gBAAe,EAAE,UAAU,IAAI,MAAM,EAAE,KAAK,MAAM;AAIpD,SAAI,EAAE,YAAY,cAAc;AAC9B,QAAE,UAAU;AACZ,UAAI,gBAAgB;OAClB,MAAM,SAAS,aAAa,MAAM,QAAQ;OAC1C,MAAM,QAAQ,aAAa,MAAM,OAAQ,QAAS,OAAQ,GAAG,OAAO;AACpE,gBAAS,aAAa,MAAM,GAAG,OAAQ,MAAO,CAAC;OAC/C,MAAM,cAAc,EAAE,KAAK,eAAe,IAAI,EAAE;AAChD,mBAAY,SAAS,OAAQ;OAC7B,MAAM,YAAY,EAAE,KAAK,QAAQ,IAAI,EAAE;AACvC,iBAAU,UAAU;AACpB,SAAE,KAAK,gBAAgB,IAAI,EAAE;YAG7B,UAAS,aAAa;;AAG1B,SAAI,gBAAgB;MAClB,MAAM,cAAc,EAAE,KAAK,eAAe,IAAI,EAAE;AAChD,kBAAY,SAAS;MACrB,MAAM,QAAQ,EAAE,KAAK,eAAe,QAAQ,EAAE;AAC9C,YAAM,UAAU,8BAA8B,SAAS,SAAS;AAChE,YAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,YAAM,MAAM,GAAG,OAAO,UAAU;AAChC,YAAM,UAAU;MAChB,MAAM,MAAM,IAAI,MAAM,SAAS,MAAM,OAAO;MAC5C,MAAM,gBAAgB,IAAI,WAAW,IAAI;AACzC,UAAI,cACF,GAAE,KAAK,gBAAgB,IAAI,EAAE;AAE/B,UAAI,KAAK;OACP,MAAM,mBAAmB,EAAE,KAAK,QAAQ,IAAI,EAAE;AAC9C,wBAAiB,WAAW,OAAO,OAAO,KAAK,OAAO,IAAI,EAAE,QAAQ,QAAQ,GAAG;;AAEjF,UAAI,CAAC,cACH,GAAE,KAAK,gBAAgB,IAAI,EAAE;AAC/B,QAAE,MAAM,IAAI;AACZ,kBAAY,IAAI;AAChB,mBAAa;AACb;YAEG;MACH,MAAM,QAAQ,EAAE,KAAK,eAAe,QAAQ,EAAE;AAC9C,YAAM,UAAU,8BAA8B,SAAS,SAAS;AAChE,YAAM,SAAS,SAAS,OAAO,OAAO,SAAS,QAAQ,WAAW,SAAS,MAAM,MAAM;AACvF,YAAM,MAAM,GAAG,OAAO,UAAU;AAChC,YAAM,UAAU;;;AAIpB,gBAAY,SAAS,MAAM;AAC3B,iBAAa;AACb,MAAE,MAAM;;AAGV,OAAI,UAAU;AACZ,QAAI,CAAC,QAAQ;AAEX,SAAI,YAAY,IAAI,OAClB,UAAS,IAAI,MAAM,UAAU,CAAC;AAEhC,OAAE,MAAM,IAAI;UAIZ,GAAE,MAAM;AAGV,WAAO;;;AAIX,SAAO;;CAIT,MAAM,aACJ,OACA,WACA,SACA,WACG;EACH,MAAM,IAAI;EACV,MAAMH,aAAiC;GACrC,CAAC,OAAO,MAAM;GACd,CAAC,KAAM,IAAK;GACZ,CAAC,MAAM,KAAK;GACb;EACD,MAAM,WAAW,EAAE,OAAO,aAAa,EAAE,OAAO;EAChD,MAAM,WAAW,EAAE,IAAI,MAAM,UAAU,EAAE,OAAO,WAAW,CAAC,MAAM;EAClE,IAAI,UAAU;EACd,IAAI,YAAY;EAChB,IAAI,aAAa;AACjB,OAAK,MAAM,CAAC,MAAM,UAAU,WAC1B,KAAI,aAAa,QAAQ,SAAS,WAAW,KAAK,CAChD,KAAI,KAAK,SAAS,IAAI,EACpB;OAAI,SAAS,QAAQ,MAAM,GAAG,KAAK,KAAK;AACtC,QAAI,YAAY,IAAI,SAAS;AAC3B,eAAU;AACV,iBAAY;AACZ,kBAAa;AACb;;AAEF;;SAGC;AACH,aAAU;AACV,eAAY;AACZ,gBAAa;AACb;;AAKN,MAAI,CAAC,QACH,QAAO;AACT,MAAI,OACF,QAAO;AAET,MACE,SAAS,SAAS,WAAW,IAC1B,SAAS,QAAQ,WAAW,GAAG,UAAU,QAC5C;GACA,MAAM,kBAAkB,SAAS,QAAQ,UAAU
;GACnD,MAAM,gBAAgB,SAAS,QAC7B,YACA,kBAAkB,UAAU,OAC7B;GACD,MAAMG,YAAU,SAAS,MACvB,kBAAkB,UAAU,QAC5B,cACD;GACD,MAAMC,UAAa,EAAE,KAAK,cAAc,QAAQ,EAAE;AAClD,WAAM,UAAU,8BAA8BD,UAAQ;AACtD,WAAM,SACF,cAAc,OAAO,OAAO,cAAc,MAAM,OAAO;AAC3D,WAAM,MAAM,CAAC,WAAW,YAAY,EAAE;AACtC,WAAM,MAAM,GAAG,YAAYA,YAAU;AACrC,WAAM,QAAQ;AACd,WAAM,UAAU;AAChB,KAAE,OAAO,YAAY;AACrB,UAAO;;EAGT,IAAI,WAAW;EACf,IAAI,UAAU;EACd,IAAI,QAAQ;EAEZ,MAAM,mBACF,aAAa,YAAY,KAAK,SAAS,MAAM,UAAU,OAAO;AAElE,MAAI,iBAAiB,SAAS,WAAW,EAAE;GACzC,MAAM,WAAW,iBAAiB,QAAQ,WAAW;AACrD,aAAU,iBAAiB,MAAM,GAAG,SAAS;AAC7C,WAAQ;AACR,cAAW;SAER;AACH,OAAI,iBACF,WAAU;AAEZ,QAAK,WAAW,YAAY,GAAG,WAAW,SAAS,YAAY;IAC7D,MAAM,YAAY,EAAE,OAAO,YAAY,EAAE,OAAO;IAChD,MAAM,UAAU,EAAE,OAAO;IACzB,MAAM,cAAc,EAAE,IAAI,MAAM,YAAY,GAAG,QAAQ;AACvD,QAAI,YAAY,MAAM,KAAK,YAAY;AACrC,aAAQ;AACR;eAEO,YAAY,SAAS,WAAW,EAAE;AACzC,aAAQ;KACR,MAAM,WAAW,YAAY,QAAQ,WAAW;AAChD,iBAAY,UAAU,OAAO,MAAM,YAAY,MAAM,GAAG,SAAS;AACjE;;AAEF,gBAAY,UAAU,OAAO,MAAM;;;EAIvC,MAAMC,QAAa,EAAE,KAAK,cAAc,QAAQ,EAAE;AAClD,QAAM,UAAU,8BAA8B,QAAQ;AACtD,QAAM,SACF,cAAc,OAAO,OAAO,cAAc,MAAM,OAAO;AAC3D,QAAM,MAAM,GAAG,YAAY,UAAU,QAAQ,WAAW,KAAK,GAAG,OAAO,KAAK;AAC5E,QAAM,MAAM,CAAC,WAAW,WAAW,EAAE;AACrC,QAAM,QAAQ;AACd,QAAM,UAAU,CAAC;AACjB,IAAE,OAAO,WAAW;AACpB,SAAO;;AAOT,IAAG,OAAO,MAAM,OAAO,UAAU,QAAQ,WAAW;AACpD,IAAG,MAAM,MAAM,OAAO,aAAa,cAAc,WAAW,EAC1D,KAAK;EAAC;EAAa;EAAa;EAAc;EAAO,EACtD,CAAC;;;;;AC/hBJ,SAAgB,iBAAiB,IAAgB;CAI/C,MAAM,eACF,GAAG,SAAS,MAAM,SACf,SAAU,QAAiB,KAAa,SAAkB,KAAc,MAAe;EACxF,MAAM,YAAY;EAClB,MAAM,YAAY;AAClB,SAAO,UAAU,cAAc,UAAU,YAAY,WAAW,KAAK,QAAQ,GAAG;;AAGtF,IAAG,SAAS,MAAM,SAChB,QACA,KACA,SACA,KACA,SACG;EACH,MAAM,YAAY;AAIlB,EAHc,UAAU,KAGb,UAAU,WAAW,OAAO;AAEvC,SADuB,aACD,WAAW,KAAK,SAAS,KAAK,KAAK;;AAG3D,IAAG,SAAS,MAAM,QACd,GAAG,SAAS,MAAM,WACb,QAAiB,QAAgB;EAGpC,MAAM,aAFY,OACM;EAExB,MAAM,OAAO,OAAO,WAAW,QAAQ,GAAG,CAAC,MAAM;AAKjD,SAAO,eAJW,OACd,YAAY,GAAG,MAAM,WAAY,KAAgB,MAAM,OAAO,CAAC,GAAG,KAClE,GAE4B,UADnB,GAAG,MAAM,WAAW,OAAO,WAAW,WAAW,GAAG,CAAC,CACnB;;;;;;ACrBvD,SAAgB,QAAQ,OAAuB,EAAE,EAAE;CACjD,MAAM,KAAK,IAAI,WAAW;EACxB,MAAM;EACN,SAAS;EACT,aAAa;EACb,QAAQ;EACR,GAAI,KAAK,qBAAqB,EAAE;EACjC,CAAC;AAEF,KAAI,KAAK,cAAc,KAErB,WAAU,IAD6B;EAAE,GAAI,uBAAuB,IAAI,EAAE;EAAG,GAAI,KAAK,eAAe,EAAE;EAAG,CAC1E;AAElC,KAAI,KAAK,oBAAoB,KAC3B,iBAAgB,GAAG;AAKrB,oBAAmB,GAAG;AAGtB,oBAAmB,GAAG;AAEtB,sBAAqB,GAAG;AAExB,kBAAiB,GAAG;AAEpB,qBAAoB,GAAG;AACvB,kBAAiB,GAAG;AACpB,0BAAyB,GAAG;AAE5B,QAAO;;;;;AClDT,SAAgB,mBAAmB,OAAoC;CACrE,MAAM,YAAa,MAAM,QAAQ,EAAE;AACnC,QAAO;EACL,MAAM;EACN,SAAS,UAAU,YAAY;EAC/B,KAAK,UAAU,UAAU,QAAQ;EAClC;;AAGH,SAAgB,wBAAwB,OAA+B;CACrE,MAAM,WAAW;CACjB,MAAM,UAAU,SAAS,UAAU,SAAS,QAAQ,UAAU,GAAG;CACjE,MAAM,UAAU,YAAY,MAAM,YAAY;AAC9C,QAAO;EACL,MAAM;EACN;EACA,KAAK,UAAU,QAAQ;EACxB;;;;;ACjBH,SAAgB,gBAAgB,OAAiC;CAC/D,MAAM,OAAO,OAAO,MAAM,WAAW,GAAG;AAExC,QAAO;EACL,MAAM;EACN;EACA,QAJa,OAAO,MAAM,UAAU,GAAG;EAKvC,KAAK,IAAI,KAAK;EACf;;;;;ACPH,SAAgB,mBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,SAAS;CACb,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,YAAY;AACzD,YAAU,OAAO,OAAO,GAAG,WAAW,GAAG;AACzC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATkB;GACzB,MAAM;GACN;GACA,KAAK,IAAI,OAAO;GACjB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC/B5B,MAAM,yBAAyB;AAE/B,MAAM,uBAAuB;CAAC;CAAS;CAAU;CAAQ;CAAQ;CAAM;AAEvE,MAAM,aAAa;AAEnB,SAAS,iBAAiB,SAAiB;CACzC,MAAMC,OAAiB,EAAE;CACzB,MAAMC,UAAoB,EAAE;AAC5B,MAAK,MAAM,WAAW,QAAQ,MAAM,WAAW,EAAE;EAC/C,MAAM,OAAO;AAEb,MAAI,qBAAqB,MAAK,MAAK,KAAK,WAAW,EAAE,CAAC,CACpD;AAEF,MAAI,KAAK,UAAU,KAAK,KAAK,OAAO,OAAO,KAAK,OAAO,IACrD,MAAK,KAAK,IAAI,KAAK,MAAM,EAAE,GAAG;WAEvB,KAAK,UAAU,
KAAK,KAAK,OAAO,OAAO,KAAK,OAAO,IAC1D,SAAQ,KAAK,IAAI,KAAK,MAAM,EAAE,GAAG;OAE9B;AAEH,QAAK,KAAK,KAAK;AACf,WAAQ,KAAK,KAAK;;;AAGtB,QAAO;EACL,UAAU,KAAK,KAAK,KAAK;EACzB,SAAS,QAAQ,KAAK,KAAK;EAC5B;;AAGH,SAAgB,gBAAgB,OAAqC;CACnE,MAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,IAAI,MAAM,IAAI,WAAW;CAChE,MAAM,YAAa,MAAM,QAAQ,EAAE;CACnC,MAAM,SAAS,OAAO,UAAU,WAAW,YAAY,UAAU,SAAS;CAC1E,MAAM,OAAO,OAAO,MAAM,QAAQ,GAAG;CACrC,MAAM,OAAO,KAAK,WAAW,OAAO;CACpC,MAAM,WAAW,cACN;EACL,MAAM,IAAI;EACV,MAAM,KAAK,EAAE,QAAQ,IAAI;AACzB,SAAO,OAAO,KACV,KACA,OAAO,EAAE,MAAM,KAAK,EAAE,IAAI,GAAG;KAC/B,GACJ;CAUJ,IAAI,UAAU,OAAO,MAAM,WAAW,GAAG;AACzC,KAAI,uBAAuB,KAAK,QAAQ,CACtC,WAAU,QAAQ,QAAQ,wBAAwB,GAAG;AAEvD,KAAI,MAAM;EACR,MAAM,EAAE,UAAU,YAAY,iBAAiB,QAAQ;AAEvD,SAAO;GACL,MAAM;GACN;GACA,MAAM,OAAO,WAAW,GAAG;GAC3B,KAAK,OAAO,WAAW,GAAG;GAC1B;GACA,SAAS,WAAW,OAAO,QAAQ,WAAW,QAAQ,OAAO,CAAC;GAC9D,cAAc;GACd,aAAa;GACd;;AAGH,QAAO;EACL,MAAM;EACN;EACA,MAAM,OAAO,WAAW,GAAG;EAC3B,KAAK,OAAO,WAAW,GAAG;EAC1B;EACA,SAAS,WAAW,OAAO,QAAQ,WAAW,QAAQ,OAAO,CAAC;EAC/D;;;;;ACpFH,SAAgB,sBACd,OACuB;CACvB,MAAM,YAAa,MAAM,QAAQ,EAAE;AACnC,QAAO;EACL,MAAM;EACN,IAAI,OAAO,UAAU,SAAS,GAAG;EACjC,KAAK,KAAK,OAAO,UAAU,SAAS,GAAG,CAAC;EACzC;;;;;ACRH,SAAgB,sBAAqC;AACnD,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACHH,SAAgB,oBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,WAAW;CACf,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAAc;AAC3D,cAAY,OAAO,OAAO,GAAG,WAAW,GAAG;AAC3C,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATmB;GAC1B,MAAM;GACN;GACA,KAAK,KAAK,SAAS;GACpB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC/B5B,SAAgB,yBAAyB,OAAsB,QAAyB,GAAiC;CACvH,IAAI,OAAO,OAAO,MAAM,WAAW,GAAG,CAAC,MAAM;CAC7C,MAAM,YAAY,OAAO,IAAI;CAC7B,MAAM,aAAa,OAAO,IAAI;CAG9B,MAAM,WAAW,KAAK,MAAM,gBAAgB;CAC5C,MAAM,MAAM,WAAW,SAAS,GAAG,aAAa,GAAG;CAGnD,SAAS,aAAa,MAAc;EAElC,MAAM,IAAI,KAAK,MAAM,8BAA8B;AACnD,SAAO,IAAI,EAAE,KAAK;;AAGpB,KAAI,QAAQ,KAAK;EACf,IAAI,UAAU;AACd,MAAI,CAAC,aAAc,WAAW,SAAS,WAAW,CAAC,cAAc,WAAW,SAAS,kBAAmB,CAAC,UACvG,WAAU;AAEZ,MAAI,WAAW,SAAS,WAAW,YAAY,SAAS,iBAAiB,CAAC,aAAa;GAErF,MAAM,YAAY,KAAK,MAAM,kEAAkE;GAC/F,MAAM,OAAO,YAAa,UAAU,MAAM,UAAU,MAAM,UAAU,KAAM;GAC1E,IAAI,QAAQ,IAAI;AAChB,OAAI,UAAU,SAAS,QAAQ;AAC7B,WAAO,UAAU,SAAS,QAAQ,WAAW,GAAG,IAAI;AAEpD,YAAQ,IAAI;;AAEd,OAAI,YAAY,SAAS,iBAAiB,UAAU,SAAS,OAC3D,SAAQ,IAAI;GAEd,MAAM,QAAQ,QAAQ,QAAQ;AAC9B,UAAO,CACL;IACE,MAAM;IACN,MAAM,OAAO,QAAQ,GAAG;IACxB,OAAO;IACP,MAAM;IACN,UAAU,CACR;KAAE,MAAM;KAAQ,SAAS;KAAO,KAAK;KAAO,CAC7C;IACD;IACA,KAAK;IACN,EACD,MACD;;;AAIL,KAAI,QAAQ,OAAO,QAAQ,OAAO;EAChC,MAAM,QAAQ,aAAa,KAAK,IAAI;AACpC,SAAO,CACL;GACE,MAAM;GACN,UAAU,CACR;IAAE,MAAM;IAAQ,SAAS;IAAO,KAAK;IAAO,CAC7C;GACD,KAAK;GACN,EACD,IAAI,EACL;;AAGH,QAAO,CACL;EACE,MAAM;EACN;EACA,KAAK;EACN,EACD,IAAI,EACL;;;;;AC1EH,SAAgB,gBAAgB,OAAsB,UAAU,OAAkB;CAIhF,IAAI,QAAQ,MAAM,SAAS,EAAE;CAK7B,IAAIC,iBAAsB;AAC1B,MAAK,CAAC,SAAS,MAAM,WAAW,MAAM,MAAM,QAAQ,MAAM,SAAS,CACjE,MAAK,MAAM,SAAS,MAAM,UAAU;EAElC,MAAM,aAAc,OAAe;AACnC,MAAI,MAAM,QAAQ,WAAW,IAAI,WAAW,SAAS,GAAG;AACtD,WAAQ;AACR,oBAAiB;AACjB;;;CAIN,MAAM,MAAM,OAAO,MAAM,MAAK,SAAQ,KAAK,OAAO,MAAM,GAAG,MAAM,GAAG;CACpE,MAAM,UAAU,MAAM,MAAK,SAAQ,KAAK,OAAO,MAAM,GAAG;CAKxD,IAAI,MAAM;AACV,KAAI,WAAW,QAAQ,OAAO,QAAQ,CAAC,SAAS,EAC9C,OAAM,OAAO,QAAQ;UAEd,gBAAgB,WAAW,QAAQ,OAAO,eAAe,QAAQ,CAAC,SAAS,EAClF,OAAM,OAAO,eAAe,QAAQ;UAE7B,MAAM,QAAQ,gBAAgB,SAAS,IAAI,eAAe,SAAS,IAAI,QAG9E,OAAM,OAAO,eAAe,SAAS,GAAG,QAAQ;UAEzC,MAAM,QAAQ,MAAM,SAAS,IAAI,MAAM,SAAS,IAAI,QAC3D,OAAM,OAAO,MAAM,SAAS,GAAG,QAAQ;UAEhC,MAAM,WAAW,QAAQ,OAAO,MAAM,QAAQ,CAAC,SAAS,EAC/D,OAAM,OAAO,MAAM,QAAQ;CAG7B,MAAM,SAAS,MAAM,MAAK,SAAQ,KAAK,OAAO,QAAQ,GAAG,MAAM;CAC/D,MAAM,QAAQ,WAAW,O
AAO,OAAO,OAAO,OAAO;CACrD,MAAM,MAAM,OAAO,MAAM,WAAW,GAAG;AAEvC,QAAO;EACL,MAAM;EACN;EACA;EACA;EACA;EACA;EACD;;;;;ACzDH,SAAgB,qBAAqB,OAAsC;CACzE,MAAM,OAAO,OAAO,MAAM,WAAW,GAAG;AACxC,QAAO;EACL,MAAM;EACN;EACA,KAAK;EACN;;;;;ACLH,SAAgB,iBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,OAAO,GAAG,WAAW,GAAG;AAC1C,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATgB;GACvB,MAAM;GACN;GACA,KAAK,KAAK,OAAO,QAAQ,CAAC;GAC3B;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC/B5B,SAAgB,eACd,QACA,YAIA;CAEA,MAAM,QADY,OAAO,YACD,SAAS,EAAE;CACnC,MAAM,OAAO,OAAO,MAAM,MAAK,SAAQ,KAAK,OAAO,OAAO,GAAG,MAAM,GAAG;CACtE,MAAM,SAAS,MAAM,MAAK,SAAQ,KAAK,OAAO,QAAQ,GAAG,MAAM;CAC/D,MAAM,QAAQ,WAAW,OAAO,OAAO,OAAO,OAAO;CAErD,IAAI,IAAI,aAAa;CACrB,MAAMC,aAA8B,EAAE;CACtC,IAAI,UAAU;AAGd,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAAc;AAC3D,aAAW,KAAK,OAAO,GAAG;AAC1B;;AAGF,KAAI,OAAO,IAAI,SAAS,aACtB,WAAU;CAIZ,MAAM,WAAW,kBAAkB,WAAW;CAC9C,MAAM,WAAW,SACd,KAAK,SAAS;EACb,MAAM,UAAU;AAChB,MAAI,aAAa,KACf,QAAO,OAAO,QAAQ,WAAW,GAAG;AACtC,SAAO,OAAO,QAAQ,OAAO,GAAG;GAChC,CACD,KAAK,GAAG;AAeX,QAAO;EAAE,MAbc;GACrB,MAAM;GACN;GACA;GACA,MAAM;GACN;GACA,KAAK,OAAO,IAAI,SAAS,IAAI,OAAO,QAAQ,KAAK,MAAM,KAAK,GAAG,GAAG;GAClE;GACD;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;ACnD5B,SAAgB,qBAAqB,OAAsC;AACzE,QAAO;EACL,MAAM;EACN,SAAS,OAAO,MAAM,WAAW,GAAG;EACpC,SAAS,CAAC,CAAC,MAAM;EACjB,KAAK,MAAM;EACZ;;;;;ACNH,SAAgB,oBAAoB,OAAqC;AAGvE,QAAO;EACL,MAAM;EACN,IAJS,OAAO,MAAM,WAAW,GAAG;EAKpC,KAJU,OAAO,MAAM,UAAU,IAAI,MAAM,WAAW,GAAG,GAAG;EAK7D;;;;;ACHH,SAAgB,wBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,QAAQ;CACZ,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAAW;AACxD,WAAS,OAAO,OAAO,GAAG,WAAW,GAAG;AACxC,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAWhD,QAAO;EAAE,MATuB;GAC9B,MAAM;GACN;GACA,KAAK,KAAK,MAAM;GACjB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;ACnC5B,SAAgB,iBACd,QACA,YACA,KAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,aAAa;CACjB,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,gBAAgB;AAC7D,gBAAc,OAAO,OAAO,GAAG,WAAW,GAAG;AAC7C,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,aAAa,IAAI,CAAC;AAWrD,QAAO;EAAE,MATgB;GACvB,MAAM;GACN;GACA,KAAK,KAAK,OAAO,WAAW,CAAC;GAC9B;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AChC5B,SAAgB,oBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,OAAO,GAAG,WAAW,GAAG;AAC1C,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;CAEhD,MAAM,eAAe,OAAO,OAAO,YAAY,WAAW,GAAG;CAC7D,MAAM,UAAU,WAAW;AAmB3B,QAAO;EAAE,MAlBmB;GAC1B,MAAM;GACN,UAAU,SAAS,SAAS,IACxB,WACA,CACE;IACE,MAAM;IAEN,SAAS;IACT,KAAK;IACN,CACF;GACL,KAAK,IAAI,QAAQ;GAClB;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC1C5B,SAAgB,sBACd,QACA,YAIA;CACA,MAAMC,WAAyB,EAAE;CACjC,IAAI,UAAU;CACd,IAAI,IAAI,aAAa;CACrB,MAAMC,cAA+B,EAAE;AAGvC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,aAAa;AAC1D,aAAW,OAAO,OAAO,GAAG,WAAW,GAAG;AAC1C,cAAY,KAAK,OAAO,GAAG;AAC3B;;AAIF,UAAS,KAAK,GAAG,kBAAkB,YAAY,CAAC;AAqBhD,QAAO;EAAE,MAnBqB;GAC5B,MAAM;GACN,UACE,SAAS,SAAS,IACd,WACA,CACE;IACE,MAAM;IAEN,SAAS,WAAW,OAAO,OAAO,YAAY,WAAW,GAAG;IAC5D,KAAK,WAAW,OAAO,OAAO,YAAY,WAAW,GAAG;IACzD,CACF;GACP,KAAK,IAAI,WAAW,OAAO,OAAO,YAAY,WAAW,GAAG,CAAC;GAC9D;EAKc,WAFG,IAAI,OAAO,SAAS,IAAI,IAAI,OAAO;EAE3B;;;;;AC1C5B,SAAgB,eAAe,OAAgC;CAC7D,MAAM,UAAU,OAAO,MAAM,WAAW,GAAG;AAC3C,QAAO;EACL,MAAM;EACN;EACA,KAAK;EACN;;;;;ACcH,MAA
M,iBAAiB;AAGvB,SAAS,aAAa,KAAa;AACjC,QAAO,IAAI,QAAQ,uBAAuB,OAAO;;AAKnD,MAAM,uBAAuB;AAC7B,MAAM,sBAAsB;AAE5B,SAAgB,YAAY,MAAe;AACzC,KAAI,CAAC,KACH,QAAO;AACT,QAAO,qBAAqB,KAAK,KAAK,IAAI,oBAAoB,KAAK,KAAK;;AAI1E,SAAgB,kBAAkB,QAAyB,KAAc,WAAyC;AAChH,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B,QAAO,EAAE;CAEX,MAAMC,SAAuB,EAAE;CAC/B,IAAIC,kBAAmC;CAEvC,IAAI,IAAI;CAOR,SAAS,uBAAuB;AAC9B,oBAAkB;;CAGpB,SAAS,+BAA+B,SAAiB,OAA+B;AAEtF,MAAI,kBAAkB,KAAK,QAAQ,EAAE;GACnC,IAAI,MAAM,QAAQ,QAAQ,KAAK;AAC/B,OAAI,QAAQ,GACV,OAAM;GACR,MAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI;AACnC,OAAI,MACF,KAAI,iBAAiB;AACnB,oBAAgB,WAAW;AAC3B,oBAAgB,OAAO;UAEpB;AACH,sBAAkB;KAChB,MAAM;KACN,SAAS,OAAO,SAAS,GAAG;KAC5B,KAAK,OAAO,MAAM,WAAW,GAAG;KACjC;AACD,WAAO,KAAK,gBAAgB;;GAIhC,MAAM,EAAE,SAAS,wBAAwB;IACvC;KAAE,MAAM;KAAU,KAAK;KAAK,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IAC5E;KAAE,MAAM;KAAQ,KAAK;KAAI,SAHE,QAAQ,MAAM,IAAI,CAGU,QAAQ,MAAM,GAAG;KAAE,QAAQ;KAAI,MAAM;KAAI,MAAM;KAAM;IAC5G;KAAE,MAAM;KAAW,KAAK;KAAK,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IAC9E,EAAE,EAAE;AACL,yBAAsB;AACtB,YAAS,KAAK;AACd;AACA,UAAO;;AAIT,MAAI,OAAO,KAAK,QAAQ,EAAE;GACxB,MAAM,UAAU,QAAQ,QAAQ,KAAK;GACrC,MAAM,aAAa,UAAU,KAAK,QAAQ,MAAM,GAAG,QAAQ,GAAG;AAC9D,OAAI,WACF,UAAS,YAAY,WAAW;AAGlC,OAAI,YAAY,IAAI;AAClB;AACA,WAAO;;GAIT,MAAM,OAAO,eAAe,KAAK,QAAQ;GACzC,IAAI,QAAQ;GACZ,IAAI,QAAQ;AACZ,OAAI,QAAQ,OAAO,KAAK,UAAU,UAAU;AAC1C,YAAQ,KAAK;AACb,YAAQ,QAAQ,MAAM,KAAK,QAAQ,KAAK,GAAG,OAAO;UAE/C;AAEH,YAAQ,QAAQ,MAAM,UAAU,EAAE;AAClC,YAAQ;;GAGV,MAAM,EAAE,SAAS,iBAAiB;IAChC;KAAE,MAAM;KAAe,KAAK;KAAU,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IACtF;KAAE,MAAM;KAAQ,KAAK;KAAI,SAAS;KAAO,QAAQ;KAAI,MAAM;KAAI,MAAM;KAAM;IAC3E;KAAE,MAAM;KAAgB,KAAK;KAAU,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IACxF,EAAE,GAAG,IAAI;AAEV,yBAAsB;AACtB,YAAS,KAAK;AAEd,OAAI,OAAO;AACT,gBAAY;KACV,MAAM;KACN,SAAS;KACT,KAAK;KACN,CAAC;AACF;;AAGF;AACA,UAAO;;AAIT,MAAI,eAAe,KAAK,QAAQ,EAAE;GAChC,IAAI,MAAM,QAAQ,QAAQ,IAAI;AAC9B,OAAI,QAAQ,GACV,OAAM;GACR,MAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI;AACnC,OAAI,MACF,KAAI,iBAAiB;AACnB,oBAAgB,WAAW;AAC3B,oBAAgB,OAAO;UAEpB;AACH,sBAAkB;KAAE,MAAM;KAAQ,SAAS,OAAO,SAAS,GAAG;KAAE,KAAK,OAAO,MAAM,WAAW,GAAG;KAAE;AAClG,WAAO,KAAK,gBAAgB;;GAIhC,MAAM,EAAE,SAAS,mBAAmB;IAClC;KAAE,MAAM;KAAW,KAAK;KAAM,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IAC9E;KAAE,MAAM;KAAQ,KAAK;KAAI,SAHH,QAAQ,MAAM,IAAI,CAGU,QAAQ,OAAO,GAAG;KAAE,QAAQ;KAAI,MAAM;KAAI,MAAM;KAAM;IACxG;KAAE,MAAM;KAAY,KAAK;KAAM,SAAS;KAAI,QAAQ;KAAK,MAAM;KAAI,MAAM;KAAM;IAChF,EAAE,EAAE;AACL,yBAAsB;AACtB,YAAS,KAAK;AACd;AACA,UAAO;;AAGT,SAAO;;CAGT,SAAS,wBAAwB,SAAiB,QAAgC;AAChF,MAAI,CAAC,SAAS,KAAK,QAAQ,CACzB,QAAO;AAGT,wBAAsB;EACtB,MAAM,aAAa,QAAQ,QAAQ,IAAI;EACvC,MAAM,WAAW,QAAQ,QAAQ,KAAK,aAAa,EAAE;EACrD,MAAM,QAAQ,QAAQ,MAAM,GAAG,WAAW;EAC1C,MAAM,cAAc,aAAa,KAAK,QAAQ,MAAM,WAAW,GAAG,QAAQ,MAAM,YAAY,SAAS;EACrG,MAAM,QAAQ,aAAa,KAAK,KAAK,QAAQ,MAAM,WAAW,EAAE;AAChE,MAAI,MAKF,KAAI,CAFY,+BAA+B,OAAO,OAAO,CAG3D,UAAS,OAAO,MAAM;MAGtB;EAIJ,MAAM,OAAO,YAAY,QAAQ,MAAM,GAAG;AAC1C,aAAW;GACT,MAAM;GACN;GACA,KAAK,OAAO,QAAQ,GAAG;GACxB,CAAe;AAGhB,MAAI,OAAO;AACT,eAAY;IACV,MAAM;IACN,SAAS;IACT,KAAK,OAAO,SAAS,GAAG;IACzB,CAAC;AACF;aAEO,aAAa,IAGpB;OADkB,OAAO,IAAI,IACd;IACb,IAAI,aAAa;AACjB,SAAK,IAAI,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,IACrC,eAAc,QAAS,OAAO,GAAG,WAAW,OAAO,OAAO,GAAG,UAAU,IAAK;AAE9E,QAAI,OAAO,SAAS;AACpB,gBAAY;KACV,MAAM;KACN,SAAS;KACT,KAAK,OAAO,cAAc,GAAG;KAC9B,CAAC;;;AAGN;AACA,SAAO;;CAGT,SAAS,WAAW,MAAkB;AAEpC,wBAAsB;AACtB,SAAO,KAAK,KAAK;;CAGnB,SAAS,UAAU,OAAsB;AAEvC,wBAAsB;AACtB,SAAO,KAAK,MAAoB;;CAIlC,SAAS,SAAS,MAAkB;AAClC,aAAW,KAAK;;CAGlB,SAAS,SAAS,SAAiB,OAAc;AAC/C,MAAI,iBAAiB;AACnB,mBAAgB,WAAW;AAC3B,mBAAgB,OAAOC,SAAO;SAE3B;AACH,qBAAkB;IAChB,MAAM;IACN,SAAS,OAAO,W
AAW,GAAG;IAC9B,KAAK,OAAOA,SAAO,WAAW,GAAG;IAClC;AACD,UAAO,KAAK,gBAAgB;;;AAIhC,QAAO,IAAI,OAAO,QAAQ;EACxB,MAAM,QAAQ,OAAO;AACrB,cAAY,MAAM;;CAGpB,SAAS,YAAY,OAAsB;AACzC,UAAQ,MAAM,MAAd;GACE,KAAK;AACH,oBAAgB,MAAM;AACtB;GAGF,KAAK;AACH,QAAI,iBAAiB;AAEnB,qBAAgB,WAAW;AAC3B,qBAAgB,OAAO;;AAGzB;AACA;GAEF,KAAK;AACH,aAAS,qBAAqB,MAAM,CAAC;AACrC;AACA;GACF,KAAK,eAAe;IAClB,MAAM,CAAC,MAAM,SAAS,yBAAyB,OAAO,QAAQ,EAAE;AAChE,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK;AACH,mBAAe,MAAM;AACrB;GAGF,KAAK;AACH,0BAAsB;AACtB,aAAS,gBAAgB,MAAM,CAAC;AAChC;AACA;GAEF,KAAK,eAAe;AAClB,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,iBAAiB,QAAQ,GAAG,MAAM,QAAQ;AACtE,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,WAAW;AACd,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,mBAAmB,QAAQ,EAAE;AACzD,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,UAAU;AACb,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,wBAAwB,QAAQ,EAAE;AAC9D,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,aAAa;AAChB,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,oBAAoB,QAAQ,EAAE;AAC1D,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,iBAAiB,QAAQ,EAAE;AACvD,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,oBAAoB,QAAQ,EAAE;AAC1D,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK,YAAY;AACf,0BAAsB;IACtB,MAAM,EAAE,MAAM,cAAc,sBAAsB,QAAQ,EAAE;AAC5D,aAAS,KAAK;AACd,QAAI;AACJ;;GAGF,KAAK;AACH,0BAAsB;AACtB,aAAS;KACP,MAAM;KACN,UAAU,CACR;MACE,MAAM;MACN,SAAS,OAAO,MAAM,WAAW,GAAG;MACpC,KAAK,OAAO,MAAM,WAAW,GAAG;MACjC,CACF;KACD,KAAK,IAAI,OAAO,MAAM,WAAW,GAAG,CAAC;KACtC,CAAC;AACF;AACA;GAEF,KAAK;AACH,0BAAsB;AACtB,aAAS;KACP,MAAM;KACN,UAAU,CACR;MACE,MAAM;MACN,SAAS,OAAO,MAAM,WAAW,GAAG;MACpC,KAAK,OAAO,MAAM,WAAW,GAAG;MACjC,CACF;KACD,KAAK,IAAI,OAAO,MAAM,WAAW,GAAG,CAAC;KACtC,CAAC;AACF;AACA;GAEF,KAAK,SAAS;AACZ,0BAAsB;IACtB,MAAM,WAAW,OAAO,IAAI;AAC5B,QAAI,UAAU,SAAS,UAAU,QAAQ,KAAK,OAAO,SAAS,WAAW,GAAG,CAAC,CAE3E,UAAS,IAAI,GAAG;QAGhB,UAAS,gBAAgB,MAAM,CAAC;AAElC;AACA;;GAEF,KAAK;AACH,0BAAsB;AACtB,aAAS,mBAAmB,MAAM,CAAC;AACnC;AACA;GACF,KAAK;AACH,0BAAsB;AACtB,aAAS,wBAAwB,MAAM,CAAC;AACxC;AACA;GACF,KAAK;AACH,0BAAsB;AACtB,aAAS,sBAAsB,MAAM,CAAC;AACtC;AACA;GAEF,KAAK;AACH,0BAAsB;AACtB,aAAS,qBAAqB,CAAC;AAC/B;AACA;GAEF,KAAK;AACH,0BAAsB;AAEtB,aAAS,gBAAgB,OAAO,GAAG,CAAC;AACpC;AACA;GAGF,KAAK;AACH,0BAAsB;AACtB,aAAS,qBAAqB,MAAM,CAAC;AACrC;AACA;GAGF,KAAK;AACH,oBAAgB,MAAM;AACtB;GAGF;AAEE,cAAU,MAAM;AAChB;AACA;;;CAIN,SAAS,gBAAgB,OAAsB;EAE7C,IAAI,QAAQ,OAAO,SAAS;EAC5B,IAAI,UAAU,OAAO,MAAM,WAAW,GAAG,CAAC,QAAQ,OAAO,GAAG;AAC5D,MAAI,QAAQ,WAAW,IAAI,IAAI,OAAO,OAAO,SAAS,IAAI,SAAS,OACjE,WAAU,QAAQ,MAAM,EAAE;AAG5B,MAAI,QAAQ,SAAS,YAAY,IAAI,CAAC,KAAK,SAAS,YAAY,CAC9D,WAAU,QAAQ,MAAM,GAAG,GAAG;AAEhC,SAAY,SAAS,GAAG,SAAS;GAC/B,MAAM,OAAO,OAAO;AACpB,OAAI,KAAK,SAAS,QAAQ;AACxB,sBAAkB;AAClB,cAAU,KAAK,UAAU;AACzB;;AAEF;;AAGF,MAAI,QAAQ,OAAO,SAAS,EAC1B,QAAO,OAAO,QAAQ,EAAE;EAE1B,MAAM,YAAY,OAAO,IAAI;AAC7B,MAAI,WAAW,SAAS,oBAAoB,OAAO,KAAK,QAAQ,EAAE;AAChE;AACA;;AAEF,MAAI,YAAY,OAAO,YAAY,OAAO,YAAY,OAAO,QAAQ,KAAK,QAAQ,EAAE;AAClF;AACA;;AAEF,MAAI,CAAC,aAAa,iBAAiB,KAAK,QAAQ,CAC9C,WAAU,QAAQ,QAAQ,UAAU,GAAG;AAEzC,MAAI,mBAAmB,QAAQ,CAC7B;EACF,MAAM,WAAW,OAAO,IAAI;AAC5B,MAAK,YAAY,OAAO,CAAC,WAAW,QAAQ,SAAS,IAAI,IAAM,YAAY,OAAO,CAAC,SAAS,QAAQ,SAAS,IAAI,EAAG;AAClH;AACA;;AAEF,MAAI,wBAAwB,SAAS,MAAM,CACzC;AACF,MAAI,+BAA+B,SAAS,MAAM,CAChD;AACF,MAAI,yBAAyB,SAAS,MAAM,CAC1C;EAEF,MAAM,WAAW,eAAe;GAAE,GAAG;GAAO;GAAS,CAAC;AAEtD,MAAI,wBAAwB,SAAS,MAAM,CACzC;AACF,MAAI,iBAAiB;AAEnB,mBAAgB,WAAW,SAAS,QAAQ,QAAQ,gBAAgB,GAAG;AACvE,mBAAgB,OAAO,SAAS;SAE7B;GACH,MAAM,YAAY,UAAU,QAAQ,QAAQ,OAAO,IAAI,IAAI,YAAY;AAGvE,OAAI,CADc,OAAO,IAAI,GAE3B,UAAS,UAAU,SAAS,QAAQ,QAAQ,gBAAgB,GAAG;AAEjE,qBAAkB;AAClB,mBAAgB,SAAS;AACzB,UAAO,KAAK,gBAAgB;;AAE9B;;CAGF,SAAS,eAAe,OAAsB;AAE5
C,wBAAsB;EACtB,MAAM,OAAO,MAAM,OAAO,MAAM,CAAC,UAAU,SAAS,OAAO,GAAG;AAG9D,MAAI,OAAO,OAAO,IAAI,GAAG,SAAS,QAAQ;GACxC,MAAM,OAAO,OAAO,OAAO,IAAI,IAAI,WAAW,GAAG;GACjD,MAAM,UAAU,aAAa,KAAK;AAElC,OAAI,kBADQ,IAAI,OAAO,MAAM,QAAQ,SAAS,EACrC,KAAK,IAAI,EAAE;IAIlB,MAAMC,aAAW,MAAM,OAAO,MAAM,CAAC,UAAU,SAAS,OAAO,GAAG,MAAM;AAIxE,QADoB,CAAC,IAAI,SAAS,IAAI,IAAK,YAAY,OAAOA,WAAS,CAAC,EACxD;AACd,2BAAsB;AAWtB,gBAVa;MACX,MAAM;MACN,MAAM,OAAOA,WAAS;MACtB,OAAO;MACP;MACA,UAAU,CACR;OAAE,MAAM;OAAQ,SAAS;OAAM,KAAK;OAAM,CAC3C;MACD,SAAS;MACV,CACe;AAChB,UAAK;AACL;;AAGF,aAAS,MAAM,KAAK;AACpB,SAAK;AACL;;;AAGJ,MAAI,OAAO,MAAM;GACf,MAAM,8BAAc,IAAI,OAAO,UAAU,aAAa,KAAK,CAAC,SAAS;GACrE,MAAM,MAAM,OAAO,SAAS,IAAI,OAAO,OAAO,SAAS,KAAK;GAC5D,MAAM,UAAU,CAAC,YAAY,KAAK,IAAI;AACtC,OAAI,WAAW,KAAK;IAClB,IAAI,UAAU;AACd,QAAI,KACF;SAAI,IAAI,SAAS,OACf,WAAU,OAAQ,IAAiB,QAAQ,GAAG;cACvC,IAAI,SAAS,OACpB,WAAU,OAAQ,IAAiB,WAAW,GAAG;cACxC,IAA8B,WAAY,OAAQ,IAA8B,YAAY,SACrG,WAAU,OAAQ,IAA6B,WAAW,GAAG,CAAC,MAAM,GAAG,GAAG;;AAG9E,yBADoB,IAAI,OAAO,MAAM,aAAa,QAAQ,CAAC,YAAY,EACvD,KAAK,IAAI,EAAE;KACzB,MAAM,OAAO,OAAO,WAAW,GAAG;AAClC,2BAAsB;KACtB,MAAMC,SAAO;MACX,MAAM;MACN,MAAM;MACN,OAAO;MACP;MACA,UAAU,CACR;OAAE,MAAM;OAAQ,SAAS;OAAM,KAAK;OAAM,CAC3C;MACD;MACD;AACD,YAAO,OAAO,OAAO,SAAS,GAAG,GAAGA,OAAK;AACzC,UAAK;AACL,SAAI,OAAO,OAAO,IAAI,WAAW,GAAG,KAAK,IACvC;AACF;;;;EAIN,MAAM,EAAE,MAAM,cAAc,eAAe,QAAQ,EAAE;AACrD,MAAI;EAIJ,MAAM,WAAW,MAAM,OAAO,MAAM,CAAC,UAAU,SAAS,OAAO,GAAG;EAClE,MAAM,UAAU,OAAO,YAAY,GAAG;AAMtC,MAAI,OAAO,SAAS;GAIlB,MAAM,UAAU,IAAI,QAAQ,KAAK;AACjC,OAAI,YAAY,IAAI,QAIf;IACH,MAAM,WAAW,IAAI,QAAQ,KAAK,UAAU,EAAE;AAC9C,QAAI,aAAa,GACf,MAAK,UAAU;aAIA,IAAI,MAAM,UAAU,GAAG,SAAS,CACpC,SAAS,QAAQ,CAC1B,MAAK,UAAU;QAEf,MAAK,UAAU;;;AAIvB,aAAW,KAAK;;CAGlB,SAAS,gBAAgB,OAAsB;AAE7C,wBAAsB;EACtB,MAAM,YAAY,OAAO,IAAI;EAC7B,MAAM,WAAW,OAAO,IAAI;EAC5B,MAAM,YAAY,OAAO,OAAO,SAAS;EAEzC,MAAM,8BAA8B,WAAW,SAAS,UAAU,CAAG,OAAO,UAAU,WAAW,GAAG,CAAE,WAAW,IAAI;EACrH,MAAM,oCAAoC,UAAU,SAAS,UAAU,YAAY,KAAK,OAAO,SAAS,WAAW,GAAG,CAAC;AAEvH,MAAI,+BAA+B,kCACjC,UAAS,oBAAoB,MAAM,CAAC;WAE7B,aAAa,UAAU,SAAS,OACvC,WAAU,UAAU,OAAO,MAAM,UAAU,GAAG,GAAG,OAAO,UAAU,WAAW,GAAG;WAEzE,aAAa,UAAU,SAAS,QAAQ;AAC/C,aAAU,UAAU,OAAO,UAAU,WAAW,GAAG,GAAG,OAAO,MAAM,UAAU,GAAG;AAChF,aAAU,MAAM,OAAO,UAAU,OAAO,GAAG,GAAG,OAAO,MAAM,UAAU,GAAG;;AAE1E;;CAGF,SAAS,wBAAwB,SAAiB,QAAgC;EAChF,MAAM,YAAY,QAAQ,QAAQ,IAAI;AACtC,MAAI,cAAc,GAChB,QAAO;EAET,IAAI,kBAAkB,QAAQ,MAAM,GAAG,UAAU;EACjD,MAAM,UAAU,QAAQ,QAAQ,MAAM,UAAU;AAChD,MAAI,YAAY,IAAI;GAClB,MAAM,YAAY,OAAO,IAAI;GAC7B,IAAI,OAAO,QAAQ,MAAM,YAAY,GAAG,QAAQ;AAChD,OAAI,KAAK,SAAS,IAAI,EAAE;IACtB,MAAM,kBAAkB,KAAK,QAAQ,IAAI;AAEzC,uBAAmB,QAAQ,MAAM,GAAG,YAAY,kBAAkB,EAAE;IACpE,MAAM,eAAe,YAAY,kBAAkB;AACnD,WAAO,QAAQ,MAAM,eAAe,GAAG,QAAQ;;GAEjD,MAAM,YAAY,OAAO,IAAI;AAC7B,OAAI,QAAQ,SAAS,KAAK,IAAI,WAAW,SAAS,eAAe,WAAW;IAC1E,MAAM,OAAO,OAAO,IAAI;IACxB,IAAI,QAAQ;IACZ,IAAIC,YAAU;AACd,QAAI,MAAM,SAAS,UAAU,KAAK,YAAY,KAAK;AACjD;AACA,iBAAU;eAEH,MAAM,SAAS,UAAU,KAAK,YAAY,IACjD;AAGF,QAAI,gBACF,UAAS,iBAAiB,gBAAgB;AAE5C,eAAW;KACT,MAAM;KACN,MAAM,OAAO,UAAU,WAAW,GAAG;KACrC,OAAO;KACP;KACA,UAAU,CAAC;MAAE,MAAM;MAAQ,SAAS;MAAM,KAAK;MAAM,CAAC;KACtD;KACD,CAAe;AAChB,SAAK;AACL,WAAO;;GAGT,MAAM,iBAAiB,QAAQ,QAAQ,KAAK,QAAQ;GACpD,MAAM,OAAO,mBAAmB,KAAK,QAAQ,MAAM,UAAU,GAAG,eAAe,GAAG;GAClF,MAAM,UAAU,mBAAmB;AAEnC,OAAI,gBACF,UAAS,iBAAiB,gBAAgB;AAE5C,cAAW;IACT,MAAM;IACN;IACA,OAAO;IACP;IACA,UAAU,CAAC;KAAE,MAAM;KAAQ,SAAS;KAAM,KAAK;KAAM,CAAC;IACtD;IACD,CAAe;GAEhB,MAAM,YAAY,mBAAmB,KAAK,QAAQ,MAAM,iBAAiB,EAAE,GAAG;AAC9E,OAAI,WAAW;AACb,gBAAY;KAAE,MAAM;KAAQ,SAAS;KAAW,KAAK;KAAW,CAA6B;AAC7F;;AAEF;AACA,UAAO;;AAGT,SAAO;;CAGT,SAAS,yBAAyB,SAAiB,OAA+B;EAChF,MAAM,aAAa,QAAQ,QAAQ,KAAK;AAC
xC,MAAI,eAAe,GACjB,QAAO;EAET,MAAM,kBAAkB,QAAQ,MAAM,GAAG,WAAW;AACpD,MAAI,CAAC,gBACH,mBAAkB;GAChB,MAAM;GACN,SAAS;GACT,KAAK;GACN;MAGD,iBAAgB,WAAW;AAE7B,SAAO,KAAK,gBAAgB;AAC5B,oBAAkB;AAClB,aAAW,gBAAgB,OAAO,KAAK,CAAe;AACtD;AACA,SAAO;;CAGT,SAAS,mBAAmB,SAA0B;AAEpD,MAAI,EAAE,SAAS,WAAW,IAAI,IAAI,WAAW,SAAS,kBACpD,QAAO;EAGT,MAAM,IADW,QAAQ,MAAM,EAAE,CACd,MAAM,UAAU;AACnC,MAAI,MAAM,MAAM;AACd;AACA,UAAO;;AAGT,MAAI,KAAK,KAAK,KAAK,EAAE,GAAG,EAAE;GACxB,MAAM,UAAU,EAAE,OAAO,OAAO,EAAE,OAAO;AACzC,cAAW;IACT,MAAM;IACN;IACA,KAAK,UAAU,QAAQ;IACxB,CAAe;AAChB;AACA,UAAO;;AAGT,SAAO;;AAGT,QAAO;;;;;ACpxBT,SAAgB,gBACd,QACA,OAC0B;CAC1B,MAAMC,qBAAmC,EAAE;CAC3C,IAAI,IAAI,QAAQ;AAGhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,mBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,qBAAmB,KAAK;GACtB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,OAAO,aAAa,WAAW,GAAG;GACxC,CAAC;AACF,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EAEA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,qBAAmB,KAAK,SAAS;AACjC,MAAI;OAGJ;AAUJ,QAAO,CANgC;EACrC,MAAM;EACN,UAAU;EACV,KAAK,mBAAmB,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,KAAK;EAC3D,EAEuB,IAAI,EAAE;;;;;ACvChC,SAAgB,eAAe,OAAqC;AAIlE,KAAI,MAAM,MAAM,WAAW,OAAO,CAChC,QAAO,gBAAgB,MAAM;CAG/B,MAAM,aAAa,OAAO,MAAM,WAAW,GAAG;CAC9C,MAAM,QAAQ,WAAW,MAAM,yCAAyC;AACxE,KAAI,QAAQ,GAGV,OAAM,UAAU,WACb,QAAQ,uBAAuB,GAAG,CAClC,QAAQ,oBAAoB,GAAG;CAEpC,MAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,IAAI,MAAM,IAAI,WAAW;AAChE,QAAO;EACL,MAAM;EACN,UAAU,QAAQ,MAAM,KAAK,OAAO,MAAM,QAAQ,GAAG;EACrD,MAAM,OAAO,MAAM,WAAW,GAAG;EACjC,KAAK,OAAO,MAAM,WAAW,GAAG;EAChC,SAAS,CAAC;EACX;;;;;ACnBH,SAAgB,oBACd,QACA,OAC8B;CAC9B,MAAMC,QAA8B,EAAE;CACtC,IAAI,IAAI,QAAQ;CAChB,IAAIC,YAA0B,EAAE;CAChC,IAAIC,kBAAgC,EAAE;AAEtC,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,WAAW;EAEhC,MAAM,YAAY,OAAO,IAAI;AAC7B,cAAY,kBAAkB,UAAU,YAAY,EAAE,CAAC;AACvD,OAAK;YAEE,OAAO,GAAG,SAAS,WAAW;EAErC,IAAI,IAAI,IAAI;AACZ,oBAAkB,EAAE;AAEpB,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;AAChC,mBAAgB,KAAK;IACnB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,OAAO,aAAa,WAAW,GAAG,CAAC;IAC5F,KAAK,OAAO,aAAa,WAAW,GAAG;IACxC,CAAC;AACF,QAAK;QAGL;AAKJ,MAAI,UAAU,SAAS,GAAG;AACxB,SAAM,KAAK;IACT,MAAM;IACN,MAAM;IACN,YAAY;IACZ,KAAK,GAAG,UAAU,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,GAAG,CAAC,IAAI,gBAClD,KAAI,QAAO,IAAI,IAAI,CACnB,KAAK,KAAK;IACd,CAAC;AAGF,eAAY,EAAE;;AAGhB,MAAI,IAAI;OAGR;AAUJ,QAAO,CANwC;EAC7C,MAAM;EACN;EACA,KAAK,MAAM,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAC5C,EAE2B,IAAI,EAAE;;;;;ACrEpC,SAAgB,cACd,QACA,OACwB;CAExB,MAAM,OADQ,OAAO,OACD,QAAQ,EAAE;CAC9B,MAAM,KAAK,OAAO,MAAM,SAAS,IAAI;CACrC,MAAMC,mBAAiC,EAAE;CACzC,IAAI,IAAI,QAAQ;AAEhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,iBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,mBAAiB,KAAK;GACpB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,OAAO,aAAa,WAAW,GAAG;GACxC,CAAC;AACF,OAAK;OAGL;AAWJ,QAAO,CAP4B;EACjC,MAAM;EACN;EACA,UAAU;EACV,KAAK,KAAK,GAAG,KAAK,iBAAiB,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,KAAK;EACtE,EAEqB,IAAI,EAAE;;;;;AChC9B,SAAgB,aACd,QACA,OACa;CACb,MAAM,QAAQ,OAAO;CACrB,MAAM,WAAW,OAAO,MAAM,KAAK,UAAU,EAAE,IAAI,IAAI;CACvD,MAAM,eAAe,OAAO,SAAS,UAAU,GAAG;CAClD,MAAM,sBAAsB,OAAO,QAAQ;CAC3C,MAAM,iBAAiB,OAAO,oBAAoB,WAAW,GAAG;AAEhE,QAAO;EACL,MAAM;EACN,OAAO;EACP,MAAM;EACN,UAAU,kBAAkB,oBAAoB,YAAY,EAAE,CAAC;EAC/D,KAAK;EACN;;;;;AChBH,SAAgB,eAAe,OAAqC;AAClE,QAAO;EACL,MAAM;EACN,SAAS,OAAO,MAAM,WAAW,GAAG;EACpC,SAAS,CAAC,CAAC,MAAM;EACjB,KAAK,OAAO,MAAM,OAAO,GAAG;EAC7B;;;;;ACDH,SAAgB,WACd,QACA,OACqB;CACrB,IAAI,IAAI,QAAQ;CAChB,IAAIC,YAAiC;CACrC,MAAMC,OAAuB,EAAE;CAC/B,IAAI,WAAW;AAEf,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,cAC7C,KA
AI,OAAO,GAAG,SAAS,cAAc;AACnC,aAAW;AACX;YAEO,OAAO,GAAG,SAAS,eAAe;AACzC,aAAW;AACX;YAGA,OAAO,GAAG,SAAS,gBAChB,OAAO,GAAG,SAAS,cAEtB;UAEO,OAAO,GAAG,SAAS,WAAW;EACrC,MAAMC,QAAyB,EAAE;EACjC,IAAI,IAAI,IAAI;AAEZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,WAC7C,KAAI,OAAO,GAAG,SAAS,aAAa,OAAO,GAAG,SAAS,WAAW;GAChE,MAAM,eAAe,OAAO,GAAG,SAAS;GACxC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,UAAU,OAAO,aAAa,WAAW,GAAG;AAElD,SAAM,KAAK;IACT,MAAM;IACN,QAAQ,gBAAgB;IACxB,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,QAAQ;IACjE,KAAK;IACN,CAAC;AAEF,QAAK;QAGL;EAIJ,MAAMC,UAAwB;GAC5B,MAAM;GACN;GACA,KAAK,MAAM,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,IAAI;GAC3C;AAED,MAAI,SACF,aAAY;MAGZ,MAAK,KAAK,QAAQ;AAGpB,MAAI,IAAI;OAGR;AAIJ,KAAI,CAAC,UAEH,aAAY;EACV,MAAM;EACN,OAAO,EAAE;EACT,KAAK;EACN;AAWH,QAAO,CARsB;EAC3B,MAAM;EACN,QAAQ;EACR;EACA,SAAS,OAAO,OAAO,WAAW;EAClC,KAAK,CAAC,WAAW,GAAG,KAAK,CAAC,KAAI,QAAO,IAAI,IAAI,CAAC,KAAK,KAAK;EACzD,EAEkB,IAAI,EAAE;;;;;AC3F3B,SAAgB,qBAAwC;AACtD,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACYH,SAAgB,UACd,QACA,OACoB;CACpB,MAAM,QAAQ,OAAO;CACrB,MAAMC,YAA4B,EAAE;CACpC,IAAI,IAAI,QAAQ;AAEhB,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,uBACnB,OAAO,GAAG,SAAS,qBAEtB,KAAI,OAAO,GAAG,SAAS,kBAAkB;EAKvC,MAAMC,eAA6B,EAAE;EACrC,IAAI,IAAI,IAAI;AACZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAE7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,WAAW,OAAO,IAAI;GAC5B,MAAM,aAAa,OAAO,aAAa,WAAW,GAAG;AACrD,OAAI,SAAS,KAAK,WAAW,EAAE;AAC7B,iBAAa,UAAU,WAAW,QAAQ,UAAU,GAAG;AACvD,iBAAa,UAAU,OAAO,IAAI,EAAE;;AAEtC,gBAAa,KAAK;IAChB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,OAAO,aAAa,WAAW,GAAG,EAAE,SAAS;IACtG,KAAK,OAAO,aAAa,WAAW,GAAG;IACxC,CAAC;AACF,QAAK;aAEE,OAAO,GAAG,SAAS,mBAAmB;GAE7C,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,gBAAa,KAAK,eAAe;AACjC,OAAI;aAGJ,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;AACA,OAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;GAGF,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,gBAAa,KAAK,eAAe;AACjC,OAAI;aAEG,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,SAAS;AAEnC,gBAAa,KAAK,gBAAgB,OAAO,GAAG,CAAC;AAC7C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;GAExC,MAAM,CAAC,WAAW,YAAY,WAAW,QAAQ,EAAE;AACnD,gBAAa,KAAK,UAAU;AAC5B,OAAI;aAEG,OAAO,GAAG,SAAS,WAAW;GAErC,MAAM,CAAC,aAAa,YAAY,oBAAoB,QAAQ,EAAE;AAC9D,gBAAa,KAAK,YAAY;AAC9B,OAAI;aAEG,OAAO,GAAG,SAAS,iBAAiB;GAE3C,MAAM,CAAC,cAAc,YAAY,cAAc,QAAQ,EAAE;AACzD,gBAAa,KAAK,aAAa;AAC/B,OAAI;aAEG,OAAO,GAAG,SAAS,gBAAgB;GAE1C,MAAM,cAAc,aAAa,QAAQ,EAAE;AAC3C,gBAAa,KAAK,YAAY;AAC9B,QAAK;aAEE,OAAO,GAAG,SAAS,MAAM;AAEhC,gBAAa,KAAK,oBAAoB,CAAC;AACvC,QAAK;aAEE,OAAO,GAAG,SAAS,kBAAkB;GAE5C,MAAM,QACF,sDAAsD,KACtD,OAAO,OAAO,GAAG,QAAQ,GAAG,CAC7B;AACH,OAAI,OAAO;IACT,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,GAAG,MAAM;AACpE,iBAAa,KAAK,eAAe;AACjC,QAAI;SAGJ,MAAK;QAIP,MAAK;AAIT,YAAU,KAAK;GACb,MAAM;GACN,UAAU;GACV,KAAK,aAAa,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,GAAG;GACnD,CAAC;AAEF,MAAI,IAAI;OAGR,MAAK;AAsBT,QAAO,CAlBoB;EACzB,MAAM;EACN,SAAS,MAAM,SAAS;EAExB,cAAc;AACZ,OAAI,MAAM,SAAS,MAAM,MAAM,QAAQ;IACrC,MAAM,QAAQ,MAAM,MAAM,MAAK,MAAK,EAAE,OAAO,QAAQ;AACrD,QAAI,OAAO;KACT,MAAM,SAAS,OAAO,MAAM,GAAG;AAC/B,YAAO,OAAO,SAAS,OAAO,IAAI,WAAW,IAAI,SAAS;;;MAI5D;EACJ,OAAO;EACP,KAAK,UAAU,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAChD,EAEiB,IAAI,EAAE;;AAI1B,SAAS,gBACP,QACA,OACoB;CAGpB,MAAM,cAAc,OAAO;CAC3B,MAAMC,cAA8B,EAAE;CACtC,IAAI,IAAI,QAAQ;AAEhB,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,uBACnB,OAAO,GAAG,SAAS,qBAEtB,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAMD,eAA6B,EAAE;EACrC,IAAI,IAAI,IAAI;AAEZ,SAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAE7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;GACvC,MAAM,eAAe,OAAO,IAAI;GAChC,MAAM,WAAW,OAAO,IAAI;AAC5B
,gBAAa,KAAK;IAChB,MAAM;IACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,EAAE,OAAO,aAAa,WAAW,GAAG,EAAE,SAAS;IACtG,KAAK,OAAO,aAAa,WAAW,GAAG;IACxC,CAAC;AACF,QAAK;aAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;AACA,OAAI,OAAO,GAAG,WAAW,KAAK;AAC5B;AACA;;GAIF,MAAM,CAAC,sBAAsB,YAAY,gBAAgB,QAAQ,EAAE;AACnE,gBAAa,KAAK,qBAAqB;AACvC,OAAI;aAEG,OAAO,GAAG,SAAS,cAAc;AACxC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;aAEE,OAAO,GAAG,SAAS,SAAS;AACnC,gBAAa,KAAK,gBAAgB,OAAO,GAAG,CAAC;AAC7C,QAAK;aAEE,OAAO,GAAG,SAAS,cAAc;AAExC,gBAAa,KAAK,eAAe,OAAO,GAAG,CAAC;AAC5C,QAAK;QAIL,MAAK;AAIT,cAAY,KAAK;GACf,MAAM;GACN,UAAU;GACV,KAAK,aAAa,KAAI,UAAS,MAAM,IAAI,CAAC,KAAK,GAAG;GACnD,CAAC;AAEF,MAAI,IAAI;OAGR,MAAK;AAqBT,QAAO,CAjB0B;EAC/B,MAAM;EACN,SAAS,YAAY,SAAS;EAC9B,cAAc;AACZ,OAAI,YAAY,SAAS,YAAY,MAAM,QAAQ;IACjD,MAAM,QAAQ,YAAY,MAAM,MAAK,MAAK,EAAE,OAAO,QAAQ;AAC3D,QAAI,OAAO;KACT,MAAM,SAAS,OAAO,MAAM,GAAG;AAC/B,YAAO,OAAO,SAAS,OAAO,IAAI,WAAW,IAAI,SAAS;;;MAI5D;EACJ,OAAO;EACP,KAAK,YAAY,KAAI,SAAQ,KAAK,IAAI,CAAC,KAAK,KAAK;EAClD,EAEuB,IAAI,EAAE;;;;;ACvQhC,SAAgB,gBACd,QACA,OACA,OAC0B;CAC1B,MAAM,OAAO,OAAO,MAAM,MAAM,OAAO;CACvC,MAAM,QAAQ,OAAO,MAAM,MAAO,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAE;CAChF,MAAME,qBAAmC,EAAE;CAC3C,IAAI,IAAI,QAAQ;AAEhB,QAAO,IAAI,OAAO,UAAU,OAAO,GAAG,SAAS,kBAC7C,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,MAAI,aACF,oBAAmB,KAAK;GACtB,MAAM;GACN,UAAU,kBAAkB,aAAa,YAAY,EAAE,CAAC;GACxD,KAAK,OAAO,aAAa,WAAW,GAAG;GACxC,CAAC;AAEJ,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EAEA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,qBAAmB,KAAK,SAAS;AACjC,MAAI;OAGJ;AAcJ,QAAO,CAVgC;EACrC,MAAM;EACN;EACA;EACA,UAAU;EACV,KAAK,MAAM,KAAK,GAAG,MAAM,IAAI,mBAC1B,KAAI,UAAS,MAAM,IAAI,CACvB,KAAK,KAAK,CAAC;EACf,EAEuB,IAAI,EAAE;;;;;AC9ChC,SAAgB,eACd,QACA,OAC0B;CAC1B,MAAM,YAAY,OAAO;CAGzB,IAAI,OAAO;CACX,IAAI,QAAQ;CAEZ,MAAM,YAAY,UAAU,KAAK,MAAM,yBAAyB;AAChE,KAAI,WAAW;AACb,SAAO,UAAU;EAEjB,MAAM,OAAO,OAAO,UAAU,QAAQ,GAAG,CAAC,MAAM;AAChD,MAAI,QAAQ,CAAC,KAAK,WAAW,MAAM,EAAE;GAEnC,MAAM,QAAQ,KAAK,wBAAQ,IAAI,OAAO,IAAI,OAAO,EAAE,GAAG,CAAC,MAAM;AAC7D,OAAI,MACF,SAAQ;;QAGT;EAEH,MAAM,OAAO,OAAO,UAAU,QAAQ,GAAG,CAAC,MAAM;EAEhD,MAAM,QAEF,4DAA4D,KAAK,KAAK;AAC1E,MAAI,OAAO;AACT,UAAO,MAAM;AACb,WAAQ,OAAO,MAAM,MAAM,GAAG;;;AAIlC,KAAI,CAAC,MACH,SAAQ,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE;CAEtD,MAAMC,WAAyB,EAAE;CACjC,IAAI,IAAI,QAAQ;CAGhB,MAAM,4BAAY,IAAI,OAAO,cAAc,KAAK,SAAS;AAEzD,QACE,IAAI,OAAO,UACR,OAAO,GAAG,SAAS,qBACnB,CAAC,UAAU,KAAK,OAAO,GAAG,KAAK,CAElC,KAAI,OAAO,GAAG,SAAS,kBAAkB;EACvC,MAAM,eAAe,OAAO,IAAI;AAChC,MAAI,cAAc;GAChB,MAAM,cAAe,aAAa,YAAgC,EAAE;GACpE,IAAI,IAAI;AACR,QAAK,IAAI,IAAI,YAAY,SAAS,GAAG,KAAK,GAAG,KAAK;IAChD,MAAM,IAAI,YAAY;AACtB,QAAI,EAAE,SAAS,UAAU,KAAK,KAAK,EAAE,QAAQ,EAAE;AAC7C,SAAI;AACJ;;;GAGJ,MAAM,YAAY,MAAM,KAAK,YAAY,MAAM,GAAG,EAAE,GAAG;AACvD,YAAS,KAAK;IACZ,MAAM;IACN,UAAU,kBAAkB,aAAa,EAAE,CAAC;IAC5C,KAAK,OAAO,aAAa,WAAW,GAAG,CAAC,QAAQ,SAAS,GAAG,CAAC,QAAQ,gBAAgB,GAAG;IACzF,CAAC;;AAEJ,OAAK;YAGL,OAAO,GAAG,SAAS,sBAChB,OAAO,GAAG,SAAS,qBACtB;EACA,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,WAAS,KAAK,SAAS;AACvB,MAAI;OAGJ;AAcJ,QAAO,CAVgC;EACrC,MAAM;EACN;EACA;EACA;EACA,KAAK,MAAM,KAAK,GAAG,MAAM,IAAI,SAAS,KAAI,MAAK,EAAE,IAAI,CAAC,KAAK,KAAK,CAAC;EAClE,EAGoB,IACkB,EAAE;;;;;AC/F3C,SAAgB,iBAAgC;AAC9C,QAAO;EACL,MAAM;EACN,KAAK;EACN;;;;;ACHH,MAAM,YAAY,IAAI,IAAI;CACxB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAM,qCAAqB,IAAI,KAAqB;AAEpD,SAAgB,eAAe,OAAqC;CAClE,MAAM,MAAM,OAAO,MAAM,WAAW,GAAG;AAGvC,KAAI,WAAW,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,IAAI,UAAU,KAAK,IAAI,CACnE,QAAO;EACL,MAAM;EACN,SAAS;EACT;EACA,KAAK;EACL,SAAS;EACV;CAKH,MAAM,OADW,IAAI,MAA
M,uBAAuB,GAC1B,MAAM,IAAI,aAAa;AAG/C,KAAI,CAAC,IACH,QAAO;EACL,MAAM;EACN,SAAS;EACT;EACA,KAAK;EACL,SAAS;EACV;CAIH,MAAM,cAAc,mBAAmB,KAAK,IAAI;CAChD,MAAM,SAAS,UAAU,IAAI,IAAI;CAGjC,IAAI,UAAU,mBAAmB,IAAI,IAAI;AACzC,KAAI,CAAC,SAAS;AACZ,YAAU,IAAI,OAAO,WAAW,IAAI,MAAM,IAAI;AAC9C,qBAAmB,IAAI,KAAK,QAAQ;;CAEtC,MAAM,aAAa,QAAQ,KAAK,IAAI;CAEpC,MAAM,UAAU,EAAE,UAAU,eAAe;AAM3C,QAAO;EACL,MAAM;EACN,SANc,UACZ,GAAG,IAAI,QAAQ,WAAW,GAAG,CAAC,MAAM,IAAI,KACxC;EAKF;EACA;EACA;EACD;;;;;ACzEH,SAAgB,eACd,QACA,OACe;CACf,MAAM,wBAAwB,OAAO,QAAQ;CAC7C,MAAM,mBAAmB,OAAO,sBAAsB,WAAW,GAAG;AAEpE,QAAO;EACL,MAAM;EACN,UAAU,kBAAkB,sBAAsB,YAAY,EAAE,EAAE,iBAAiB;EACnF,KAAK;EACN;;;;;ACKH,SAAgB,yBACd,UACA,IACA,UAAwB,EAAE,EACZ;CAEd,IAAI,gBAAgB,YAAY,IAAI,UAAU,CAAC,QAAQ,kBAAkB,YAAY;AACrF,KAAI,aAAa,SAAS,MAAM,CAE9B,gBAAe,aAAa,QAAQ,SAAS,QAAQ;AAEvD,KAAI,aAAa,KAAK,aAAa,CAEjC,gBAAe,aAAa,QAAQ,cAAc,KAAK;UAEhD,aAAa,KAAK,aAAa,CAEtC,gBAAe,aAAa,QAAQ,qBAAqB,KAAK;CAIhE,MAAM,SAAS,GAAG,MAAM,cAAc,EAAE,CAAC;AAEzC,KAAI,CAAC,UAAU,CAAC,MAAM,QAAQ,OAAO,CACnC,QAAO,EAAE;CAGX,MAAM,MAAM,QAAQ;CACpB,MAAM,OAAO,QAAQ;CACrB,IAAI,oBAAoB;AACxB,KAAI,OAAO,OAAO,QAAQ,WACxB,qBAAoB,IAAI,kBAAkB,IAAI;CAIhD,IAAI,SAAS,cAAc,kBAAkB;AAI7C,KAAI,QAAQ,OAAO,SAAS,YAAY;EACtC,MAAM,aAAa,KAAK,kBAAkB;AAC1C,MAAI,MAAM,QAAQ,WAAW,EAAE;GAG7B,MAAM,QAAS,WAAyB;GACxC,MAAM,YAAa,OAAmC;AACtD,OAAI,SAAS,OAAO,cAAc,SAChC,UAAS,cAAc,WAAyC;OAIhE,UAAS;;;AAIf,QAAO;;AAIT,SAAgB,cAAc,QAAuC;AAEnE,KAAI,CAAC,UAAU,CAAC,MAAM,QAAQ,OAAO,CACnC,QAAO,EAAE;CAEX,MAAMC,SAAuB,EAAE;CAC/B,IAAI,IAAI;AAMR,QAAO,IAAI,OAAO,QAAQ;EACxB,MAAM,QAAQ,OAAO;AACrB,UAAQ,MAAM,MAAd;GACE,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK;GACL,KAAK,wBAAwB;IAC3B,MAAM,CAAC,aAAa,YAAY,eAAe,QAAQ,EAAE;AACzD,WAAO,KAAK,YAAY;AACxB,QAAI;AACJ;;GAGF,KAAK;AACH,WAAO,KAAK,aAAa,QAAQ,EAAE,CAAC;AACpC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,eAAe,QAAQ,EAAE,CAAC;AACtC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,eAAe,MAAM,CAAC;AAClC,SAAK;AACL;GACF,KAAK;AACH,WAAO,KAAK,eAAe,OAAO,GAAG,CAAC;AACtC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,gBAAgB,OAAO,GAAG,CAAC;AACvC,SAAK;AACL;GAEF,KAAK;GACL,KAAK,qBAAqB;IACxB,MAAM,CAAC,UAAU,YAAY,UAAU,QAAQ,EAAE;AACjD,WAAO,KAAK,SAAS;AACrB,QAAI;AACJ;;GAGF,KAAK;AACH,WAAO,KAAK,oBAAoB,CAAC;AACjC,SAAK;AACL;GAEF,KAAK,mBAAmB;IACtB,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,EAAE;AAC7D,WAAO,KAAK,eAAe;AAC3B,QAAI;AACJ;;GAGF,KAAK,cAAc;IACjB,MAAM,CAAC,WAAW,YAAY,WAAW,QAAQ,EAAE;AACnD,WAAO,KAAK,UAAU;AACtB,QAAI;AACJ;;GAGF,KAAK,WAAW;IACd,MAAM,CAAC,oBAAoB,YAAY,oBAAoB,QAAQ,EAAE;AACrE,WAAO,KAAK,mBAAmB;AAC/B,QAAI;AACJ;;GAGF,KAAK,iBAAiB;IACpB,MAAM,CAAC,cAAc,YAAY,cAAc,QAAQ,EAAE;AACzD,WAAO,KAAK,aAAa;AACzB,QAAI;AACJ;;GAGF,KAAK,kBAAkB;IACrB,MAAM,QACF,4DAA4D,KAC5D,OAAO,MAAM,QAAQ,GAAG,CACzB;AACH,QAAI,OAAO;KACT,MAAM,CAAC,gBAAgB,YAAY,gBAAgB,QAAQ,GAAG,MAAM;AACpE,YAAO,KAAK,eAAe;AAC3B,SAAI;UAGJ,MAAK;AAEP;;GAGF,KAAK;AACH,WAAO,KAAK,gBAAgB,CAAC;AAC7B;AACA;GAEF,KAAK;AACH,WAAO,KAAK,eAAe,OAAO,GAAG,CAAC;AACtC,SAAK;AACL;GAEF,KAAK;AACH,WAAO,KAAK,GAAG,kBAAkB,MAAM,YAAY,EAAE,CAAC,CAAC;AACvD,SAAK;AACL;GACF;AAEE,SAAK;AACL;;;AAIN,QAAO;;;;;AClKT,SAAgB,YAAY,QAAgB,UAAU,KAAK,KAAK,IAAI,UAA8B,EAAE,EAAE;CAEpG,MAAM,KAAK,QAAQ,QAAQ;CAG3B,MAAMC,sBAA8C,EAClD,eAAe,QAChB;CAED,IAAIC;AACJ,KAAI,OAAO,QAAQ,SAAS,WAC1B,KAAI,QAAQ;UAEL,QAAQ,QAAQ,OAAO,QAAQ,SAAS,UAAU;EACzD,MAAM,UAAU,QAAQ;AACxB,OAAK,QAAgB,QAAQ,QAAQ,oBAAoB,QAAQ;OAGjE,MAAK,QAAgB,oBAAoB,QAAQ;AAInD,KAAI,MAAM,QAAQ,QAAQ,OAAO,CAC/B,MAAK,MAAM,KAAK,QAAQ,QAAQ;EAE9B,MAAM,aAAa;AACnB,MAAI,MAAM,QAAQ,WAAW,EAAE;GAC7B,MAAM,KAAK,WAAW;GACtB,MAAM,OAAO,WAAW;AACxB,OAAI,OAAO,OAAO,WAChB,IAAG,IAAI,IAAI,KAAK;aAEX,OAAO,eAAe,WAC7B,IAAG,IAAI,WAA+B;;AAO5C,KAAI,MAAM,QAAQ,QAAQ,MAAM,CAC9B,MAAK,MAAM,MAAM,QAAQ,MACvB,KAAI;A
ACF,KAAG,GAAG;UAED,GAAG;AAGR,UAAQ,MAAM,+CAA+C,EAAE;;AAMrE,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,eAAe;AACtB,IAAG,IAAIC,KAAgB;CAGvB,MAAM,2BAA6B,mBAEhC,WAAW;AACd,IAAG,IAAI,yBAAyB;AAChC,IAAG,IAAI,cAAc;AACrB,IAAG,IAAI,mBAAmB;AAG1B,IAAG,KAAK,MAAM,MAAM,SAAS,sBAAsB,UAAmB;EACpE,MAAM,IAAI;EAKV,MAAM,QADc,EAAE,IACJ,MAAM,QAAQ;AAChC,OAAK,MAAM,SAAS,EAAE,QAAQ;AAC5B,OAAI,MAAM,SAAS,WAAW,CAAC,MAAM,OAAO,CAAC,MAAM,OACjD;GACF,MAAMC,WAAmB,MAAM,IAAI;GACnC,MAAMC,UAAkB,MAAM,IAAI;GAClC,MAAMC,SAAiB,MAAM;GAC7B,MAAM,SAAS,OAAO;GACtB,MAAM,SAAS,OAAO;GAGtB,MAAM,OAAO,MADG,KAAK,IAAI,GAAG,UAAU,EAAE,KACT;GAC/B,IAAI,IAAI;AACR,UAAO,IAAI,KAAK,WAAW,KAAK,OAAO,OAAO,KAAK,OAAO,KAAO;GACjE,IAAI,QAAQ;AACZ,UAAO,IAAI,QAAQ,KAAK,UAAU,KAAK,IAAI,WAAW,OAAQ;GAC9D,IAAI,IAAI,IAAI;AACZ,UAAO,IAAI,KAAK,WAAW,KAAK,OAAO,OAAO,KAAK,OAAO,KAAO;GACjE,MAAM,SAAS,UAAU,WAAW,KAAK,SAAS,UAAU,MAAM,KAAK;GACvE,MAAM,aAAa;AACnB,cAAW,OAAO,WAAW,QAAQ,EAAE;AACtC,GAAC,WAAW,KAAiC,WAAW,CAAC;AAEzD,GAAC,WAAW,KAAiC,SAAS,CAAC,CAAC;;GAE3D;CAGF,MAAM,YAAY,OAAgB,WAAoB;EACpD,MAAM,IAAI;EACV,MAAM,QAAQ,EAAE;AAChB,MAAI,EAAE,IAAI,WAAW,IACnB,QAAO;EACT,MAAM,WAAW,EAAE,IAAI,QAAQ;EAC/B,MAAM,WAAW,EAAE,IAAI,QAAQ;AAC/B,MAAI,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,EAAE;AAC9C,OAAI,CAAC,QAAQ;IACX,MAAM,QAAQ,EAAE,KAAK,QAAQ,IAAI,EAAE;AACnC,UAAM,UAAU;;AAElB,KAAE,OAAO;AACT,UAAO;;AAET,SAAO;;AAGT,IAAG,OAAO,MAAM,OAAO,OAAO,QAAQ,SAAS;AAG/C,IAAG,SAAS,MAAM,SAAS,QAAiB,QAAgB;EAG1D,MAAM,aAFY,OACM;EAExB,MAAM,OAAO,OAAO,WAAW,QAAQ,GAAG,CAAC,MAAM;EACjD,MAAM,MAAM,OAAO,WAAW,WAAW,GAAG;EAC5C,MAAM,cAAc,KAAK,SAAS,mBAAmB,IAAI,CAAC,CAAC;EAC3D,MAAM,WAAW,OAAO,QAAQ,OAAO;AAGvC,SAAO,sCAAsC,YAAY,eAAe,SAAS,QAFhE,UAAU,MAAM,GAAG,IAAI,GAAG,WAEuD;;kCAEpE,SAAS,aAAa,CAAC;iDACR,YAAY,IAAI,EACvD,cACD,CAAC;;;;;CAOR,MAAM,eAAe;CACrB,MAAM,mBAAmB,OAAgB,WAAoB;EAC3D,MAAM,IAAI;AACV,MAAI,EAAE,IAAI,EAAE,SAAS,IACnB,QAAO;EACT,MAAM,QAAQ,aAAa,KAAK,EAAE,IAAI,MAAM,EAAE,IAAI,CAAC;AACnD,MAAI,CAAC,MACH,QAAO;AACT,MAAI,CAAC,QAAQ;GACX,MAAM,KAAK,MAAM;GACjB,MAAM,QAAQ,EAAE,KAAK,aAAa,QAAQ,EAAE;AAC5C,SAAM,UAAU;AAChB,SAAM,SAAS,MAAM;;AAEvB,IAAE,OAAO,MAAM,GAAG;AAClB,SAAO;;AAGT,IAAG,OAAO,MAAM,OAAO,UAAU,aAAa,gBAAgB;AAC9D,IAAG,SAAS,MAAM,aAAa,QAAiB,QAAgB;EAC9D,MAAM,YAAY;EAClB,MAAM,KAAK,OAAO,UAAU,KAAK,WAAW,GAAG;AAC/C,SAAO,mDAAmD,GAAG,+DAA+D,GAAG;;AAGjI,QAAO"}
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "stream-markdown-parser",
   "type": "module",
-  "version": "0.0.22",
+  "version": "0.0.23",
   "packageManager": "pnpm@10.22.0",
   "description": "Pure markdown parser and renderer utilities with streaming support - framework agnostic",
   "author": "Simon He",