treedex 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/pdf-parser.ts","../src/index.ts","../src/loaders.ts","../src/core.ts","../src/tree-builder.ts","../src/tree-utils.ts","../src/prompts.ts","../src/llm-backends.ts"],"sourcesContent":["/** PDF extraction and page grouping. */\n\nimport { encode } from \"gpt-tokenizer\";\nimport type { Page } from \"./types.js\";\n\n/** Count tokens using cl100k_base-compatible encoding. */\nexport function countTokens(text: string): number {\n return encode(text).length;\n}\n\n/**\n * Extract text from each page of a PDF.\n *\n * Returns a list of objects with page_num, text, and token_count.\n */\nexport async function extractPages(pdfPath: string): Promise<Page[]> {\n const fs = await import(\"node:fs/promises\");\n const pdfjs = await import(\"pdfjs-dist/legacy/build/pdf.mjs\");\n\n const data = await fs.readFile(pdfPath);\n const doc = await pdfjs.getDocument({ data }).promise;\n\n const pages: Page[] = [];\n for (let i = 0; i < doc.numPages; i++) {\n const page = await doc.getPage(i + 1); // pdfjs is 1-indexed\n const content = await page.getTextContent();\n const text = content.items\n .map((item: unknown) => {\n const obj = item as Record<string, unknown>;\n return typeof obj.str === \"string\" ? obj.str : \"\";\n })\n .join(\" \");\n pages.push({\n page_num: i,\n text,\n token_count: countTokens(text),\n });\n }\n\n return pages;\n}\n\n/**\n * Combine pages[start:end+1] into a string with physical index tags.\n *\n * Each page is wrapped like:\n * <physical_index_0>page text</physical_index_0>\n * where the number is the page's page_num.\n */\nexport function pagesToTaggedText(\n pages: Page[],\n start: number,\n end: number,\n): string {\n const parts: string[] = [];\n for (const page of pages.slice(start, end + 1)) {\n const n = page.page_num;\n parts.push(`<physical_index_${n}>${page.text}</physical_index_${n}>`);\n }\n return parts.join(\"\\n\");\n}\n\n/**\n * Split pages into token-budget groups, each returned as tagged text.\n *\n * Groups overlap by `overlap` pages for continuity.\n */\nexport function groupPages(\n pages: Page[],\n maxTokens: number = 20000,\n overlap: number = 1,\n): string[] {\n const totalTokens = pages.reduce((sum, p) => sum + p.token_count, 0);\n\n if (totalTokens <= maxTokens) {\n return [pagesToTaggedText(pages, 0, pages.length - 1)];\n }\n\n const groups: string[] = [];\n let groupStart = 0;\n\n while (groupStart < pages.length) {\n let running = 0;\n let groupEnd = groupStart;\n\n while (groupEnd < pages.length) {\n const pageTokens = pages[groupEnd].token_count;\n if (running + pageTokens > maxTokens && groupEnd > groupStart) {\n groupEnd -= 1;\n break;\n }\n running += pageTokens;\n groupEnd += 1;\n }\n\n if (groupEnd >= pages.length) {\n groupEnd = pages.length - 1;\n }\n\n groupEnd = Math.min(groupEnd, pages.length - 1);\n groups.push(pagesToTaggedText(pages, groupStart, groupEnd));\n\n if (groupEnd >= pages.length - 1) {\n break;\n }\n\n const nextStart = groupEnd + 1 - overlap;\n groupStart = Math.max(nextStart, groupStart + 1);\n }\n\n return groups;\n}\n","/** TreeDex: Tree-based document RAG framework. 
*/\n\nexport { TreeDex, QueryResult } from \"./core.js\";\nexport {\n PDFLoader,\n TextLoader,\n HTMLLoader,\n DOCXLoader,\n autoLoader,\n textToPages,\n} from \"./loaders.js\";\nexport {\n BaseLLM,\n GeminiLLM,\n OpenAILLM,\n ClaudeLLM,\n MistralLLM,\n CohereLLM,\n OpenAICompatibleLLM,\n GroqLLM,\n TogetherLLM,\n FireworksLLM,\n OpenRouterLLM,\n DeepSeekLLM,\n CerebrasLLM,\n SambanovaLLM,\n HuggingFaceLLM,\n OllamaLLM,\n FunctionLLM,\n} from \"./llm-backends.js\";\nexport {\n listToTree,\n assignPageRanges,\n assignNodeIds,\n embedTextInTree,\n findLargeNodes,\n} from \"./tree-builder.js\";\nexport {\n createNodeMapping,\n stripTextFromTree,\n collectNodeTexts,\n countNodes,\n getLeafNodes,\n treeToFlatList,\n extractJson,\n printTree,\n} from \"./tree-utils.js\";\nexport {\n countTokens,\n extractPages,\n pagesToTaggedText,\n groupPages,\n} from \"./pdf-parser.js\";\nexport {\n structureExtractionPrompt,\n structureContinuePrompt,\n retrievalPrompt,\n} from \"./prompts.js\";\nexport type { Page, TreeNode, IndexData, Stats } from \"./types.js\";\n","/**\n * Document loaders for multiple file formats.\n *\n * Each loader returns a list of objects: [{page_num, text, token_count}]\n * matching the format used by pdf-parser extractPages().\n */\n\nimport { countTokens } from \"./pdf-parser.js\";\nimport type { Page } from \"./types.js\";\n\n/** Split plain text into synthetic pages by character count. */\nexport function textToPages(\n text: string,\n charsPerPage: number = 3000,\n): Page[] {\n const pages: Page[] = [];\n for (let i = 0; i < text.length; i += charsPerPage) {\n const chunk = text.slice(i, i + charsPerPage);\n pages.push({\n page_num: pages.length,\n text: chunk,\n token_count: countTokens(chunk),\n });\n }\n return pages;\n}\n\n/** Load PDF files using pdfjs-dist. */\nexport class PDFLoader {\n async load(path: string): Promise<Page[]> {\n const { extractPages } = await import(\"./pdf-parser.js\");\n return extractPages(path);\n }\n}\n\n/** Load plain text or markdown files. */\nexport class TextLoader {\n readonly charsPerPage: number;\n\n constructor(charsPerPage: number = 3000) {\n this.charsPerPage = charsPerPage;\n }\n\n async load(path: string): Promise<Page[]> {\n const fs = await import(\"node:fs/promises\");\n const text = await fs.readFile(path, \"utf-8\");\n return textToPages(text, this.charsPerPage);\n }\n}\n\n/** Load HTML files, stripping tags to plain text. 
*/\nexport class HTMLLoader {\n readonly charsPerPage: number;\n\n constructor(charsPerPage: number = 3000) {\n this.charsPerPage = charsPerPage;\n }\n\n async load(path: string): Promise<Page[]> {\n const fs = await import(\"node:fs/promises\");\n const html = await fs.readFile(path, \"utf-8\");\n const text = await this.stripHtml(html);\n return textToPages(text, this.charsPerPage);\n }\n\n private async stripHtml(html: string): Promise<string> {\n try {\n // Try htmlparser2 if available\n // @ts-expect-error -- optional peer dependency\n const { Parser } = await import(\"htmlparser2\");\n return new Promise((resolve) => {\n const parts: string[] = [];\n let skip = false;\n\n const parser = new Parser({\n onopentag(name: string) {\n if (name === \"script\" || name === \"style\") skip = true;\n },\n onclosetag(name: string) {\n if (name === \"script\" || name === \"style\") skip = false;\n if (\n [\"p\", \"div\", \"br\", \"h1\", \"h2\", \"h3\", \"h4\", \"h5\", \"h6\", \"li\", \"tr\"].includes(name)\n ) {\n parts.push(\"\\n\");\n }\n },\n ontext(data: string) {\n if (!skip) parts.push(data);\n },\n onend() {\n const raw = parts.join(\"\");\n resolve(raw.replace(/\\n{3,}/g, \"\\n\\n\").trim());\n },\n });\n\n parser.write(html);\n parser.end();\n });\n } catch {\n // Fallback: simple regex-based tag stripping\n return html\n .replace(/<script[^>]*>[\\s\\S]*?<\\/script>/gi, \"\")\n .replace(/<style[^>]*>[\\s\\S]*?<\\/style>/gi, \"\")\n .replace(/<[^>]+>/g, \" \")\n .replace(/\\s+/g, \" \")\n .trim();\n }\n }\n}\n\n/** Load DOCX files using mammoth. */\nexport class DOCXLoader {\n readonly charsPerPage: number;\n\n constructor(charsPerPage: number = 3000) {\n this.charsPerPage = charsPerPage;\n }\n\n async load(path: string): Promise<Page[]> {\n const fs = await import(\"node:fs/promises\");\n // @ts-expect-error -- optional peer dependency\n const mammoth = await import(\"mammoth\");\n const buffer = await fs.readFile(path);\n const result = await mammoth.extractRawText({ buffer });\n return textToPages(result.value, this.charsPerPage);\n }\n}\n\ninterface Loader {\n load(path: string): Promise<Page[]>;\n}\n\nconst EXTENSION_MAP: Record<string, { new (): Loader }> = {\n \".pdf\": PDFLoader,\n \".txt\": TextLoader,\n \".md\": TextLoader,\n \".html\": HTMLLoader,\n \".htm\": HTMLLoader,\n \".docx\": DOCXLoader,\n};\n\n/** Auto-detect file format and load pages. */\nexport async function autoLoader(filePath: string): Promise<Page[]> {\n const { extname } = await import(\"node:path\");\n const ext = extname(filePath).toLowerCase();\n const LoaderClass = EXTENSION_MAP[ext];\n if (!LoaderClass) {\n const supported = Object.keys(EXTENSION_MAP).join(\", \");\n throw new Error(\n `Unsupported file extension '${ext}'. Supported: ${supported}`,\n );\n }\n const loader = new LoaderClass();\n return loader.load(filePath);\n}\n","/** TreeDex: Tree-based document RAG framework. 
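A short sketch of the loader layer above; the file paths are hypothetical, and only the extension map shown is assumed:

import { autoLoader, textToPages, TextLoader } from "treedex";

// textToPages splits raw text into synthetic pages by character count.
const pages = textToPages("a".repeat(7000), 3000);
console.log(pages.length); // 3 pages: 3000 + 3000 + 1000 chars

// autoLoader dispatches on extension (.pdf, .txt, .md, .html, .htm, .docx)
// and throws for anything else.
const doc = await autoLoader("./handbook.pdf"); // hypothetical path

// Loaders can also be constructed directly, e.g. with a smaller page size.
const notes = await new TextLoader(1500).load("./notes.md"); // hypothetical path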
*/\n\nimport { autoLoader } from \"./loaders.js\";\nimport { groupPages } from \"./pdf-parser.js\";\nimport {\n assignNodeIds,\n assignPageRanges,\n embedTextInTree,\n findLargeNodes,\n listToTree,\n} from \"./tree-builder.js\";\nimport {\n collectNodeTexts,\n countNodes,\n createNodeMapping,\n extractJson,\n getLeafNodes,\n printTree,\n stripTextFromTree,\n} from \"./tree-utils.js\";\nimport {\n structureExtractionPrompt,\n structureContinuePrompt,\n retrievalPrompt,\n} from \"./prompts.js\";\nimport type { Page, TreeNode, IndexData, Stats } from \"./types.js\";\nimport type { BaseLLM } from \"./llm-backends.js\";\n\n/** Result of a TreeDex query. */\nexport class QueryResult {\n readonly context: string;\n readonly nodeIds: string[];\n readonly pageRanges: [number, number][];\n readonly reasoning: string;\n\n constructor(\n context: string,\n nodeIds: string[],\n pageRanges: [number, number][],\n reasoning: string,\n ) {\n this.context = context;\n this.nodeIds = nodeIds;\n this.pageRanges = pageRanges;\n this.reasoning = reasoning;\n }\n\n /** Human-readable page ranges like 'pages 5-8, 12-15'. */\n get pagesStr(): string {\n if (this.pageRanges.length === 0) return \"no pages\";\n const parts: string[] = [];\n for (const [start, end] of this.pageRanges) {\n if (start === end) {\n parts.push(String(start + 1));\n } else {\n parts.push(`${start + 1}-${end + 1}`);\n }\n }\n return \"pages \" + parts.join(\", \");\n }\n\n toString(): string {\n return (\n `QueryResult(nodes=${JSON.stringify(this.nodeIds)}, ${this.pagesStr}, ` +\n `context_len=${this.context.length})`\n );\n }\n}\n\n/** Tree-based document index for RAG retrieval. */\nexport class TreeDex {\n readonly tree: TreeNode[];\n readonly pages: Page[];\n llm: BaseLLM | null;\n private _nodeMap: Record<string, TreeNode>;\n\n constructor(tree: TreeNode[], pages: Page[], llm: BaseLLM | null = null) {\n this.tree = tree;\n this.pages = pages;\n this.llm = llm;\n this._nodeMap = createNodeMapping(tree);\n }\n\n /**\n * Build a TreeDex index from a file.\n *\n * @param path - Path to document (PDF, TXT, HTML, DOCX)\n * @param llm - LLM backend with .generate(prompt) method\n * @param options - Optional configuration\n */\n static async fromFile(\n path: string,\n llm: BaseLLM,\n options?: {\n loader?: { load(path: string): Promise<Page[]> };\n maxTokens?: number;\n overlap?: number;\n verbose?: boolean;\n },\n ): Promise<TreeDex> {\n const {\n loader,\n maxTokens = 20000,\n overlap = 1,\n verbose = true,\n } = options ?? {};\n\n if (verbose) {\n const { basename } = await import(\"node:path\");\n console.log(`Loading: ${basename(path)}`);\n }\n\n let pages: Page[];\n if (loader) {\n pages = await loader.load(path);\n } else {\n pages = await autoLoader(path);\n }\n\n if (verbose) {\n const totalTokens = pages.reduce((s, p) => s + p.token_count, 0);\n console.log(` ${pages.length} pages, ${totalTokens.toLocaleString()} tokens`);\n }\n\n return TreeDex.fromPages(pages, llm, { maxTokens, overlap, verbose });\n }\n\n /** Build a TreeDex index from pre-extracted pages. */\n static async fromPages(\n pages: Page[],\n llm: BaseLLM,\n options?: {\n maxTokens?: number;\n overlap?: number;\n verbose?: boolean;\n },\n ): Promise<TreeDex> {\n const { maxTokens = 20000, overlap = 1, verbose = true } = options ?? 
{};\n\n const groups = groupPages(pages, maxTokens, overlap);\n\n if (verbose) {\n console.log(` ${groups.length} page group(s) for structure extraction`);\n }\n\n const allSections: Array<{\n structure: string;\n title: string;\n physical_index: number;\n }> = [];\n\n for (let i = 0; i < groups.length; i++) {\n if (verbose) {\n console.log(\n ` Extracting structure from group ${i + 1}/${groups.length}...`,\n );\n }\n\n let prompt: string;\n if (i === 0) {\n prompt = structureExtractionPrompt(groups[i]);\n } else {\n const prevJson = JSON.stringify(allSections, null, 2);\n prompt = structureContinuePrompt(prevJson, groups[i]);\n }\n\n const response = await llm.generate(prompt);\n const sections = extractJson(response);\n\n if (Array.isArray(sections)) {\n allSections.push(\n ...(sections as Array<{\n structure: string;\n title: string;\n physical_index: number;\n }>),\n );\n } else if (\n sections !== null &&\n typeof sections === \"object\" &&\n \"sections\" in (sections as Record<string, unknown>)\n ) {\n allSections.push(\n ...((sections as { sections: Array<{ structure: string; title: string; physical_index: number }> }).sections),\n );\n }\n }\n\n if (verbose) {\n console.log(` Extracted ${allSections.length} sections`);\n }\n\n // Build tree\n const tree = listToTree(allSections);\n assignPageRanges(tree, pages.length);\n assignNodeIds(tree);\n embedTextInTree(tree, pages);\n\n if (verbose) {\n console.log(` Tree: ${countNodes(tree)} nodes`);\n }\n\n return new TreeDex(tree, pages, llm);\n }\n\n /** Create a TreeDex from an existing tree and pages. */\n static fromTree(\n tree: TreeNode[],\n pages: Page[],\n llm: BaseLLM | null = null,\n ): TreeDex {\n return new TreeDex(tree, pages, llm);\n }\n\n /**\n * Query the index and return relevant context.\n *\n * @param question - The user's question\n * @param llm - Optional LLM override. Uses this.llm if not provided.\n */\n async query(question: string, llm?: BaseLLM): Promise<QueryResult> {\n const activeLlm = llm ?? this.llm;\n if (!activeLlm) {\n throw new Error(\n \"No LLM provided. Pass llm to query() or TreeDex constructor.\",\n );\n }\n\n // Build lightweight tree structure for the prompt\n const stripped = stripTextFromTree(this.tree);\n const treeJson = JSON.stringify(stripped, null, 2);\n\n const prompt = retrievalPrompt(treeJson, question);\n const response = await activeLlm.generate(prompt);\n const result = extractJson(response) as {\n node_ids?: string[];\n reasoning?: string;\n };\n\n const nodeIds = result.node_ids ?? [];\n const reasoning = result.reasoning ?? \"\";\n\n // Collect context text and page ranges\n const context = collectNodeTexts(nodeIds, this._nodeMap);\n\n const pageRanges: [number, number][] = [];\n for (const nid of nodeIds) {\n const node = this._nodeMap[nid];\n if (node) {\n const start = node.start_index ?? 0;\n const end = node.end_index ?? 0;\n pageRanges.push([start, end]);\n }\n }\n\n return new QueryResult(context, nodeIds, pageRanges, reasoning);\n }\n\n /** Save the index to a JSON file. */\n async save(path: string): Promise<string> {\n const fs = await import(\"node:fs/promises\");\n const stripped = stripTextFromTree(this.tree);\n\n const data: IndexData = {\n version: \"1.0\",\n framework: \"TreeDex\",\n tree: stripped,\n pages: this.pages,\n };\n\n await fs.writeFile(path, JSON.stringify(data, null, 2), \"utf-8\");\n return path;\n }\n\n /** Load a TreeDex index from a JSON file. 
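To make the index/query/save flow above concrete, a hedged end-to-end sketch (model name and paths are placeholders; any BaseLLM works):

import { TreeDex, OllamaLLM } from "treedex";

const llm = new OllamaLLM("llama3"); // placeholder local model

// fromFile: load pages, group them, run structure extraction per group.
const index = await TreeDex.fromFile("./handbook.pdf", llm);

// query: the LLM picks node IDs from the text-stripped tree, and the
// matching nodes' embedded text is concatenated into context.
const result = await index.query("What is the refund policy?");
console.log(result.pagesStr);  // e.g. "pages 12-14"
console.log(result.reasoning); // why those sections were selected

// save: persists the stripped tree plus the raw pages as JSON.
await index.save("./handbook.index.json");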
*/\n static async load(path: string, llm?: BaseLLM | null): Promise<TreeDex> {\n const fs = await import(\"node:fs/promises\");\n const raw = await fs.readFile(path, \"utf-8\");\n const data = JSON.parse(raw) as IndexData;\n\n const tree = data.tree;\n const pages = data.pages;\n\n // Re-embed text from pages\n assignPageRanges(tree, pages.length);\n embedTextInTree(tree, pages);\n\n return new TreeDex(tree, pages, llm ?? null);\n }\n\n /** Pretty-print the tree structure. */\n showTree(): void {\n printTree(this.tree);\n }\n\n /** Return index statistics. */\n stats(): Stats {\n const totalTokens = this.pages.reduce((s, p) => s + p.token_count, 0);\n const leaves = getLeafNodes(this.tree);\n return {\n total_pages: this.pages.length,\n total_tokens: totalTokens,\n total_nodes: countNodes(this.tree),\n leaf_nodes: leaves.length,\n root_sections: this.tree.length,\n };\n }\n\n /** Find sections that exceed size thresholds. */\n findLargeSections(options?: {\n maxPages?: number;\n maxTokens?: number;\n }): TreeNode[] {\n return findLargeNodes(this.tree, {\n maxPages: options?.maxPages ?? 10,\n maxTokens: options?.maxTokens ?? 20000,\n pages: this.pages,\n });\n }\n}\n","/** Tree construction utilities. */\n\nimport type { TreeNode, Page } from \"./types.js\";\n\n/**\n * Convert a flat list with `structure` fields into a hierarchical tree.\n *\n * Each item must have a `structure` field like \"1\", \"1.1\", \"1.2.3\".\n * Parent of \"1.2.3\" is \"1.2\", parent of \"1.2\" is \"1\", \"1\" is root.\n * Output nodes get a `nodes: []` field for children.\n */\nexport function listToTree(\n flatList: Array<{\n structure: string;\n title: string;\n physical_index: number;\n [key: string]: unknown;\n }>,\n): TreeNode[] {\n const nodesByStructure: Record<string, TreeNode> = {};\n const roots: TreeNode[] = [];\n\n for (const item of flatList) {\n const node: TreeNode = { ...item, nodes: [] };\n const structure = node.structure;\n nodesByStructure[structure] = node;\n\n const parts = structure.split(\".\");\n if (parts.length === 1) {\n roots.push(node);\n } else {\n const parentStructure = parts.slice(0, -1).join(\".\");\n const parent = nodesByStructure[parentStructure];\n if (parent !== undefined) {\n parent.nodes.push(node);\n } else {\n roots.push(node);\n }\n }\n }\n\n return roots;\n}\n\nfunction assignRanges(nodes: TreeNode[], boundaryEnd: number): void {\n for (let i = 0; i < nodes.length; i++) {\n const node = nodes[i];\n node.start_index = node.physical_index ?? 0;\n\n if (i + 1 < nodes.length) {\n node.end_index = (nodes[i + 1].physical_index ?? 0) - 1;\n } else {\n node.end_index = boundaryEnd;\n }\n\n if (node.nodes.length > 0) {\n assignRanges(node.nodes, node.end_index);\n }\n }\n}\n\n/**\n * Set start_index and end_index on each node.\n *\n * - start_index = node's physical_index\n * - end_index = next sibling's physical_index - 1, or parent's end,\n * or total_pages - 1 for the last root node\n */\nexport function assignPageRanges(\n tree: TreeNode[],\n totalPages: number,\n): TreeNode[] {\n assignRanges(tree, totalPages - 1);\n return tree;\n}\n\n/** DFS traversal, assigns sequential IDs: '0001', '0002', etc. */\nexport function assignNodeIds(tree: TreeNode[]): TreeNode[] {\n let counter = 0;\n\n function walk(nodes: TreeNode[]): void {\n for (const node of nodes) {\n counter++;\n node.node_id = String(counter).padStart(4, \"0\");\n walk(node.nodes);\n }\n }\n\n walk(tree);\n return tree;\n}\n\n/** Return nodes that exceed page or token thresholds. 
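A small worked example of the tree-builder conventions above: dotted structure IDs nest by prefix, page ranges run to the next sibling's start, and node IDs are assigned in DFS order.

import { listToTree, assignPageRanges, assignNodeIds } from "treedex";

const flat = [
  { structure: "1", title: "Intro", physical_index: 0 },
  { structure: "1.1", title: "Background", physical_index: 1 },
  { structure: "2", title: "Methods", physical_index: 4 },
];

const tree = listToTree(flat); // "1.1" nests under "1"; "1" and "2" are roots
assignPageRanges(tree, 10);    // document has 10 pages
assignNodeIds(tree);           // DFS: "0001", "0002", "0003"

console.log(tree[0].start_index, tree[0].end_index); // 0 3 (next root starts at 4)
console.log(tree[1].start_index, tree[1].end_index); // 4 9 (last root runs to the end)
console.log(tree[0].nodes[0].node_id);               // "0002"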
*/\nexport function findLargeNodes(\n tree: TreeNode[],\n options: {\n maxPages?: number;\n maxTokens?: number;\n pages?: Page[] | null;\n } = {},\n): TreeNode[] {\n const { maxPages = 10, maxTokens = 20000, pages = null } = options;\n const large: TreeNode[] = [];\n\n function walk(nodes: TreeNode[]): void {\n for (const node of nodes) {\n const start = node.start_index ?? 0;\n const end = node.end_index ?? 0;\n const pageCount = end - start + 1;\n\n let isLarge = pageCount > maxPages;\n\n if (!isLarge && pages !== null) {\n const tokenSum = pages\n .filter((p) => p.page_num >= start && p.page_num <= end)\n .reduce((sum, p) => sum + p.token_count, 0);\n isLarge = tokenSum > maxTokens;\n }\n\n if (isLarge) {\n large.push(node);\n }\n\n walk(node.nodes);\n }\n }\n\n walk(tree);\n return large;\n}\n\n/** Add `text` field to each node by concatenating page text for its range. */\nexport function embedTextInTree(\n tree: TreeNode[],\n pages: Page[],\n): TreeNode[] {\n function walk(nodes: TreeNode[]): void {\n for (const node of nodes) {\n const start = node.start_index ?? 0;\n const end = node.end_index ?? 0;\n node.text = pages\n .filter((p) => p.page_num >= start && p.page_num <= end)\n .map((p) => p.text)\n .join(\"\\n\");\n walk(node.nodes);\n }\n }\n\n walk(tree);\n return tree;\n}\n","/** Tree manipulation and utility functions. */\n\nimport type { TreeNode } from \"./types.js\";\n\n/** Flatten tree into {node_id: node_dict} for O(1) lookups. */\nexport function createNodeMapping(tree: TreeNode[]): Record<string, TreeNode> {\n const mapping: Record<string, TreeNode> = {};\n\n function walk(nodes: TreeNode[]): void {\n for (const node of nodes) {\n if (node.node_id !== undefined) {\n mapping[node.node_id] = node;\n }\n walk(node.nodes);\n }\n }\n\n walk(tree);\n return mapping;\n}\n\n/** Return a deep copy of the tree with all `text` fields removed. */\nexport function stripTextFromTree(tree: TreeNode[]): TreeNode[] {\n const stripped: TreeNode[] = JSON.parse(JSON.stringify(tree));\n\n function strip(nodes: TreeNode[]): void {\n for (const node of nodes) {\n delete node.text;\n strip(node.nodes);\n }\n }\n\n strip(stripped);\n return stripped;\n}\n\n/**\n * Gather and concatenate text from a list of node IDs.\n *\n * Format:\n * [Section: Title]\n * text\n *\n * [Section: Title2]\n * text2\n */\nexport function collectNodeTexts(\n nodeIds: string[],\n nodeMap: Record<string, TreeNode>,\n): string {\n const parts: string[] = [];\n for (const nid of nodeIds) {\n const node = nodeMap[nid];\n if (node === undefined) continue;\n const title = node.title ?? \"Untitled\";\n const structure = node.structure ?? \"\";\n const text = node.text ?? \"\";\n const header = structure ? `[${structure}: ${title}]` : `[${title}]`;\n parts.push(`${header}\\n${text}`);\n }\n return parts.join(\"\\n\\n\");\n}\n\n/** Recursively count total nodes in the tree. */\nexport function countNodes(tree: TreeNode[]): number {\n let total = 0;\n for (const node of tree) {\n total += 1;\n total += countNodes(node.nodes);\n }\n return total;\n}\n\n/** Return all nodes with empty `nodes` list. */\nexport function getLeafNodes(tree: TreeNode[]): TreeNode[] {\n const leaves: TreeNode[] = [];\n\n function walk(nodes: TreeNode[]): void {\n for (const node of nodes) {\n if (node.nodes.length === 0) {\n leaves.push(node);\n } else {\n walk(node.nodes);\n }\n }\n }\n\n walk(tree);\n return leaves;\n}\n\n/** Flatten hierarchy back to a list in DFS order. 
*/\nexport function treeToFlatList(\n tree: TreeNode[],\n): Array<Omit<TreeNode, \"nodes\">> {\n const result: Array<Omit<TreeNode, \"nodes\">> = [];\n\n function walk(nodes: TreeNode[]): void {\n for (const node of nodes) {\n const { nodes: children, ...flat } = node;\n result.push(flat);\n walk(children);\n }\n }\n\n walk(tree);\n return result;\n}\n\n/**\n * Robust JSON extraction from LLM responses.\n *\n * Handles raw JSON, ```json code blocks, and minor formatting issues\n * like trailing commas.\n */\nexport function extractJson(text: string): unknown {\n // Try direct parse\n try {\n return JSON.parse(text);\n } catch {\n // continue\n }\n\n // Try code block\n const blockMatch = text.match(/```(?:json)?\\s*\\n?(.*?)```/s);\n if (blockMatch) {\n const block = blockMatch[1].trim();\n try {\n return JSON.parse(block);\n } catch {\n const cleaned = block.replace(/,\\s*([}\\]])/g, \"$1\");\n try {\n return JSON.parse(cleaned);\n } catch {\n // continue\n }\n }\n }\n\n // Try finding JSON by matching braces/brackets\n for (const [startChar, endChar] of [\n [\"{\", \"}\"],\n [\"[\", \"]\"],\n ] as const) {\n const start = text.indexOf(startChar);\n if (start === -1) continue;\n let depth = 0;\n for (let i = start; i < text.length; i++) {\n if (text[i] === startChar) depth++;\n else if (text[i] === endChar) {\n depth--;\n if (depth === 0) {\n const candidate = text.slice(start, i + 1);\n try {\n return JSON.parse(candidate);\n } catch {\n const cleaned = candidate.replace(/,\\s*([}\\]])/g, \"$1\");\n try {\n return JSON.parse(cleaned);\n } catch {\n break;\n }\n }\n }\n }\n }\n }\n\n throw new Error(\n `Could not extract JSON from text: ${text.slice(0, 200)}...`,\n );\n}\n\n/** Pretty-print tree structure for debugging. */\nexport function printTree(tree: TreeNode[], indent: number = 0): void {\n const prefix = \" \".repeat(indent);\n for (const node of tree) {\n const nodeId = node.node_id ?? \"????\";\n const structure = node.structure ?? \"\";\n const title = node.title ?? \"Untitled\";\n const start = node.start_index ?? \"?\";\n const end = node.end_index ?? \"?\";\n console.log(\n `${prefix}[${nodeId}] ${structure}: ${title} (pages ${start}-${end})`,\n );\n printTree(node.nodes, indent + 1);\n }\n}\n","/** Prompt templates for structure extraction and retrieval. */\n\nexport function structureExtractionPrompt(text: string): string {\n return `You are a document structure analyzer. Given the following document text with \\\nphysical page index tags, extract the hierarchical structure (table of contents).\n\nReturn a JSON list of objects, each with:\n- \"structure\": hierarchical numbering like \"1\", \"1.1\", \"1.2.3\"\n- \"title\": the section/chapter title\n- \"physical_index\": the page number (from the <physical_index_N> tag) where this section starts\n\nRules:\n- Use the physical_index tags to determine page numbers\n- Create a logical hierarchy: chapters -> sections -> subsections\n- Every section must have a unique structure ID\n- Return ONLY valid JSON — no extra text\n\nDocument text:\n${text}\n\nJSON output:\n`;\n}\n\nexport function structureContinuePrompt(\n previousStructure: string,\n text: string,\n): string {\n return `You are continuing to extract the hierarchical structure of a document.\n\nHere is the structure extracted so far:\n${previousStructure}\n\nNow extract the structure from the next portion of the document. \\\nContinue the numbering from where the previous structure left off. 
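The extractJson fallbacks above cover the common ways LLMs wrap their output; a quick sketch of what each stage recovers:

import { extractJson } from "treedex";

// 1. Clean JSON parses directly.
extractJson('{"node_ids": ["0001"]}');

// 2. Fenced blocks are unwrapped and trailing commas stripped.
extractJson('```json\n{"node_ids": ["0001", "0002",]}\n```');

// 3. JSON buried in chatter is recovered by balanced brace/bracket matching.
extractJson('Sure! Here it is: {"node_ids": [], "reasoning": "n/a"} Enjoy.');

// Anything unrecoverable throws, quoting the first 200 characters.
try {
  extractJson("no json here");
} catch (err) {
  console.error((err as Error).message);
}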
\\\nIf a section from the previous portion continues into this portion, \\\ndo NOT duplicate it.\n\nReturn a JSON list of NEW sections only (same format as before).\n\nDocument text:\n${text}\n\nJSON output:\n`;\n}\n\nexport function retrievalPrompt(\n treeStructure: string,\n query: string,\n): string {\n return `You are a document retrieval system. Given a document's tree structure and a \\\nuser query, select the most relevant sections that would contain the answer.\n\nDocument structure:\n${treeStructure}\n\nUser query: ${query}\n\nReturn a JSON object with:\n- \"node_ids\": list of node IDs (strings like \"0001\", \"0005\") that are most \\\nrelevant to the query\n- \"reasoning\": brief explanation of why these sections were selected\n\nSelect the smallest set of sections that fully covers the answer. \\\nPrefer leaf nodes over parent nodes when the leaf contains the specific content. \\\nReturn ONLY valid JSON.\n\nJSON output:\n`;\n}\n","/**\n * LLM backends for TreeDex.\n *\n * Hierarchy:\n * BaseLLM — abstract base, subclass for custom LLMs\n * ├── GeminiLLM — Google Gemini (lazy SDK)\n * ├── OpenAILLM — OpenAI (lazy SDK)\n * ├── ClaudeLLM — Anthropic Claude (lazy SDK)\n * ├── MistralLLM — Mistral AI (lazy SDK)\n * ├── CohereLLM — Cohere (lazy SDK)\n * ├── GroqLLM — Groq (lazy SDK)\n * ├── OpenAICompatibleLLM — Any OpenAI-compatible endpoint (fetch)\n * │ ├── TogetherLLM — Together AI (pre-configured URL)\n * │ ├── FireworksLLM — Fireworks AI (pre-configured URL)\n * │ ├── OpenRouterLLM — OpenRouter (pre-configured URL)\n * │ ├── DeepSeekLLM — DeepSeek (pre-configured URL)\n * │ ├── CerebrasLLM — Cerebras (pre-configured URL)\n * │ └── SambanovaLLM — SambaNova (pre-configured URL)\n * ├── HuggingFaceLLM — HuggingFace Inference API (fetch)\n * ├── OllamaLLM — Ollama native /api/generate (fetch)\n * └── FunctionLLM — Wrap any callable (prompt: string) => string\n *\n * Named providers (including GroqLLM) lazy-import their SDKs.\n * OpenAICompatibleLLM and its subclasses, HuggingFaceLLM, and OllamaLLM use only fetch.\n */\n\n// ---------------------------------------------------------------------------\n// Base\n// ---------------------------------------------------------------------------\n\n/** Base class for all LLM backends. Subclass and implement generate(). */\nexport abstract class BaseLLM {\n abstract generate(prompt: string): Promise<string>;\n\n toString(): string {\n return `${this.constructor.name}()`;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Named SDK providers (lazy imports)\n// ---------------------------------------------------------------------------\n\n/** Google Gemini via @google/generative-ai SDK. 
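Per the hierarchy above, custom backends come in two flavors: subclass BaseLLM, or wrap a plain function with FunctionLLM. A hedged sketch (the stub responses are placeholders):

import { BaseLLM, FunctionLLM } from "treedex";

// Option 1: subclass and implement generate().
class EchoLLM extends BaseLLM {
  async generate(prompt: string): Promise<string> {
    return prompt.slice(0, 32); // a real backend would call a model here
  }
}

// Option 2: wrap any (prompt) => string | Promise<string> callable.
const gateway = new FunctionLLM(async (prompt) => {
  return '{"node_ids": [], "reasoning": "stub"}'; // placeholder response
});

console.log(String(new EchoLLM())); // "EchoLLM()"
console.log(String(gateway));       // "FunctionLLM(fn=anonymous)"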
*/\nexport class GeminiLLM extends BaseLLM {\n readonly apiKey: string;\n readonly modelName: string;\n private _client: unknown = null;\n\n constructor(apiKey: string, model: string = \"gemini-2.0-flash\") {\n super();\n this.apiKey = apiKey;\n this.modelName = model;\n }\n\n private async getClient(): Promise<unknown> {\n if (this._client === null) {\n // @ts-expect-error -- optional peer dependency\n const { GoogleGenerativeAI } = await import(\"@google/generative-ai\");\n const genai = new GoogleGenerativeAI(this.apiKey);\n this._client = genai.getGenerativeModel({ model: this.modelName });\n }\n return this._client;\n }\n\n async generate(prompt: string): Promise<string> {\n const model = await this.getClient() as { generateContent(p: string): Promise<{ response: { text(): string } }> };\n const response = await model.generateContent(prompt);\n return response.response.text();\n }\n\n toString(): string {\n return `GeminiLLM(model=${JSON.stringify(this.modelName)})`;\n }\n}\n\n/** OpenAI via openai SDK. */\nexport class OpenAILLM extends BaseLLM {\n readonly apiKey: string;\n readonly modelName: string;\n private _client: unknown = null;\n\n constructor(apiKey: string, model: string = \"gpt-4o\") {\n super();\n this.apiKey = apiKey;\n this.modelName = model;\n }\n\n private async getClient(): Promise<unknown> {\n if (this._client === null) {\n // @ts-expect-error -- optional peer dependency\n const { default: OpenAI } = await import(\"openai\");\n this._client = new OpenAI({ apiKey: this.apiKey });\n }\n return this._client;\n }\n\n async generate(prompt: string): Promise<string> {\n const client = await this.getClient() as {\n chat: {\n completions: {\n create(opts: unknown): Promise<{\n choices: Array<{ message: { content: string } }>;\n }>;\n };\n };\n };\n const response = await client.chat.completions.create({\n model: this.modelName,\n messages: [{ role: \"user\", content: prompt }],\n });\n return response.choices[0].message.content;\n }\n\n toString(): string {\n return `OpenAILLM(model=${JSON.stringify(this.modelName)})`;\n }\n}\n\n/** Anthropic Claude via @anthropic-ai/sdk. */\nexport class ClaudeLLM extends BaseLLM {\n readonly apiKey: string;\n readonly modelName: string;\n private _client: unknown = null;\n\n constructor(apiKey: string, model: string = \"claude-sonnet-4-20250514\") {\n super();\n this.apiKey = apiKey;\n this.modelName = model;\n }\n\n private async getClient(): Promise<unknown> {\n if (this._client === null) {\n // @ts-expect-error -- optional peer dependency\n const { default: Anthropic } = await import(\"@anthropic-ai/sdk\");\n this._client = new Anthropic({ apiKey: this.apiKey });\n }\n return this._client;\n }\n\n async generate(prompt: string): Promise<string> {\n const client = await this.getClient() as {\n messages: {\n create(opts: unknown): Promise<{\n content: Array<{ text: string }>;\n }>;\n };\n };\n const response = await client.messages.create({\n model: this.modelName,\n max_tokens: 4096,\n messages: [{ role: \"user\", content: prompt }],\n });\n return response.content[0].text;\n }\n\n toString(): string {\n return `ClaudeLLM(model=${JSON.stringify(this.modelName)})`;\n }\n}\n\n/** Mistral AI via @mistralai/mistralai SDK. 
*/\nexport class MistralLLM extends BaseLLM {\n readonly apiKey: string;\n readonly modelName: string;\n private _client: unknown = null;\n\n constructor(apiKey: string, model: string = \"mistral-large-latest\") {\n super();\n this.apiKey = apiKey;\n this.modelName = model;\n }\n\n private async getClient(): Promise<unknown> {\n if (this._client === null) {\n // @ts-expect-error -- optional peer dependency\n const { Mistral } = await import(\"@mistralai/mistralai\");\n this._client = new Mistral({ apiKey: this.apiKey });\n }\n return this._client;\n }\n\n async generate(prompt: string): Promise<string> {\n const client = await this.getClient() as {\n chat: {\n complete(opts: unknown): Promise<{\n choices: Array<{ message: { content: string } }>;\n }>;\n };\n };\n const response = await client.chat.complete({\n model: this.modelName,\n messages: [{ role: \"user\", content: prompt }],\n });\n return response.choices[0].message.content;\n }\n\n toString(): string {\n return `MistralLLM(model=${JSON.stringify(this.modelName)})`;\n }\n}\n\n/** Cohere via cohere-ai SDK. */\nexport class CohereLLM extends BaseLLM {\n readonly apiKey: string;\n readonly modelName: string;\n private _client: unknown = null;\n\n constructor(apiKey: string, model: string = \"command-r-plus\") {\n super();\n this.apiKey = apiKey;\n this.modelName = model;\n }\n\n private async getClient(): Promise<unknown> {\n if (this._client === null) {\n // @ts-expect-error -- optional peer dependency\n const { CohereClientV2 } = await import(\"cohere-ai\");\n this._client = new CohereClientV2({ token: this.apiKey });\n }\n return this._client;\n }\n\n async generate(prompt: string): Promise<string> {\n const client = await this.getClient() as {\n chat(opts: unknown): Promise<{\n message: { content: Array<{ text: string }> };\n }>;\n };\n const response = await client.chat({\n model: this.modelName,\n messages: [{ role: \"user\", content: prompt }],\n });\n return response.message.content[0].text;\n }\n\n toString(): string {\n return `CohereLLM(model=${JSON.stringify(this.modelName)})`;\n }\n}\n\n// ---------------------------------------------------------------------------\n// OpenAI-compatible (fetch only) + convenience wrappers\n// ---------------------------------------------------------------------------\n\n/**\n * Universal backend for any OpenAI-compatible API endpoint.\n *\n * Works with: Groq, Together AI, Fireworks, vLLM, LM Studio,\n * OpenRouter, DeepSeek, Cerebras, SambaNova, Ollama (OpenAI mode),\n * and any other compatible service.\n */\nexport class OpenAICompatibleLLM extends BaseLLM {\n baseUrl: string;\n readonly model: string;\n readonly apiKey: string | null;\n readonly maxTokens: number;\n readonly temperature: number;\n readonly extraHeaders: Record<string, string>;\n\n constructor(options: {\n baseUrl: string;\n model: string;\n apiKey?: string | null;\n maxTokens?: number;\n temperature?: number;\n extraHeaders?: Record<string, string>;\n }) {\n super();\n this.baseUrl = options.baseUrl.replace(/\\/+$/, \"\");\n this.model = options.model;\n this.apiKey = options.apiKey ?? null;\n this.maxTokens = options.maxTokens ?? 4096;\n this.temperature = options.temperature ?? 0.0;\n this.extraHeaders = options.extraHeaders ?? 
{};\n }\n\n private buildHeaders(): Record<string, string> {\n const headers: Record<string, string> = {\n \"Content-Type\": \"application/json\",\n \"User-Agent\": \"TreeDex/0.1\",\n };\n if (this.apiKey) {\n headers[\"Authorization\"] = `Bearer ${this.apiKey}`;\n }\n Object.assign(headers, this.extraHeaders);\n return headers;\n }\n\n async generate(prompt: string): Promise<string> {\n const url = `${this.baseUrl}/chat/completions`;\n\n const payload = {\n model: this.model,\n messages: [{ role: \"user\", content: prompt }],\n max_tokens: this.maxTokens,\n temperature: this.temperature,\n };\n\n const resp = await fetch(url, {\n method: \"POST\",\n headers: this.buildHeaders(),\n body: JSON.stringify(payload),\n signal: AbortSignal.timeout(120_000),\n });\n\n if (!resp.ok) {\n const errorBody = await resp.text();\n throw new Error(\n `API request failed (${resp.status}): ${errorBody}`,\n );\n }\n\n const body = (await resp.json()) as {\n choices: Array<{ message: { content: string } }>;\n };\n return body.choices[0].message.content;\n }\n\n toString(): string {\n return `OpenAICompatibleLLM(baseUrl=${JSON.stringify(this.baseUrl)}, model=${JSON.stringify(this.model)})`;\n }\n}\n\n/** Groq — fast LLM inference via groq SDK. */\nexport class GroqLLM extends BaseLLM {\n readonly apiKey: string;\n readonly model: string;\n private _client: unknown = null;\n\n constructor(apiKey: string, model: string = \"llama-3.3-70b-versatile\") {\n super();\n this.apiKey = apiKey;\n this.model = model;\n }\n\n private async getClient(): Promise<unknown> {\n if (this._client === null) {\n // @ts-expect-error -- optional peer dependency\n const { default: Groq } = await import(\"groq-sdk\");\n this._client = new Groq({ apiKey: this.apiKey });\n }\n return this._client;\n }\n\n async generate(prompt: string): Promise<string> {\n const client = await this.getClient() as {\n chat: {\n completions: {\n create(opts: unknown): Promise<{\n choices: Array<{ message: { content: string } }>;\n }>;\n };\n };\n };\n const response = await client.chat.completions.create({\n model: this.model,\n messages: [{ role: \"user\", content: prompt }],\n });\n return response.choices[0].message.content;\n }\n\n toString(): string {\n return `GroqLLM(model=${JSON.stringify(this.model)})`;\n }\n}\n\n/** Together AI — open-source models. Zero SDK dependencies. */\nexport class TogetherLLM extends OpenAICompatibleLLM {\n constructor(\n apiKey: string,\n model: string = \"meta-llama/Llama-3-70b-chat-hf\",\n options?: { maxTokens?: number; temperature?: number },\n ) {\n super({\n baseUrl: \"https://api.together.xyz/v1\",\n model,\n apiKey,\n ...options,\n });\n }\n\n toString(): string {\n return `TogetherLLM(model=${JSON.stringify(this.model)})`;\n }\n}\n\n/** Fireworks AI — fast open-source inference. Zero SDK dependencies. */\nexport class FireworksLLM extends OpenAICompatibleLLM {\n constructor(\n apiKey: string,\n model: string = \"accounts/fireworks/models/llama-v3p1-70b-instruct\",\n options?: { maxTokens?: number; temperature?: number },\n ) {\n super({\n baseUrl: \"https://api.fireworks.ai/inference/v1\",\n model,\n apiKey,\n ...options,\n });\n }\n\n toString(): string {\n return `FireworksLLM(model=${JSON.stringify(this.model)})`;\n }\n}\n\n/** OpenRouter — access any model via one API. Zero SDK dependencies. 
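Since the preconfigured wrappers above only pin a base URL, OpenAICompatibleLLM itself can target any self-hosted OpenAI-compatible server; the URL and model name below are placeholders:

import { OpenAICompatibleLLM } from "treedex";

// Any service exposing POST {baseUrl}/chat/completions works:
// vLLM, LM Studio, Ollama's OpenAI mode, etc.
const local = new OpenAICompatibleLLM({
  baseUrl: "http://localhost:8000/v1", // hypothetical local server
  model: "my-local-model",             // hypothetical model name
  apiKey: null,                        // skips the Authorization header
  temperature: 0.0,
  extraHeaders: { "X-Request-Source": "treedex-demo" },
});

const reply = await local.generate("Reply with the single word: ok");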
*/\nexport class OpenRouterLLM extends OpenAICompatibleLLM {\n constructor(\n apiKey: string,\n model: string = \"anthropic/claude-sonnet-4\",\n options?: { maxTokens?: number; temperature?: number },\n ) {\n super({\n baseUrl: \"https://openrouter.ai/api/v1\",\n model,\n apiKey,\n ...options,\n });\n }\n\n toString(): string {\n return `OpenRouterLLM(model=${JSON.stringify(this.model)})`;\n }\n}\n\n/** DeepSeek — powerful reasoning models. Zero SDK dependencies. */\nexport class DeepSeekLLM extends OpenAICompatibleLLM {\n constructor(\n apiKey: string,\n model: string = \"deepseek-chat\",\n options?: { maxTokens?: number; temperature?: number },\n ) {\n super({\n baseUrl: \"https://api.deepseek.com/v1\",\n model,\n apiKey,\n ...options,\n });\n }\n\n toString(): string {\n return `DeepSeekLLM(model=${JSON.stringify(this.model)})`;\n }\n}\n\n/** Cerebras — ultra-fast inference. Zero SDK dependencies. */\nexport class CerebrasLLM extends OpenAICompatibleLLM {\n constructor(\n apiKey: string,\n model: string = \"llama-3.3-70b\",\n options?: { maxTokens?: number; temperature?: number },\n ) {\n super({\n baseUrl: \"https://api.cerebras.ai/v1\",\n model,\n apiKey,\n ...options,\n });\n }\n\n toString(): string {\n return `CerebrasLLM(model=${JSON.stringify(this.model)})`;\n }\n}\n\n/** SambaNova — fast AI inference. Zero SDK dependencies. */\nexport class SambanovaLLM extends OpenAICompatibleLLM {\n constructor(\n apiKey: string,\n model: string = \"Meta-Llama-3.1-70B-Instruct\",\n options?: { maxTokens?: number; temperature?: number },\n ) {\n super({\n baseUrl: \"https://api.sambanova.ai/v1\",\n model,\n apiKey,\n ...options,\n });\n }\n\n toString(): string {\n return `SambanovaLLM(model=${JSON.stringify(this.model)})`;\n }\n}\n\n// ---------------------------------------------------------------------------\n// HuggingFace Inference API (fetch only)\n// ---------------------------------------------------------------------------\n\n/** HuggingFace Inference API. Zero SDK dependencies. */\nexport class HuggingFaceLLM extends BaseLLM {\n readonly apiKey: string;\n readonly model: string;\n readonly maxTokens: number;\n\n constructor(\n apiKey: string,\n model: string = \"mistralai/Mistral-7B-Instruct-v0.3\",\n maxTokens: number = 4096,\n ) {\n super();\n this.apiKey = apiKey;\n this.model = model;\n this.maxTokens = maxTokens;\n }\n\n async generate(prompt: string): Promise<string> {\n const url = `https://api-inference.huggingface.co/models/${this.model}/v1/chat/completions`;\n\n const payload = {\n model: this.model,\n messages: [{ role: \"user\", content: prompt }],\n max_tokens: this.maxTokens,\n };\n\n const resp = await fetch(url, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"User-Agent\": \"TreeDex/0.1\",\n Authorization: `Bearer ${this.apiKey}`,\n },\n body: JSON.stringify(payload),\n signal: AbortSignal.timeout(120_000),\n });\n\n if (!resp.ok) {\n const errorBody = await resp.text();\n throw new Error(\n `HuggingFace request failed (${resp.status}): ${errorBody}`,\n );\n }\n\n const body = (await resp.json()) as {\n choices: Array<{ message: { content: string } }>;\n };\n return body.choices[0].message.content;\n }\n\n toString(): string {\n return `HuggingFaceLLM(model=${JSON.stringify(this.model)})`;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Ollama native\n// ---------------------------------------------------------------------------\n\n/** Ollama native backend using /api/generate endpoint. 
*/\nexport class OllamaLLM extends BaseLLM {\n readonly model: string;\n baseUrl: string;\n\n constructor(\n model: string = \"llama3\",\n baseUrl: string = \"http://localhost:11434\",\n ) {\n super();\n this.model = model;\n this.baseUrl = baseUrl.replace(/\\/+$/, \"\");\n }\n\n async generate(prompt: string): Promise<string> {\n const url = `${this.baseUrl}/api/generate`;\n\n const payload = {\n model: this.model,\n prompt,\n stream: false,\n };\n\n const resp = await fetch(url, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"User-Agent\": \"TreeDex/0.1\",\n },\n body: JSON.stringify(payload),\n signal: AbortSignal.timeout(120_000),\n });\n\n if (!resp.ok) {\n const errorBody = await resp.text();\n throw new Error(\n `Ollama request failed (${resp.status}): ${errorBody}`,\n );\n }\n\n const body = (await resp.json()) as { response: string };\n return body.response;\n }\n\n toString(): string {\n return `OllamaLLM(model=${JSON.stringify(this.model)})`;\n }\n}\n\n// ---------------------------------------------------------------------------\n// FunctionLLM — wrap any callable\n// ---------------------------------------------------------------------------\n\n/** Wrap any async or sync function as an LLM backend. */\nexport class FunctionLLM extends BaseLLM {\n private readonly _fn: (prompt: string) => string | Promise<string>;\n\n constructor(fn: (prompt: string) => string | Promise<string>) {\n super();\n if (typeof fn !== \"function\") {\n throw new TypeError(`Expected a function, got ${typeof fn}`);\n }\n this._fn = fn;\n }\n\n async generate(prompt: string): Promise<string> {\n const result = await this._fn(prompt);\n if (typeof result !== \"string\") {\n throw new TypeError(\n `LLM function must return string, got ${typeof result}`,\n );\n }\n return result;\n }\n\n toString(): string {\n const name = this._fn.name || \"anonymous\";\n return `FunctionLLM(fn=${name})`;\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAMO,SAAS,YAAY,MAAsB;AAChD,aAAO,6BAAO,IAAI,EAAE;AACtB;AAOA,eAAsB,aAAa,SAAkC;AACnE,QAAM,KAAK,MAAM,OAAO,aAAkB;AAC1C,QAAM,QAAQ,MAAM,OAAO,iCAAiC;AAE5D,QAAM,OAAO,MAAM,GAAG,SAAS,OAAO;AACtC,QAAM,MAAM,MAAM,MAAM,YAAY,EAAE,KAAK,CAAC,EAAE;AAE9C,QAAM,QAAgB,CAAC;AACvB,WAAS,IAAI,GAAG,IAAI,IAAI,UAAU,KAAK;AACrC,UAAM,OAAO,MAAM,IAAI,QAAQ,IAAI,CAAC;AACpC,UAAM,UAAU,MAAM,KAAK,eAAe;AAC1C,UAAM,OAAO,QAAQ,MAClB,IAAI,CAAC,SAAkB;AACtB,YAAM,MAAM;AACZ,aAAO,OAAO,IAAI,QAAQ,WAAW,IAAI,MAAM;AAAA,IACjD,CAAC,EACA,KAAK,GAAG;AACX,UAAM,KAAK;AAAA,MACT,UAAU;AAAA,MACV;AAAA,MACA,aAAa,YAAY,IAAI;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AASO,SAAS,kBACd,OACA,OACA,KACQ;AACR,QAAM,QAAkB,CAAC;AACzB,aAAW,QAAQ,MAAM,MAAM,OAAO,MAAM,CAAC,GAAG;AAC9C,UAAM,IAAI,KAAK;AACf,UAAM,KAAK,mBAAmB,CAAC,IAAI,KAAK,IAAI,oBAAoB,CAAC,GAAG;AAAA,EACtE;AACA,SAAO,MAAM,KAAK,IAAI;AACxB;AAOO,SAAS,WACd,OACA,YAAoB,KACpB,UAAkB,GACR;AACV,QAAM,cAAc,MAAM,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,aAAa,CAAC;AAEnE,MAAI,eAAe,WAAW;AAC5B,WAAO,CAAC,kBAAkB,OAAO,GAAG,MAAM,SAAS,CAAC,CAAC;AAAA,EACvD;AAEA,QAAM,SAAmB,CAAC;AAC1B,MAAI,aAAa;AAEjB,SAAO,aAAa,MAAM,QAAQ;AAChC,QAAI,UAAU;AACd,QAAI,WAAW;AAEf,WAAO,WAAW,MAAM,QAAQ;AAC9B,YAAM,aAAa,MAAM,QAAQ,EAAE;AACnC,UAAI,UAAU,aAAa,aAAa,WAAW,YAAY;AAC7D,oBAAY;AACZ;AAAA,MACF;AACA,iBAAW;AACX,kBAAY;AAAA,IACd;AAEA,QAAI,YAAY,MAAM,QAAQ;AAC5B,iBAAW,MAAM,SAAS;AAAA,IAC5B;AAEA,eAAW,KAAK,IAAI,UAAU,MAAM,SAAS,CAAC;AAC9C,WAAO,KAAK,kBAAkB,OAAO,YAAY,QAAQ,CAAC;AAE1D,QAAI,YAAY,MAAM,SAAS,GAAG;AAChC;AAAA,IACF;AAEA,UAAM,YAAY,WAAW,IAAI;AACjC,iBAAa,KAAK,IAAI,WAAW,aAAa,CAAC;AAAA,EACjD;AAEA,SAAO;AACT;AA/GA,IAEA;AAFA;AAAA;AAAA;AAEA,2BAAuB;AAAA;AAAA;;;ACFvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACOA;AAIO,SAAS,YACd,MACA,eAAuB,KACf;AACR,QAAM,QAAgB,CAAC;AACvB,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK,cAAc;AAClD,UAAM,QAAQ,KAAK,MAAM,GAAG,IAAI,YAAY;AAC5C,UAAM,KAAK;AAAA,MACT,UAAU,MAAM;AAAA,MAChB,MAAM;AAAA,MACN,aAAa,YAAY,KAAK;AAAA,IAChC,CAAC;AAAA,EACH;AACA,SAAO;AACT;AAGO,IAAM,YAAN,MAAgB;AAAA,EACrB,MAAM,KAAK,MAA+B;AACxC,UAAM,EAAE,cAAAA,cAAa,IAAI,MAAM;AAC/B,WAAOA,cAAa,IAAI;AAAA,EAC1B;AACF;AAGO,IAAM,aAAN,MAAiB;AAAA,EACb;AAAA,EAET,YAAY,eAAuB,KAAM;AACvC,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,KAAK,MAA+B;AACxC,UAAM,KAAK,MAAM,OAAO,aAAkB;AAC1C,UAAM,OAAO,MAAM,GAAG,SAAS,MAAM,OAAO;AAC5C,WAAO,YAAY,MAAM,KAAK,YAAY;AAAA,EAC5C;AACF;AAGO,IAAM,aAAN,MAAiB;AAAA,EACb;AAAA,EAET,YAAY,eAAuB,KAAM;AACvC,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,KAAK,MAA+B;AACxC,UAAM,KAAK,MAAM,OAAO,aAAkB;AAC1C,UAAM,OAAO,MAAM,GAAG,SAAS,MAAM,OAAO;AAC5C,UAAM,OAAO,MAAM,KAAK,UAAU,IAAI;AACtC,WAAO,YAAY,MAAM,KAAK,YAAY;AAAA,EAC5C;AAAA,EAEA,MAAc,UAAU,MAA+B;AACrD,QAAI;AAGF,YAAM,EAAE,OAAO,IAAI,MAAM,OAAO,aAAa;AAC7C,aAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,cAAM,QAAkB,CAAC;AACzB,YAAI,OAAO;AAEX,cAAM,SAAS,IAAI,OAAO;AAAA,UACxB,UAAU,MAAc;AACtB,gBAAI,SAAS,YAAY,SAAS,QAAS,QAAO;AAAA,UACpD;AAAA,UACA,WAAW,MAAc;AACvB,gBAAI,SAAS,YAAY,SAAS,QAAS,QAAO;AAClD,gBACE,CAAC,KAAK,OAAO,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,IAAI,EAAE,SAAS,IAAI,GAChF;AACA,oBAAM,KAAK,IAAI;AAAA,YACjB;AAAA,UACF;AAAA,UACA,OAAO,MAAc;AACnB,gBAAI,CAAC,KAAM,OAAM,KAAK,IAAI;AAAA,UAC5B;AAAA,UACA,QAAQ;AACN,kBAAM,MAAM,MAAM,KAAK,EAAE;AACzB,oBAAQ,IAAI,QAAQ,WAAW,MAAM,EAAE,KAAK,CAAC;AAAA,UAC/C;AAAA,QACF,CAAC;AAED,eAAO,MAAM,IAAI;AACjB,eAAO,IAAI;AAAA,MACb,CAAC;AAAA,IACH,QAAQ;AAEN,aAAO,KACJ,QAAQ,qCAAqC,EAAE,EAC/C,QAAQ,mCAAmC,EAAE,EAC7C,QAAQ,YAAY,GAAG,EAC
vB,QAAQ,QAAQ,GAAG,EACnB,KAAK;AAAA,IACV;AAAA,EACF;AACF;AAGO,IAAM,aAAN,MAAiB;AAAA,EACb;AAAA,EAET,YAAY,eAAuB,KAAM;AACvC,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,KAAK,MAA+B;AACxC,UAAM,KAAK,MAAM,OAAO,aAAkB;AAE1C,UAAM,UAAU,MAAM,OAAO,SAAS;AACtC,UAAM,SAAS,MAAM,GAAG,SAAS,IAAI;AACrC,UAAM,SAAS,MAAM,QAAQ,eAAe,EAAE,OAAO,CAAC;AACtD,WAAO,YAAY,OAAO,OAAO,KAAK,YAAY;AAAA,EACpD;AACF;AAMA,IAAM,gBAAoD;AAAA,EACxD,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AACX;AAGA,eAAsB,WAAW,UAAmC;AAClE,QAAM,EAAE,QAAQ,IAAI,MAAM,OAAO,MAAW;AAC5C,QAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAC1C,QAAM,cAAc,cAAc,GAAG;AACrC,MAAI,CAAC,aAAa;AAChB,UAAM,YAAY,OAAO,KAAK,aAAa,EAAE,KAAK,IAAI;AACtD,UAAM,IAAI;AAAA,MACR,+BAA+B,GAAG,iBAAiB,SAAS;AAAA,IAC9D;AAAA,EACF;AACA,QAAM,SAAS,IAAI,YAAY;AAC/B,SAAO,OAAO,KAAK,QAAQ;AAC7B;;;ACvJA;;;ACQO,SAAS,WACd,UAMY;AACZ,QAAM,mBAA6C,CAAC;AACpD,QAAM,QAAoB,CAAC;AAE3B,aAAW,QAAQ,UAAU;AAC3B,UAAM,OAAiB,EAAE,GAAG,MAAM,OAAO,CAAC,EAAE;AAC5C,UAAM,YAAY,KAAK;AACvB,qBAAiB,SAAS,IAAI;AAE9B,UAAM,QAAQ,UAAU,MAAM,GAAG;AACjC,QAAI,MAAM,WAAW,GAAG;AACtB,YAAM,KAAK,IAAI;AAAA,IACjB,OAAO;AACL,YAAM,kBAAkB,MAAM,MAAM,GAAG,EAAE,EAAE,KAAK,GAAG;AACnD,YAAM,SAAS,iBAAiB,eAAe;AAC/C,UAAI,WAAW,QAAW;AACxB,eAAO,MAAM,KAAK,IAAI;AAAA,MACxB,OAAO;AACL,cAAM,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,aAAa,OAAmB,aAA2B;AAClE,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,OAAO,MAAM,CAAC;AACpB,SAAK,cAAc,KAAK,kBAAkB;AAE1C,QAAI,IAAI,IAAI,MAAM,QAAQ;AACxB,WAAK,aAAa,MAAM,IAAI,CAAC,EAAE,kBAAkB,KAAK;AAAA,IACxD,OAAO;AACL,WAAK,YAAY;AAAA,IACnB;AAEA,QAAI,KAAK,MAAM,SAAS,GAAG;AACzB,mBAAa,KAAK,OAAO,KAAK,SAAS;AAAA,IACzC;AAAA,EACF;AACF;AASO,SAAS,iBACd,MACA,YACY;AACZ,eAAa,MAAM,aAAa,CAAC;AACjC,SAAO;AACT;AAGO,SAAS,cAAc,MAA8B;AAC1D,MAAI,UAAU;AAEd,WAAS,KAAK,OAAyB;AACrC,eAAW,QAAQ,OAAO;AACxB;AACA,WAAK,UAAU,OAAO,OAAO,EAAE,SAAS,GAAG,GAAG;AAC9C,WAAK,KAAK,KAAK;AAAA,IACjB;AAAA,EACF;AAEA,OAAK,IAAI;AACT,SAAO;AACT;AAGO,SAAS,eACd,MACA,UAII,CAAC,GACO;AACZ,QAAM,EAAE,WAAW,IAAI,YAAY,KAAO,QAAQ,KAAK,IAAI;AAC3D,QAAM,QAAoB,CAAC;AAE3B,WAAS,KAAK,OAAyB;AACrC,eAAW,QAAQ,OAAO;AACxB,YAAM,QAAQ,KAAK,eAAe;AAClC,YAAM,MAAM,KAAK,aAAa;AAC9B,YAAM,YAAY,MAAM,QAAQ;AAEhC,UAAI,UAAU,YAAY;AAE1B,UAAI,CAAC,WAAW,UAAU,MAAM;AAC9B,cAAM,WAAW,MACd,OAAO,CAAC,MAAM,EAAE,YAAY,SAAS,EAAE,YAAY,GAAG,EACtD,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,aAAa,CAAC;AAC5C,kBAAU,WAAW;AAAA,MACvB;AAEA,UAAI,SAAS;AACX,cAAM,KAAK,IAAI;AAAA,MACjB;AAEA,WAAK,KAAK,KAAK;AAAA,IACjB;AAAA,EACF;AAEA,OAAK,IAAI;AACT,SAAO;AACT;AAGO,SAAS,gBACd,MACA,OACY;AACZ,WAAS,KAAK,OAAyB;AACrC,eAAW,QAAQ,OAAO;AACxB,YAAM,QAAQ,KAAK,eAAe;AAClC,YAAM,MAAM,KAAK,aAAa;AAC9B,WAAK,OAAO,MACT,OAAO,CAAC,MAAM,EAAE,YAAY,SAAS,EAAE,YAAY,GAAG,EACtD,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,IAAI;AACZ,WAAK,KAAK,KAAK;AAAA,IACjB;AAAA,EACF;AAEA,OAAK,IAAI;AACT,SAAO;AACT;;;ACjJO,SAAS,kBAAkB,MAA4C;AAC5E,QAAM,UAAoC,CAAC;AAE3C,WAAS,KAAK,OAAyB;AACrC,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,YAAY,QAAW;AAC9B,gBAAQ,KAAK,OAAO,IAAI;AAAA,MAC1B;AACA,WAAK,KAAK,KAAK;AAAA,IACjB;AAAA,EACF;AAEA,OAAK,IAAI;AACT,SAAO;AACT;AAGO,SAAS,kBAAkB,MAA8B;AAC9D,QAAM,WAAuB,KAAK,MAAM,KAAK,UAAU,IAAI,CAAC;AAE5D,WAAS,MAAM,OAAyB;AACtC,eAAW,QAAQ,OAAO;AACxB,aAAO,KAAK;AACZ,YAAM,KAAK,KAAK;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,QAAQ;AACd,SAAO;AACT;AAYO,SAAS,iBACd,SACA,SACQ;AACR,QAAM,QAAkB,CAAC;AACzB,aAAW,OAAO,SAAS;AACzB,UAAM,OAAO,QAAQ,GAAG;AACxB,QAAI,SAAS,OAAW;AACxB,UAAM,QAAQ,KAAK,SAAS;AAC5B,UAAM,YAAY,KAAK,aAAa;AACpC,UAAM,OAAO,KAAK,QAAQ;AAC1B,UAAM,SAAS,YAAY,IAAI,SAAS,KAAK,KAAK,MAAM,IAAI,KAAK;AACjE,UAAM,KAAK,GAAG,MAAM;AAAA,EAAK,IAAI,EAAE;AAAA,EACjC;AACA,SAAO,MAAM,KAAK,MAAM;AAC1B;AAGO,SAAS,WAAW,MAA0B;AACnD,MAAI,QAAQ;AACZ,aAAW,QAAQ,MAAM;AACvB,aAAS;
AACT,aAAS,WAAW,KAAK,KAAK;AAAA,EAChC;AACA,SAAO;AACT;AAGO,SAAS,aAAa,MAA8B;AACzD,QAAM,SAAqB,CAAC;AAE5B,WAAS,KAAK,OAAyB;AACrC,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,MAAM,WAAW,GAAG;AAC3B,eAAO,KAAK,IAAI;AAAA,MAClB,OAAO;AACL,aAAK,KAAK,KAAK;AAAA,MACjB;AAAA,IACF;AAAA,EACF;AAEA,OAAK,IAAI;AACT,SAAO;AACT;AAGO,SAAS,eACd,MACgC;AAChC,QAAM,SAAyC,CAAC;AAEhD,WAAS,KAAK,OAAyB;AACrC,eAAW,QAAQ,OAAO;AACxB,YAAM,EAAE,OAAO,UAAU,GAAG,KAAK,IAAI;AACrC,aAAO,KAAK,IAAI;AAChB,WAAK,QAAQ;AAAA,IACf;AAAA,EACF;AAEA,OAAK,IAAI;AACT,SAAO;AACT;AAQO,SAAS,YAAY,MAAuB;AAEjD,MAAI;AACF,WAAO,KAAK,MAAM,IAAI;AAAA,EACxB,QAAQ;AAAA,EAER;AAGA,QAAM,aAAa,KAAK,MAAM,6BAA6B;AAC3D,MAAI,YAAY;AACd,UAAM,QAAQ,WAAW,CAAC,EAAE,KAAK;AACjC,QAAI;AACF,aAAO,KAAK,MAAM,KAAK;AAAA,IACzB,QAAQ;AACN,YAAM,UAAU,MAAM,QAAQ,gBAAgB,IAAI;AAClD,UAAI;AACF,eAAO,KAAK,MAAM,OAAO;AAAA,MAC3B,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAGA,aAAW,CAAC,WAAW,OAAO,KAAK;AAAA,IACjC,CAAC,KAAK,GAAG;AAAA,IACT,CAAC,KAAK,GAAG;AAAA,EACX,GAAY;AACV,UAAM,QAAQ,KAAK,QAAQ,SAAS;AACpC,QAAI,UAAU,GAAI;AAClB,QAAI,QAAQ;AACZ,aAAS,IAAI,OAAO,IAAI,KAAK,QAAQ,KAAK;AACxC,UAAI,KAAK,CAAC,MAAM,UAAW;AAAA,eAClB,KAAK,CAAC,MAAM,SAAS;AAC5B;AACA,YAAI,UAAU,GAAG;AACf,gBAAM,YAAY,KAAK,MAAM,OAAO,IAAI,CAAC;AACzC,cAAI;AACF,mBAAO,KAAK,MAAM,SAAS;AAAA,UAC7B,QAAQ;AACN,kBAAM,UAAU,UAAU,QAAQ,gBAAgB,IAAI;AACtD,gBAAI;AACF,qBAAO,KAAK,MAAM,OAAO;AAAA,YAC3B,QAAQ;AACN;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,IAAI;AAAA,IACR,qCAAqC,KAAK,MAAM,GAAG,GAAG,CAAC;AAAA,EACzD;AACF;AAGO,SAAS,UAAU,MAAkB,SAAiB,GAAS;AACpE,QAAM,SAAS,KAAK,OAAO,MAAM;AACjC,aAAW,QAAQ,MAAM;AACvB,UAAM,SAAS,KAAK,WAAW;AAC/B,UAAM,YAAY,KAAK,aAAa;AACpC,UAAM,QAAQ,KAAK,SAAS;AAC5B,UAAM,QAAQ,KAAK,eAAe;AAClC,UAAM,MAAM,KAAK,aAAa;AAC9B,YAAQ;AAAA,MACN,GAAG,MAAM,IAAI,MAAM,KAAK,SAAS,KAAK,KAAK,WAAW,KAAK,IAAI,GAAG;AAAA,IACpE;AACA,cAAU,KAAK,OAAO,SAAS,CAAC;AAAA,EAClC;AACF;;;ACzLO,SAAS,0BAA0B,MAAsB;AAC9D,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeP,IAAI;AAAA;AAAA;AAAA;AAIN;AAEO,SAAS,wBACd,mBACA,MACQ;AACR,SAAO;AAAA;AAAA;AAAA,EAGP,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUjB,IAAI;AAAA;AAAA;AAAA;AAIN;AAEO,SAAS,gBACd,eACA,OACQ;AACR,SAAO;AAAA;AAAA;AAAA,EAIP,aAAa;AAAA;AAAA,cAED,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAanB;;;AHzCO,IAAM,cAAN,MAAkB;AAAA,EACd;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,YACE,SACA,SACA,YACA,WACA;AACA,SAAK,UAAU;AACf,SAAK,UAAU;AACf,SAAK,aAAa;AAClB,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,WAAmB;AACrB,QAAI,KAAK,WAAW,WAAW,EAAG,QAAO;AACzC,UAAM,QAAkB,CAAC;AACzB,eAAW,CAAC,OAAO,GAAG,KAAK,KAAK,YAAY;AAC1C,UAAI,UAAU,KAAK;AACjB,cAAM,KAAK,OAAO,QAAQ,CAAC,CAAC;AAAA,MAC9B,OAAO;AACL,cAAM,KAAK,GAAG,QAAQ,CAAC,IAAI,MAAM,CAAC,EAAE;AAAA,MACtC;AAAA,IACF;AACA,WAAO,WAAW,MAAM,KAAK,IAAI;AAAA,EACnC;AAAA,EAEA,WAAmB;AACjB,WACE,qBAAqB,KAAK,UAAU,KAAK,OAAO,CAAC,KAAK,KAAK,QAAQ,iBACpD,KAAK,QAAQ,MAAM;AAAA,EAEtC;AACF;AAGO,IAAM,UAAN,MAAM,SAAQ;AAAA,EACV;AAAA,EACA;AAAA,EACT;AAAA,EACQ;AAAA,EAER,YAAY,MAAkB,OAAe,MAAsB,MAAM;AACvE,SAAK,OAAO;AACZ,SAAK,QAAQ;AACb,SAAK,MAAM;AACX,SAAK,WAAW,kBAAkB,IAAI;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,aAAa,SACX,MACA,KACA,SAMkB;AAClB,UAAM;AAAA,MACJ;AAAA,MACA,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,UAAU;AAAA,IACZ,IAAI,WAAW,CAAC;AAEhB,QAAI,SAAS;AACX,YAAM,EAAE,SAAS,IAAI,MAAM,OAAO,MAAW;AAC7C,cAAQ,IAAI,YAAY,SAAS,IAAI,CAAC,EAAE;AAAA,IAC1C;AAEA,QAAI;AACJ,QAAI,QAAQ;AACV,cAAQ,MAAM,OAAO,KAAK,IAAI;AAAA,IAChC,OAAO;AACL,cAAQ,MAAM,WAAW,IAAI;AAAA,IAC/B;AAEA,QAAI,SAAS;AACX,YAAM,cAAc,MAAM,OAAO,CAAC,GAAG,MAAM,IAAI,EAAE,aAAa,CAAC;AAC/D,cAAQ,IAAI,KAAK,MAAM,MAAM,WAAW,YAAY,eAAe,CAAC,SAAS;AAAA,IAC/E;AAEA,WAAO,SAAQ,UAAU,OAAO,KAAK,EAAE,WAAW,SAAS,QAAQ,CAAC
;AAAA,EACtE;AAAA;AAAA,EAGA,aAAa,UACX,OACA,KACA,SAKkB;AAClB,UAAM,EAAE,YAAY,KAAO,UAAU,GAAG,UAAU,KAAK,IAAI,WAAW,CAAC;AAEvE,UAAM,SAAS,WAAW,OAAO,WAAW,OAAO;AAEnD,QAAI,SAAS;AACX,cAAQ,IAAI,KAAK,OAAO,MAAM,yCAAyC;AAAA,IACzE;AAEA,UAAM,cAID,CAAC;AAEN,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAI,SAAS;AACX,gBAAQ;AAAA,UACN,qCAAqC,IAAI,CAAC,IAAI,OAAO,MAAM;AAAA,QAC7D;AAAA,MACF;AAEA,UAAI;AACJ,UAAI,MAAM,GAAG;AACX,iBAAS,0BAA0B,OAAO,CAAC,CAAC;AAAA,MAC9C,OAAO;AACL,cAAM,WAAW,KAAK,UAAU,aAAa,MAAM,CAAC;AACpD,iBAAS,wBAAwB,UAAU,OAAO,CAAC,CAAC;AAAA,MACtD;AAEA,YAAM,WAAW,MAAM,IAAI,SAAS,MAAM;AAC1C,YAAM,WAAW,YAAY,QAAQ;AAErC,UAAI,MAAM,QAAQ,QAAQ,GAAG;AAC3B,oBAAY;AAAA,UACV,GAAI;AAAA,QAKN;AAAA,MACF,WACE,aAAa,QACb,OAAO,aAAa,YACpB,cAAe,UACf;AACA,oBAAY;AAAA,UACV,GAAK,SAA+F;AAAA,QACtG;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS;AACX,cAAQ,IAAI,eAAe,YAAY,MAAM,WAAW;AAAA,IAC1D;AAGA,UAAM,OAAO,WAAW,WAAW;AACnC,qBAAiB,MAAM,MAAM,MAAM;AACnC,kBAAc,IAAI;AAClB,oBAAgB,MAAM,KAAK;AAE3B,QAAI,SAAS;AACX,cAAQ,IAAI,WAAW,WAAW,IAAI,CAAC,QAAQ;AAAA,IACjD;AAEA,WAAO,IAAI,SAAQ,MAAM,OAAO,GAAG;AAAA,EACrC;AAAA;AAAA,EAGA,OAAO,SACL,MACA,OACA,MAAsB,MACb;AACT,WAAO,IAAI,SAAQ,MAAM,OAAO,GAAG;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,MAAM,UAAkB,KAAqC;AACjE,UAAM,YAAY,OAAO,KAAK;AAC9B,QAAI,CAAC,WAAW;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAGA,UAAM,WAAW,kBAAkB,KAAK,IAAI;AAC5C,UAAM,WAAW,KAAK,UAAU,UAAU,MAAM,CAAC;AAEjD,UAAM,SAAS,gBAAgB,UAAU,QAAQ;AACjD,UAAM,WAAW,MAAM,UAAU,SAAS,MAAM;AAChD,UAAM,SAAS,YAAY,QAAQ;AAKnC,UAAM,UAAU,OAAO,YAAY,CAAC;AACpC,UAAM,YAAY,OAAO,aAAa;AAGtC,UAAM,UAAU,iBAAiB,SAAS,KAAK,QAAQ;AAEvD,UAAM,aAAiC,CAAC;AACxC,eAAW,OAAO,SAAS;AACzB,YAAM,OAAO,KAAK,SAAS,GAAG;AAC9B,UAAI,MAAM;AACR,cAAM,QAAQ,KAAK,eAAe;AAClC,cAAM,MAAM,KAAK,aAAa;AAC9B,mBAAW,KAAK,CAAC,OAAO,GAAG,CAAC;AAAA,MAC9B;AAAA,IACF;AAEA,WAAO,IAAI,YAAY,SAAS,SAAS,YAAY,SAAS;AAAA,EAChE;AAAA;AAAA,EAGA,MAAM,KAAK,MAA+B;AACxC,UAAM,KAAK,MAAM,OAAO,aAAkB;AAC1C,UAAM,WAAW,kBAAkB,KAAK,IAAI;AAE5C,UAAM,OAAkB;AAAA,MACtB,SAAS;AAAA,MACT,WAAW;AAAA,MACX,MAAM;AAAA,MACN,OAAO,KAAK;AAAA,IACd;AAEA,UAAM,GAAG,UAAU,MAAM,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,OAAO;AAC/D,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,aAAa,KAAK,MAAc,KAAwC;AACtE,UAAM,KAAK,MAAM,OAAO,aAAkB;AAC1C,UAAM,MAAM,MAAM,GAAG,SAAS,MAAM,OAAO;AAC3C,UAAM,OAAO,KAAK,MAAM,GAAG;AAE3B,UAAM,OAAO,KAAK;AAClB,UAAM,QAAQ,KAAK;AAGnB,qBAAiB,MAAM,MAAM,MAAM;AACnC,oBAAgB,MAAM,KAAK;AAE3B,WAAO,IAAI,SAAQ,MAAM,OAAO,OAAO,IAAI;AAAA,EAC7C;AAAA;AAAA,EAGA,WAAiB;AACf,cAAU,KAAK,IAAI;AAAA,EACrB;AAAA;AAAA,EAGA,QAAe;AACb,UAAM,cAAc,KAAK,MAAM,OAAO,CAAC,GAAG,MAAM,IAAI,EAAE,aAAa,CAAC;AACpE,UAAM,SAAS,aAAa,KAAK,IAAI;AACrC,WAAO;AAAA,MACL,aAAa,KAAK,MAAM;AAAA,MACxB,cAAc;AAAA,MACd,aAAa,WAAW,KAAK,IAAI;AAAA,MACjC,YAAY,OAAO;AAAA,MACnB,eAAe,KAAK,KAAK;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,kBAAkB,SAGH;AACb,WAAO,eAAe,KAAK,MAAM;AAAA,MAC/B,UAAU,SAAS,YAAY;AAAA,MAC/B,WAAW,SAAS,aAAa;AAAA,MACjC,OAAO,KAAK;AAAA,IACd,CAAC;AAAA,EACH;AACF;;;AI/RO,IAAe,UAAf,MAAuB;AAAA,EAG5B,WAAmB;AACjB,WAAO,GAAG,KAAK,YAAY,IAAI;AAAA,EACjC;AACF;AAOO,IAAM,YAAN,cAAwB,QAAQ;AAAA,EAC5B;AAAA,EACA;AAAA,EACD,UAAmB;AAAA,EAE3B,YAAY,QAAgB,QAAgB,oBAAoB;AAC9D,UAAM;AACN,SAAK,SAAS;AACd,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAc,YAA8B;AAC1C,QAAI,KAAK,YAAY,MAAM;AAEzB,YAAM,EAAE,mBAAmB,IAAI,MAAM,OAAO,uBAAuB;AACnE,YAAM,QAAQ,IAAI,mBAAmB,KAAK,MAAM;AAChD,WAAK,UAAU,MAAM,mBAAmB,EAAE,OAAO,KAAK,UAAU,CAAC;AAAA,IACnE;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,SAAS,QAAiC;AAC9C,UAAM,QAAQ,MAAM,KAAK,UAAU;AACnC,UAAM,WAAW,MAAM,MAAM,gBAAgB,MAAM;AACnD,WAAO,SAAS,SAAS,KAAK;AAAA,EAChC;AAAA,EAEA,WAAmB;AACjB,WAAO,mBAAmB,KAAK,UAAU,KAAK,SAAS,CAAC;AAAA,EAC1D;AACF;AAGO,IAAM,YAAN,cAAwB,QAAQ;AAAA,EAC5B;AAAA,EACA;AAAA,EACD,UAAm
B;AAAA,EAE3B,YAAY,QAAgB,QAAgB,UAAU;AACpD,UAAM;AACN,SAAK,SAAS;AACd,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAc,YAA8B;AAC1C,QAAI,KAAK,YAAY,MAAM;AAEzB,YAAM,EAAE,SAAS,OAAO,IAAI,MAAM,OAAO,QAAQ;AACjD,WAAK,UAAU,IAAI,OAAO,EAAE,QAAQ,KAAK,OAAO,CAAC;AAAA,IACnD;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,SAAS,QAAiC;AAC9C,UAAM,SAAS,MAAM,KAAK,UAAU;AASpC,UAAM,WAAW,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,MACpD,OAAO,KAAK;AAAA,MACZ,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,IAC9C,CAAC;AACD,WAAO,SAAS,QAAQ,CAAC,EAAE,QAAQ;AAAA,EACrC;AAAA,EAEA,WAAmB;AACjB,WAAO,mBAAmB,KAAK,UAAU,KAAK,SAAS,CAAC;AAAA,EAC1D;AACF;AAGO,IAAM,YAAN,cAAwB,QAAQ;AAAA,EAC5B;AAAA,EACA;AAAA,EACD,UAAmB;AAAA,EAE3B,YAAY,QAAgB,QAAgB,4BAA4B;AACtE,UAAM;AACN,SAAK,SAAS;AACd,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAc,YAA8B;AAC1C,QAAI,KAAK,YAAY,MAAM;AAEzB,YAAM,EAAE,SAAS,UAAU,IAAI,MAAM,OAAO,mBAAmB;AAC/D,WAAK,UAAU,IAAI,UAAU,EAAE,QAAQ,KAAK,OAAO,CAAC;AAAA,IACtD;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,SAAS,QAAiC;AAC9C,UAAM,SAAS,MAAM,KAAK,UAAU;AAOpC,UAAM,WAAW,MAAM,OAAO,SAAS,OAAO;AAAA,MAC5C,OAAO,KAAK;AAAA,MACZ,YAAY;AAAA,MACZ,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,IAC9C,CAAC;AACD,WAAO,SAAS,QAAQ,CAAC,EAAE;AAAA,EAC7B;AAAA,EAEA,WAAmB;AACjB,WAAO,mBAAmB,KAAK,UAAU,KAAK,SAAS,CAAC;AAAA,EAC1D;AACF;AAGO,IAAM,aAAN,cAAyB,QAAQ;AAAA,EAC7B;AAAA,EACA;AAAA,EACD,UAAmB;AAAA,EAE3B,YAAY,QAAgB,QAAgB,wBAAwB;AAClE,UAAM;AACN,SAAK,SAAS;AACd,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAc,YAA8B;AAC1C,QAAI,KAAK,YAAY,MAAM;AAEzB,YAAM,EAAE,QAAQ,IAAI,MAAM,OAAO,sBAAsB;AACvD,WAAK,UAAU,IAAI,QAAQ,EAAE,QAAQ,KAAK,OAAO,CAAC;AAAA,IACpD;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,SAAS,QAAiC;AAC9C,UAAM,SAAS,MAAM,KAAK,UAAU;AAOpC,UAAM,WAAW,MAAM,OAAO,KAAK,SAAS;AAAA,MAC1C,OAAO,KAAK;AAAA,MACZ,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,IAC9C,CAAC;AACD,WAAO,SAAS,QAAQ,CAAC,EAAE,QAAQ;AAAA,EACrC;AAAA,EAEA,WAAmB;AACjB,WAAO,oBAAoB,KAAK,UAAU,KAAK,SAAS,CAAC;AAAA,EAC3D;AACF;AAGO,IAAM,YAAN,cAAwB,QAAQ;AAAA,EAC5B;AAAA,EACA;AAAA,EACD,UAAmB;AAAA,EAE3B,YAAY,QAAgB,QAAgB,kBAAkB;AAC5D,UAAM;AACN,SAAK,SAAS;AACd,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAc,YAA8B;AAC1C,QAAI,KAAK,YAAY,MAAM;AAEzB,YAAM,EAAE,eAAe,IAAI,MAAM,OAAO,WAAW;AACnD,WAAK,UAAU,IAAI,eAAe,EAAE,OAAO,KAAK,OAAO,CAAC;AAAA,IAC1D;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,SAAS,QAAiC;AAC9C,UAAM,SAAS,MAAM,KAAK,UAAU;AAKpC,UAAM,WAAW,MAAM,OAAO,KAAK;AAAA,MACjC,OAAO,KAAK;AAAA,MACZ,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,IAC9C,CAAC;AACD,WAAO,SAAS,QAAQ,QAAQ,CAAC,EAAE;AAAA,EACrC;AAAA,EAEA,WAAmB;AACjB,WAAO,mBAAmB,KAAK,UAAU,KAAK,SAAS,CAAC;AAAA,EAC1D;AACF;AAaO,IAAM,sBAAN,cAAkC,QAAQ;AAAA,EAC/C;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,YAAY,SAOT;AACD,UAAM;AACN,SAAK,UAAU,QAAQ,QAAQ,QAAQ,QAAQ,EAAE;AACjD,SAAK,QAAQ,QAAQ;AACrB,SAAK,SAAS,QAAQ,UAAU;AAChC,SAAK,YAAY,QAAQ,aAAa;AACtC,SAAK,cAAc,QAAQ,eAAe;AAC1C,SAAK,eAAe,QAAQ,gBAAgB,CAAC;AAAA,EAC/C;AAAA,EAEQ,eAAuC;AAC7C,UAAM,UAAkC;AAAA,MACtC,gBAAgB;AAAA,MAChB,cAAc;AAAA,IAChB;AACA,QAAI,KAAK,QAAQ;AACf,cAAQ,eAAe,IAAI,UAAU,KAAK,MAAM;AAAA,IAClD;AACA,WAAO,OAAO,SAAS,KAAK,YAAY;AACxC,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,SAAS,QAAiC;AAC9C,UAAM,MAAM,GAAG,KAAK,OAAO;AAE3B,UAAM,UAAU;AAAA,MACd,OAAO,KAAK;AAAA,MACZ,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,MAC5C,YAAY,KAAK;AAAA,MACjB,aAAa,KAAK;AAAA,IACpB;AAEA,UAAM,OAAO,MAAM,MAAM,KAAK;AAAA,MAC5B,QAAQ;AAAA,MACR,SAAS,KAAK,aAAa;AAAA,MAC3B,MAAM,KAAK,UAAU,OAAO;AAAA,MAC5B,QAAQ,YAAY,QAAQ,IAAO;AAAA,IACrC,CAAC;AAED,QAAI,CAAC,KAAK,IAAI;AACZ,YAAM,YAAY,MAAM,KAAK,KAAK;AAClC,YAAM,IAAI;AAAA,QACR,uBAAuB,KAAK,MAAM,MAAM,SAAS;AAAA,MACnD;AAAA,IACF;AAEA,UAAM,OAAQ,MAAM,KAAK,KAAK;AAG9B,WAAO,KAAK,QAAQ,CAAC,EAAE,QAAQ;AAAA,EACjC;AAAA,EAEA,WAAmB;AACjB,WAAO,+BAA+B,KAAK,UAAU
,KAAK,OAAO,CAAC,WAAW,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,EACzG;AACF;AAGO,IAAM,UAAN,cAAsB,QAAQ;AAAA,EAC1B;AAAA,EACA;AAAA,EACD,UAAmB;AAAA,EAE3B,YAAY,QAAgB,QAAgB,2BAA2B;AACrE,UAAM;AACN,SAAK,SAAS;AACd,SAAK,QAAQ;AAAA,EACf;AAAA,EAEA,MAAc,YAA8B;AAC1C,QAAI,KAAK,YAAY,MAAM;AAEzB,YAAM,EAAE,SAAS,KAAK,IAAI,MAAM,OAAO,UAAU;AACjD,WAAK,UAAU,IAAI,KAAK,EAAE,QAAQ,KAAK,OAAO,CAAC;AAAA,IACjD;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,SAAS,QAAiC;AAC9C,UAAM,SAAS,MAAM,KAAK,UAAU;AASpC,UAAM,WAAW,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,MACpD,OAAO,KAAK;AAAA,MACZ,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,IAC9C,CAAC;AACD,WAAO,SAAS,QAAQ,CAAC,EAAE,QAAQ;AAAA,EACrC;AAAA,EAEA,WAAmB;AACjB,WAAO,iBAAiB,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,EACpD;AACF;AAGO,IAAM,cAAN,cAA0B,oBAAoB;AAAA,EACnD,YACE,QACA,QAAgB,kCAChB,SACA;AACA,UAAM;AAAA,MACJ,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,GAAG;AAAA,IACL,CAAC;AAAA,EACH;AAAA,EAEA,WAAmB;AACjB,WAAO,qBAAqB,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,EACxD;AACF;AAGO,IAAM,eAAN,cAA2B,oBAAoB;AAAA,EACpD,YACE,QACA,QAAgB,qDAChB,SACA;AACA,UAAM;AAAA,MACJ,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,GAAG;AAAA,IACL,CAAC;AAAA,EACH;AAAA,EAEA,WAAmB;AACjB,WAAO,sBAAsB,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,EACzD;AACF;AAGO,IAAM,gBAAN,cAA4B,oBAAoB;AAAA,EACrD,YACE,QACA,QAAgB,6BAChB,SACA;AACA,UAAM;AAAA,MACJ,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,GAAG;AAAA,IACL,CAAC;AAAA,EACH;AAAA,EAEA,WAAmB;AACjB,WAAO,uBAAuB,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,EAC1D;AACF;AAGO,IAAM,cAAN,cAA0B,oBAAoB;AAAA,EACnD,YACE,QACA,QAAgB,iBAChB,SACA;AACA,UAAM;AAAA,MACJ,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,GAAG;AAAA,IACL,CAAC;AAAA,EACH;AAAA,EAEA,WAAmB;AACjB,WAAO,qBAAqB,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,EACxD;AACF;AAGO,IAAM,cAAN,cAA0B,oBAAoB;AAAA,EACnD,YACE,QACA,QAAgB,iBAChB,SACA;AACA,UAAM;AAAA,MACJ,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,GAAG;AAAA,IACL,CAAC;AAAA,EACH;AAAA,EAEA,WAAmB;AACjB,WAAO,qBAAqB,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,EACxD;AACF;AAGO,IAAM,eAAN,cAA2B,oBAAoB;AAAA,EACpD,YACE,QACA,QAAgB,+BAChB,SACA;AACA,UAAM;AAAA,MACJ,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,GAAG;AAAA,IACL,CAAC;AAAA,EACH;AAAA,EAEA,WAAmB;AACjB,WAAO,sBAAsB,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,EACzD;AACF;AAOO,IAAM,iBAAN,cAA6B,QAAQ;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AAAA,EAET,YACE,QACA,QAAgB,sCAChB,YAAoB,MACpB;AACA,UAAM;AACN,SAAK,SAAS;AACd,SAAK,QAAQ;AACb,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,SAAS,QAAiC;AAC9C,UAAM,MAAM,+CAA+C,KAAK,KAAK;AAErE,UAAM,UAAU;AAAA,MACd,OAAO,KAAK;AAAA,MACZ,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,MAC5C,YAAY,KAAK;AAAA,IACnB;AAEA,UAAM,OAAO,MAAM,MAAM,KAAK;AAAA,MAC5B,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,cAAc;AAAA,QACd,eAAe,UAAU,KAAK,MAAM;AAAA,MACtC;AAAA,MACA,MAAM,KAAK,UAAU,OAAO;AAAA,MAC5B,QAAQ,YAAY,QAAQ,IAAO;AAAA,IACrC,CAAC;AAED,QAAI,CAAC,KAAK,IAAI;AACZ,YAAM,YAAY,MAAM,KAAK,KAAK;AAClC,YAAM,IAAI;AAAA,QACR,+BAA+B,KAAK,MAAM,MAAM,SAAS;AAAA,MAC3D;AAAA,IACF;AAEA,UAAM,OAAQ,MAAM,KAAK,KAAK;AAG9B,WAAO,KAAK,QAAQ,CAAC,EAAE,QAAQ;AAAA,EACjC;AAAA,EAEA,WAAmB;AACjB,WAAO,wBAAwB,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,EAC3D;AACF;AAOO,IAAM,YAAN,cAAwB,QAAQ;AAAA,EAC5B;AAAA,EACT;AAAA,EAEA,YACE,QAAgB,UAChB,UAAkB,0BAClB;AACA,UAAM;AACN,SAAK,QAAQ;AACb,SAAK,UAAU,QAAQ,QAAQ,QAAQ,EAAE;AAAA,EAC3C;AAAA,EAEA,MAAM,SAAS,QAAiC;AAC9C,UAAM,MAAM,GAAG,KAAK,OAAO;AAE3B,UAAM,UAAU;AAAA,MACd,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,UAAM,OAAO,MAAM,MAAM,KAAK;AAAA,MAC5B,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,cAAc;AAAA,MAChB;AAAA,MACA,MAAM,KAAK,UAAU,OAAO;AAAA,MAC5B,QAAQ,YAAY,QAAQ,IAAO;AAAA,IACrC,CAAC;AAED,QAAI,CAAC,KAAK,IAAI;AACZ,YAAM,YAAY,MAAM,KAAK,KAAK;AAClC,YAAM,IAAI;AAAA,QACR,0BAA0B,KAAK,MAAM,MAAM,SAAS;AAAA,MACtD;AAAA,IACF;AAEA,UAAM,OAAQ,MAAM,KAAK,KAAK;AAC9B,WAAO,KAAK;AAAA,EACd;AAA
A,EAEA,WAAmB;AACjB,WAAO,mBAAmB,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,EACtD;AACF;AAOO,IAAM,cAAN,cAA0B,QAAQ;AAAA,EACtB;AAAA,EAEjB,YAAY,IAAkD;AAC5D,UAAM;AACN,QAAI,OAAO,OAAO,YAAY;AAC5B,YAAM,IAAI,UAAU,4BAA4B,OAAO,EAAE,EAAE;AAAA,IAC7D;AACA,SAAK,MAAM;AAAA,EACb;AAAA,EAEA,MAAM,SAAS,QAAiC;AAC9C,UAAM,SAAS,MAAM,KAAK,IAAI,MAAM;AACpC,QAAI,OAAO,WAAW,UAAU;AAC9B,YAAM,IAAI;AAAA,QACR,wCAAwC,OAAO,MAAM;AAAA,MACvD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,WAAmB;AACjB,UAAM,OAAO,KAAK,IAAI,QAAQ;AAC9B,WAAO,kBAAkB,IAAI;AAAA,EAC/B;AACF;;;ANvkBA;","names":["extractPages"]}
@@ -0,0 +1,419 @@
1
+ /** Shared type definitions for TreeDex. */
2
+ interface Page {
3
+ page_num: number;
4
+ text: string;
5
+ token_count: number;
6
+ }
7
+ interface TreeNode {
8
+ structure: string;
9
+ title: string;
10
+ physical_index: number;
11
+ nodes: TreeNode[];
12
+ start_index?: number;
13
+ end_index?: number;
14
+ node_id?: string;
15
+ text?: string;
16
+ [key: string]: unknown;
17
+ }
18
+ interface IndexData {
19
+ version: string;
20
+ framework: string;
21
+ tree: TreeNode[];
22
+ pages: Page[];
23
+ }
24
+ interface Stats {
25
+ total_pages: number;
26
+ total_tokens: number;
27
+ total_nodes: number;
28
+ leaf_nodes: number;
29
+ root_sections: number;
30
+ }
31
+
32
+ /**
33
+ * LLM backends for TreeDex.
34
+ *
35
+ * Hierarchy:
36
+ * BaseLLM — abstract base, subclass for custom LLMs
37
+ * ├── GeminiLLM — Google Gemini (lazy SDK)
38
+ * ├── OpenAILLM — OpenAI (lazy SDK)
39
+ * ├── ClaudeLLM — Anthropic Claude (lazy SDK)
40
+ * ├── MistralLLM — Mistral AI (lazy SDK)
41
+ * ├── CohereLLM — Cohere (lazy SDK)
42
+ * ├── GroqLLM — Groq (lazy SDK)
43
+ * ├── OpenAICompatibleLLM — Any OpenAI-compatible endpoint (fetch)
44
+ * │ ├── TogetherLLM — Together AI (pre-configured URL)
45
+ * │ ├── FireworksLLM — Fireworks AI (pre-configured URL)
46
+ * │ ├── OpenRouterLLM — OpenRouter (pre-configured URL)
47
+ * │ ├── DeepSeekLLM — DeepSeek (pre-configured URL)
48
+ * │ ├── CerebrasLLM — Cerebras (pre-configured URL)
49
+ * │ └── SambanovaLLM — SambaNova (pre-configured URL)
50
+ * ├── HuggingFaceLLM — HuggingFace Inference API (fetch)
51
+ * ├── OllamaLLM — Ollama native /api/generate (fetch)
52
+ * └── FunctionLLM — Wrap any (prompt: string) => string | Promise<string>
54
+ *
55
+ * Named providers lazy-import their SDKs.
56
+ * OpenAICompatibleLLM, HuggingFaceLLM, OllamaLLM use only fetch.
57
+ */
58
+ /** Base class for all LLM backends. Subclass and implement generate(). */
59
+ declare abstract class BaseLLM {
60
+ abstract generate(prompt: string): Promise<string>;
61
+ toString(): string;
62
+ }
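+ /**
+  * A minimal custom-backend sketch (illustrative, not part of the
+  * package): subclassing BaseLLM only requires implementing generate().
+  * The endpoint URL and response shape below are hypothetical.
+  *
+  * @example
+  * class MyHttpLLM extends BaseLLM {
+  *   async generate(prompt: string): Promise<string> {
+  *     const res = await fetch("https://example.invalid/complete", {
+  *       method: "POST",
+  *       headers: { "Content-Type": "application/json" },
+  *       body: JSON.stringify({ prompt }),
+  *     });
+  *     const data = (await res.json()) as { text: string };
+  *     return data.text;
+  *   }
+  * }
+  */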
63
+ /** Google Gemini via @google/generative-ai SDK. */
64
+ declare class GeminiLLM extends BaseLLM {
65
+ readonly apiKey: string;
66
+ readonly modelName: string;
67
+ private _client;
68
+ constructor(apiKey: string, model?: string);
69
+ private getClient;
70
+ generate(prompt: string): Promise<string>;
71
+ toString(): string;
72
+ }
73
+ /** OpenAI via openai SDK. */
74
+ declare class OpenAILLM extends BaseLLM {
75
+ readonly apiKey: string;
76
+ readonly modelName: string;
77
+ private _client;
78
+ constructor(apiKey: string, model?: string);
79
+ private getClient;
80
+ generate(prompt: string): Promise<string>;
81
+ toString(): string;
82
+ }
83
+ /** Anthropic Claude via @anthropic-ai/sdk. */
84
+ declare class ClaudeLLM extends BaseLLM {
85
+ readonly apiKey: string;
86
+ readonly modelName: string;
87
+ private _client;
88
+ constructor(apiKey: string, model?: string);
89
+ private getClient;
90
+ generate(prompt: string): Promise<string>;
91
+ toString(): string;
92
+ }
93
+ /** Mistral AI via @mistralai/mistralai SDK. */
94
+ declare class MistralLLM extends BaseLLM {
95
+ readonly apiKey: string;
96
+ readonly modelName: string;
97
+ private _client;
98
+ constructor(apiKey: string, model?: string);
99
+ private getClient;
100
+ generate(prompt: string): Promise<string>;
101
+ toString(): string;
102
+ }
103
+ /** Cohere via cohere-ai SDK. */
104
+ declare class CohereLLM extends BaseLLM {
105
+ readonly apiKey: string;
106
+ readonly modelName: string;
107
+ private _client;
108
+ constructor(apiKey: string, model?: string);
109
+ private getClient;
110
+ generate(prompt: string): Promise<string>;
111
+ toString(): string;
112
+ }
113
+ /**
114
+ * Universal backend for any OpenAI-compatible API endpoint.
115
+ *
116
+ * Works with: Groq, Together AI, Fireworks, vLLM, LM Studio,
117
+ * OpenRouter, DeepSeek, Cerebras, SambaNova, Ollama (OpenAI mode),
118
+ * and any other compatible service.
119
+ */
120
+ declare class OpenAICompatibleLLM extends BaseLLM {
121
+ baseUrl: string;
122
+ readonly model: string;
123
+ readonly apiKey: string | null;
124
+ readonly maxTokens: number;
125
+ readonly temperature: number;
126
+ readonly extraHeaders: Record<string, string>;
127
+ constructor(options: {
128
+ baseUrl: string;
129
+ model: string;
130
+ apiKey?: string | null;
131
+ maxTokens?: number;
132
+ temperature?: number;
133
+ extraHeaders?: Record<string, string>;
134
+ });
135
+ private buildHeaders;
136
+ generate(prompt: string): Promise<string>;
137
+ toString(): string;
138
+ }
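+ /**
+  * Usage sketch (illustrative): pointing OpenAICompatibleLLM at a local
+  * OpenAI-compatible server. The base URL and model id are placeholders
+  * for whatever your server actually exposes.
+  *
+  * @example
+  * const llm = new OpenAICompatibleLLM({
+  *   baseUrl: "http://localhost:8000/v1", // e.g. a local vLLM or LM Studio server
+  *   model: "local-model",
+  *   apiKey: null, // many local servers accept requests without a key
+  * });
+  * const reply = await llm.generate("Summarize this section.");
+  */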
139
+ /** Groq — fast LLM inference via groq SDK. */
140
+ declare class GroqLLM extends BaseLLM {
141
+ readonly apiKey: string;
142
+ readonly model: string;
143
+ private _client;
144
+ constructor(apiKey: string, model?: string);
145
+ private getClient;
146
+ generate(prompt: string): Promise<string>;
147
+ toString(): string;
148
+ }
149
+ /** Together AI — open-source models. Zero SDK dependencies. */
150
+ declare class TogetherLLM extends OpenAICompatibleLLM {
151
+ constructor(apiKey: string, model?: string, options?: {
152
+ maxTokens?: number;
153
+ temperature?: number;
154
+ });
155
+ toString(): string;
156
+ }
157
+ /** Fireworks AI — fast open-source inference. Zero SDK dependencies. */
158
+ declare class FireworksLLM extends OpenAICompatibleLLM {
159
+ constructor(apiKey: string, model?: string, options?: {
160
+ maxTokens?: number;
161
+ temperature?: number;
162
+ });
163
+ toString(): string;
164
+ }
165
+ /** OpenRouter — access any model via one API. Zero SDK dependencies. */
166
+ declare class OpenRouterLLM extends OpenAICompatibleLLM {
167
+ constructor(apiKey: string, model?: string, options?: {
168
+ maxTokens?: number;
169
+ temperature?: number;
170
+ });
171
+ toString(): string;
172
+ }
173
+ /** DeepSeek — powerful reasoning models. Zero SDK dependencies. */
174
+ declare class DeepSeekLLM extends OpenAICompatibleLLM {
175
+ constructor(apiKey: string, model?: string, options?: {
176
+ maxTokens?: number;
177
+ temperature?: number;
178
+ });
179
+ toString(): string;
180
+ }
181
+ /** Cerebras — ultra-fast inference. Zero SDK dependencies. */
182
+ declare class CerebrasLLM extends OpenAICompatibleLLM {
183
+ constructor(apiKey: string, model?: string, options?: {
184
+ maxTokens?: number;
185
+ temperature?: number;
186
+ });
187
+ toString(): string;
188
+ }
189
+ /** SambaNova — fast AI inference. Zero SDK dependencies. */
190
+ declare class SambanovaLLM extends OpenAICompatibleLLM {
191
+ constructor(apiKey: string, model?: string, options?: {
192
+ maxTokens?: number;
193
+ temperature?: number;
194
+ });
195
+ toString(): string;
196
+ }
197
+ /** HuggingFace Inference API. Zero SDK dependencies. */
198
+ declare class HuggingFaceLLM extends BaseLLM {
199
+ readonly apiKey: string;
200
+ readonly model: string;
201
+ readonly maxTokens: number;
202
+ constructor(apiKey: string, model?: string, maxTokens?: number);
203
+ generate(prompt: string): Promise<string>;
204
+ toString(): string;
205
+ }
206
+ /** Ollama native backend using /api/generate endpoint. */
207
+ declare class OllamaLLM extends BaseLLM {
208
+ readonly model: string;
209
+ baseUrl: string;
210
+ constructor(model?: string, baseUrl?: string);
211
+ generate(prompt: string): Promise<string>;
212
+ toString(): string;
213
+ }
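+ /**
+  * Usage sketch (illustrative): OllamaLLM talks to a locally running
+  * Ollama daemon. The model name is a placeholder for any model you
+  * have pulled locally.
+  *
+  * @example
+  * const llm = new OllamaLLM("llama3", "http://localhost:11434");
+  * const reply = await llm.generate("Hello");
+  */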
214
+ /** Wrap any async or sync function as an LLM backend. */
215
+ declare class FunctionLLM extends BaseLLM {
216
+ private readonly _fn;
217
+ constructor(fn: (prompt: string) => string | Promise<string>);
218
+ generate(prompt: string): Promise<string>;
219
+ toString(): string;
220
+ }
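+ /**
+  * Usage sketch (illustrative): FunctionLLM adapts any sync or async
+  * function into a backend, which is handy for tests or for clients
+  * this package does not wrap.
+  *
+  * @example
+  * const llm = new FunctionLLM(async (prompt) => {
+  *   // call your own client here; an echo is enough for a smoke test
+  *   return `echo: ${prompt.slice(0, 40)}`;
+  * });
+  */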
221
+
222
+ /** TreeDex: Tree-based document RAG framework. */
223
+
224
+ /** Result of a TreeDex query. */
225
+ declare class QueryResult {
226
+ readonly context: string;
227
+ readonly nodeIds: string[];
228
+ readonly pageRanges: [number, number][];
229
+ readonly reasoning: string;
230
+ constructor(context: string, nodeIds: string[], pageRanges: [number, number][], reasoning: string);
231
+ /** Human-readable page ranges like 'pages 5-8, 12-15'. */
232
+ get pagesStr(): string;
233
+ toString(): string;
234
+ }
235
+ /** Tree-based document index for RAG retrieval. */
236
+ declare class TreeDex {
237
+ readonly tree: TreeNode[];
238
+ readonly pages: Page[];
239
+ llm: BaseLLM | null;
240
+ private _nodeMap;
241
+ constructor(tree: TreeNode[], pages: Page[], llm?: BaseLLM | null);
242
+ /**
243
+ * Build a TreeDex index from a file.
244
+ *
245
+ * @param path - Path to document (PDF, TXT, HTML, DOCX)
246
+ * @param llm - LLM backend with .generate(prompt) method
247
+ * @param options - Optional configuration
248
+ */
249
+ static fromFile(path: string, llm: BaseLLM, options?: {
250
+ loader?: {
251
+ load(path: string): Promise<Page[]>;
252
+ };
253
+ maxTokens?: number;
254
+ overlap?: number;
255
+ verbose?: boolean;
256
+ }): Promise<TreeDex>;
257
+ /** Build a TreeDex index from pre-extracted pages. */
258
+ static fromPages(pages: Page[], llm: BaseLLM, options?: {
259
+ maxTokens?: number;
260
+ overlap?: number;
261
+ verbose?: boolean;
262
+ }): Promise<TreeDex>;
263
+ /** Create a TreeDex from an existing tree and pages. */
264
+ static fromTree(tree: TreeNode[], pages: Page[], llm?: BaseLLM | null): TreeDex;
265
+ /**
266
+ * Query the index and return relevant context.
267
+ *
268
+ * @param question - The user's question
269
+ * @param llm - Optional LLM override. Uses this.llm if not provided.
270
+ */
271
+ query(question: string, llm?: BaseLLM): Promise<QueryResult>;
272
+ /** Save the index to a JSON file. */
273
+ save(path: string): Promise<string>;
274
+ /** Load a TreeDex index from a JSON file. */
275
+ static load(path: string, llm?: BaseLLM | null): Promise<TreeDex>;
276
+ /** Pretty-print the tree structure. */
277
+ showTree(): void;
278
+ /** Return index statistics. */
279
+ stats(): Stats;
280
+ /** Find sections that exceed size thresholds. */
281
+ findLargeSections(options?: {
282
+ maxPages?: number;
283
+ maxTokens?: number;
284
+ }): TreeNode[];
285
+ }
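+ /**
+  * End-to-end sketch (file paths and model name are placeholders):
+  * fromFile() builds the tree index, query() returns a QueryResult,
+  * and save()/load() round-trip the index as JSON.
+  *
+  * @example
+  * const llm = new OpenAILLM(process.env.OPENAI_API_KEY!, "gpt-4o-mini");
+  * const index = await TreeDex.fromFile("manual.pdf", llm, { verbose: true });
+  * const result = await index.query("How do I reset the device?");
+  * console.log(result.pagesStr, result.context);
+  * await index.save("manual.index.json");
+  * const reloaded = await TreeDex.load("manual.index.json", llm);
+  */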
286
+
287
+ /**
288
+ * Document loaders for multiple file formats.
289
+ *
290
+ * Each loader returns a list of objects: [{page_num, text, token_count}]
291
+ * matching the format used by pdf-parser extractPages().
292
+ */
293
+
294
+ /** Split plain text into synthetic pages by character count. */
295
+ declare function textToPages(text: string, charsPerPage?: number): Page[];
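+ /**
+  * Sketch: chunking is by raw character count, so a 7000-character
+  * string with the default 3000 chars per page yields three synthetic
+  * pages numbered from 0.
+  *
+  * @example
+  * const pages = textToPages("x".repeat(7000));
+  * // pages.length === 3; pages[0].page_num === 0
+  */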
296
+ /** Load PDF files using pdfjs-dist. */
297
+ declare class PDFLoader {
298
+ load(path: string): Promise<Page[]>;
299
+ }
300
+ /** Load plain text or markdown files. */
301
+ declare class TextLoader {
302
+ readonly charsPerPage: number;
303
+ constructor(charsPerPage?: number);
304
+ load(path: string): Promise<Page[]>;
305
+ }
306
+ /** Load HTML files, stripping tags to plain text. */
307
+ declare class HTMLLoader {
308
+ readonly charsPerPage: number;
309
+ constructor(charsPerPage?: number);
310
+ load(path: string): Promise<Page[]>;
311
+ private stripHtml;
312
+ }
313
+ /** Load DOCX files using mammoth. */
314
+ declare class DOCXLoader {
315
+ readonly charsPerPage: number;
316
+ constructor(charsPerPage?: number);
317
+ load(path: string): Promise<Page[]>;
318
+ }
319
+ /** Auto-detect file format and load pages. */
320
+ declare function autoLoader(filePath: string): Promise<Page[]>;
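+ /**
+  * Sketch (file name illustrative; llm is any BaseLLM instance):
+  * autoLoader returns pages in the shared {page_num, text, token_count}
+  * shape, ready to feed into TreeDex.fromPages().
+  *
+  * @example
+  * const pages = await autoLoader("notes.docx");
+  * const index = await TreeDex.fromPages(pages, llm);
+  */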
321
+
322
+ /** Tree construction utilities. */
323
+
324
+ /**
325
+ * Convert a flat list with `structure` fields into a hierarchical tree.
326
+ *
327
+ * Each item must have a `structure` field like "1", "1.1", "1.2.3".
328
+ * Parent of "1.2.3" is "1.2", parent of "1.2" is "1", "1" is root.
329
+ * Output nodes get a `nodes: []` field for children.
330
+ */
331
+ declare function listToTree(flatList: Array<{
332
+ structure: string;
333
+ title: string;
334
+ physical_index: number;
335
+ [key: string]: unknown;
336
+ }>): TreeNode[];
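+ /**
+  * Worked sketch of the conversion described above: "1.1" nests under
+  * "1" because its structure string extends it, while "2" starts a new
+  * root. Titles and indices are made up for illustration.
+  *
+  * @example
+  * const tree = listToTree([
+  *   { structure: "1", title: "Intro", physical_index: 0 },
+  *   { structure: "1.1", title: "Scope", physical_index: 1 },
+  *   { structure: "2", title: "Methods", physical_index: 4 },
+  * ]);
+  * // tree[0].nodes[0].title === "Scope"; tree[1].nodes.length === 0
+  */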
337
+ /**
338
+ * Set start_index and end_index on each node.
339
+ *
340
+ * - start_index = node's physical_index
341
+ * - end_index = next sibling's physical_index - 1, or parent's end,
342
+ * or total_pages - 1 for the last root node
343
+ */
344
+ declare function assignPageRanges(tree: TreeNode[], totalPages: number): TreeNode[];
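+ /**
+  * Worked sketch of the rules above, continuing the listToTree example
+  * with a 10-page document:
+  *
+  * @example
+  * assignPageRanges(tree, 10);
+  * // "1"   -> start_index 0, end_index 3 (next sibling starts at 4)
+  * // "1.1" -> start_index 1, end_index 3 (inherits the parent's end)
+  * // "2"   -> start_index 4, end_index 9 (last root: totalPages - 1)
+  */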
345
+ /** Assign sequential IDs ('0001', '0002', ...) to nodes in DFS order. */
346
+ declare function assignNodeIds(tree: TreeNode[]): TreeNode[];
347
+ /** Return nodes that exceed page or token thresholds. */
348
+ declare function findLargeNodes(tree: TreeNode[], options?: {
349
+ maxPages?: number;
350
+ maxTokens?: number;
351
+ pages?: Page[] | null;
352
+ }): TreeNode[];
353
+ /** Add `text` field to each node by concatenating page text for its range. */
354
+ declare function embedTextInTree(tree: TreeNode[], pages: Page[]): TreeNode[];
355
+
356
+ /** Tree manipulation and utility functions. */
357
+
358
+ /** Flatten tree into {node_id: node_dict} for O(1) lookups. */
359
+ declare function createNodeMapping(tree: TreeNode[]): Record<string, TreeNode>;
360
+ /** Return a deep copy of the tree with all `text` fields removed. */
361
+ declare function stripTextFromTree(tree: TreeNode[]): TreeNode[];
362
+ /**
363
+ * Gather and concatenate text from a list of node IDs.
364
+ *
365
+ * Format:
366
+ * [Section: Title]
367
+ * text
368
+ *
369
+ * [Section: Title2]
370
+ * text2
371
+ */
372
+ declare function collectNodeTexts(nodeIds: string[], nodeMap: Record<string, TreeNode>): string;
373
+ /** Recursively count total nodes in the tree. */
374
+ declare function countNodes(tree: TreeNode[]): number;
375
+ /** Return all nodes with empty `nodes` list. */
376
+ declare function getLeafNodes(tree: TreeNode[]): TreeNode[];
377
+ /** Flatten hierarchy back to a list in DFS order. */
378
+ declare function treeToFlatList(tree: TreeNode[]): Array<Omit<TreeNode, "nodes">>;
379
+ /**
380
+ * Robust JSON extraction from LLM responses.
381
+ *
382
+ * Handles raw JSON, ```json code blocks, and minor formatting issues
383
+ * like trailing commas.
384
+ */
385
+ declare function extractJson(text: string): unknown;
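+ /**
+  * Sketch of the tolerated inputs: raw JSON, fenced code blocks, and
+  * trailing commas should all parse to the same value.
+  *
+  * @example
+  * extractJson('{"a": 1}');
+  * extractJson('```json\n{"a": 1,}\n```');
+  * // both -> { a: 1 }
+  */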
386
+ /** Pretty-print tree structure for debugging. */
387
+ declare function printTree(tree: TreeNode[], indent?: number): void;
388
+
389
+ /** PDF extraction and page grouping. */
390
+
391
+ /** Count tokens using cl100k_base-compatible encoding. */
392
+ declare function countTokens(text: string): number;
393
+ /**
394
+ * Extract text from each page of a PDF.
395
+ *
396
+ * Returns a list of objects with page_num, text, and token_count.
397
+ */
398
+ declare function extractPages(pdfPath: string): Promise<Page[]>;
399
+ /**
400
+ * Combine pages[start:end+1] into a string with physical index tags.
401
+ *
402
+ * Each page is wrapped like:
403
+ * <physical_index_0>page text</physical_index_0>
404
+ * where the number is the page's page_num.
405
+ */
406
+ declare function pagesToTaggedText(pages: Page[], start: number, end: number): string;
407
+ /**
408
+ * Split pages into token-budget groups, each returned as tagged text.
409
+ *
410
+ * Groups overlap by `overlap` pages for continuity.
411
+ */
412
+ declare function groupPages(pages: Page[], maxTokens?: number, overlap?: number): string[];
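+ /**
+  * Pipeline sketch (path illustrative): tag and batch pages under the
+  * default 20000-token budget with one page of overlap between
+  * consecutive groups.
+  *
+  * @example
+  * const pages = await extractPages("paper.pdf");
+  * const groups = groupPages(pages, 20000, 1); // defaults written out
+  * // each group is tagged text like:
+  * // <physical_index_0>page text</physical_index_0>
+  */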
413
+
414
+ /** Prompt templates for structure extraction and retrieval. */
415
+ declare function structureExtractionPrompt(text: string): string;
416
+ declare function structureContinuePrompt(previousStructure: string, text: string): string;
417
+ declare function retrievalPrompt(treeStructure: string, query: string): string;
418
+
419
+ export { BaseLLM, CerebrasLLM, ClaudeLLM, CohereLLM, DOCXLoader, DeepSeekLLM, FireworksLLM, FunctionLLM, GeminiLLM, GroqLLM, HTMLLoader, HuggingFaceLLM, type IndexData, MistralLLM, OllamaLLM, OpenAICompatibleLLM, OpenAILLM, OpenRouterLLM, PDFLoader, type Page, QueryResult, SambanovaLLM, type Stats, TextLoader, TogetherLLM, TreeDex, type TreeNode, assignNodeIds, assignPageRanges, autoLoader, collectNodeTexts, countNodes, countTokens, createNodeMapping, embedTextInTree, extractJson, extractPages, findLargeNodes, getLeafNodes, groupPages, listToTree, pagesToTaggedText, printTree, retrievalPrompt, stripTextFromTree, structureContinuePrompt, structureExtractionPrompt, textToPages, treeToFlatList };