@operor/knowledge 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +457 -0
- package/dist/index.d.ts +437 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +1442 -0
- package/dist/index.js.map +1 -0
- package/package.json +42 -0
- package/src/EmbeddingService.ts +92 -0
- package/src/IngestionPipeline.ts +357 -0
- package/src/QueryNormalizer.ts +59 -0
- package/src/QueryRewriter.ts +73 -0
- package/src/RankFusion.ts +72 -0
- package/src/RetrievalPipeline.ts +388 -0
- package/src/SQLiteKnowledgeStore.ts +379 -0
- package/src/TextChunker.ts +34 -0
- package/src/__tests__/cli-integration.test.ts +134 -0
- package/src/__tests__/content-fetcher.test.ts +156 -0
- package/src/__tests__/knowledge.test.ts +493 -0
- package/src/__tests__/retrieval-layers.test.ts +672 -0
- package/src/index.ts +41 -0
- package/src/ingestors/FileIngestor.ts +85 -0
- package/src/ingestors/SiteCrawler.ts +153 -0
- package/src/ingestors/UrlIngestor.ts +106 -0
- package/src/ingestors/WatiFaqSync.ts +75 -0
- package/src/ingestors/content-fetcher.ts +142 -0
- package/src/types.ts +62 -0
- package/tsconfig.json +9 -0
- package/tsdown.config.ts +10 -0
@@ -0,0 +1 @@
+
{"version":3,"file":"index.js","names":[],"sources":["../src/EmbeddingService.ts","../src/SQLiteKnowledgeStore.ts","../src/TextChunker.ts","../src/QueryNormalizer.ts","../src/IngestionPipeline.ts","../src/RankFusion.ts","../src/RetrievalPipeline.ts","../src/QueryRewriter.ts","../src/ingestors/content-fetcher.ts","../src/ingestors/UrlIngestor.ts","../src/ingestors/FileIngestor.ts","../src/ingestors/SiteCrawler.ts","../src/ingestors/WatiFaqSync.ts"],"sourcesContent":["import { embed, embedMany } from 'ai';\nimport { createOpenAI } from '@ai-sdk/openai';\nimport { createGoogleGenerativeAI } from '@ai-sdk/google';\nimport { mistral } from '@ai-sdk/mistral';\nimport { cohere } from '@ai-sdk/cohere';\n\nexport interface EmbeddingServiceConfig {\n provider: 'openai' | 'google' | 'mistral' | 'cohere' | 'ollama';\n apiKey?: string;\n model?: string;\n baseURL?: string;\n dimensions?: number;\n}\n\nexport class EmbeddingService {\n private config: EmbeddingServiceConfig;\n\n constructor(config: EmbeddingServiceConfig) {\n this.config = config;\n }\n\n private getModel() {\n const { provider, apiKey, baseURL, model } = this.config;\n\n switch (provider) {\n case 'openai': {\n const openai = createOpenAI({ apiKey, baseURL });\n return openai.embedding(model || 'text-embedding-3-small', {\n dimensions: this.config.dimensions,\n });\n }\n case 'google': {\n const google = createGoogleGenerativeAI({ apiKey, baseURL });\n return google.textEmbeddingModel(model || 'text-embedding-004');\n }\n case 'mistral': {\n return mistral.embedding(model || 'mistral-embed', { apiKey });\n }\n case 'cohere': {\n return cohere.embedding(model || 'embed-english-v3.0', { apiKey });\n }\n case 'ollama': {\n const ollama = createOpenAI({\n apiKey: apiKey || 'ollama',\n baseURL: baseURL || 'http://localhost:11434/v1',\n });\n return ollama.embedding(model || 'nomic-embed-text');\n }\n default:\n throw new Error(`Unknown embedding provider: ${provider}`);\n }\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get dimensions(): number {\n if (this.config.dimensions) return this.config.dimensions;\n return EmbeddingService.defaultDimensions(this.config.provider, this.config.model);\n }\n\n static defaultDimensions(provider: string, model?: string): number {\n switch (provider) {\n case 'openai':\n return 1536; // text-embedding-3-small\n case 'google':\n return 768; // text-embedding-004\n case 'mistral':\n return 1024; // mistral-embed\n case 'cohere':\n return 1024; // embed-english-v3.0\n case 'ollama':\n return 768; // nomic-embed-text (varies by model)\n default:\n return 1536;\n }\n }\n\n async embed(text: string): Promise<number[]> {\n const model = this.getModel();\n const result = await embed({ model, value: text });\n return result.embedding;\n }\n\n async embedMany(texts: string[]): Promise<number[][]> {\n if (texts.length === 0) return [];\n const model = this.getModel();\n const result = await embedMany({ model, values: texts });\n return result.embeddings;\n }\n}\n","import { statSync } from 'node:fs';\nimport Database from 'better-sqlite3';\nimport * as sqliteVec from 'sqlite-vec';\nimport type { KBDocument, KBChunk, KBSearchResult, KBSearchOptions, KnowledgeStore, KBStats } from './types.js';\n\nexport class SQLiteKnowledgeStore implements KnowledgeStore {\n private db: Database.Database;\n private dbPath: string;\n private dimensions: number;\n private dimensionWarned = false;\n\n constructor(dbPath: string = './knowledge.db', dimensions: number = 1536) {\n this.db = new Database(dbPath);\n 
this.dbPath = dbPath;\n this.dimensions = dimensions;\n this.db.pragma('journal_mode = WAL');\n this.db.pragma('foreign_keys = ON');\n sqliteVec.load(this.db);\n }\n\n getDimensions(): number {\n return this.dimensions;\n }\n\n async initialize(): Promise<void> {\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS kb_documents (\n id TEXT PRIMARY KEY,\n source_type TEXT NOT NULL,\n source_url TEXT,\n file_name TEXT,\n title TEXT,\n content TEXT NOT NULL,\n metadata TEXT,\n created_at INTEGER NOT NULL,\n updated_at INTEGER NOT NULL,\n priority INTEGER DEFAULT 2,\n content_hash TEXT\n );\n\n CREATE TABLE IF NOT EXISTS kb_chunks (\n id TEXT PRIMARY KEY,\n document_id TEXT NOT NULL,\n content TEXT NOT NULL,\n chunk_index INTEGER NOT NULL,\n metadata TEXT,\n FOREIGN KEY (document_id) REFERENCES kb_documents(id) ON DELETE CASCADE\n );\n CREATE INDEX IF NOT EXISTS idx_chunks_document ON kb_chunks(document_id);\n CREATE INDEX IF NOT EXISTS idx_documents_source_url ON kb_documents(source_url);\n `);\n\n // Migration: add columns for existing DBs\n try { this.db.exec('ALTER TABLE kb_documents ADD COLUMN priority INTEGER DEFAULT 2'); } catch {}\n try { this.db.exec('ALTER TABLE kb_documents ADD COLUMN content_hash TEXT'); } catch {}\n try { this.db.exec('CREATE INDEX IF NOT EXISTS idx_documents_source_url ON kb_documents(source_url)'); } catch {}\n\n this.db.exec(`\n CREATE VIRTUAL TABLE IF NOT EXISTS vec_chunks USING vec0(\n chunk_id TEXT PRIMARY KEY,\n embedding float[${this.dimensions}]\n );\n `);\n\n this.db.exec(`\n CREATE VIRTUAL TABLE IF NOT EXISTS fts_chunks USING fts5(\n chunk_id UNINDEXED,\n content,\n tokenize='porter unicode61'\n );\n `);\n }\n\n async close(): Promise<void> {\n this.db.close();\n }\n\n async addDocument(doc: KBDocument): Promise<void> {\n this.db.prepare(`\n INSERT OR REPLACE INTO kb_documents (id, source_type, source_url, file_name, title, content, metadata, created_at, updated_at, priority, content_hash)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `).run(\n doc.id,\n doc.sourceType,\n doc.sourceUrl || null,\n doc.fileName || null,\n doc.title || null,\n doc.content,\n doc.metadata ? JSON.stringify(doc.metadata) : null,\n doc.createdAt,\n doc.updatedAt,\n doc.priority ?? 2,\n doc.contentHash || null,\n );\n }\n\n async getDocument(id: string): Promise<KBDocument | null> {\n const row = this.db.prepare('SELECT * FROM kb_documents WHERE id = ?').get(id) as any;\n return row ? 
this.rowToDocument(row) : null;\n }\n\n async listDocuments(): Promise<KBDocument[]> {\n const rows = this.db.prepare('SELECT * FROM kb_documents ORDER BY created_at DESC').all() as any[];\n return rows.map((r) => this.rowToDocument(r));\n }\n\n async deleteDocument(id: string): Promise<void> {\n const chunks = this.db.prepare('SELECT id FROM kb_chunks WHERE document_id = ?').all(id) as any[];\n for (const chunk of chunks) {\n this.db.prepare('DELETE FROM vec_chunks WHERE chunk_id = ?').run(chunk.id);\n this.db.prepare('DELETE FROM fts_chunks WHERE chunk_id = ?').run(chunk.id);\n }\n this.db.prepare('DELETE FROM kb_chunks WHERE document_id = ?').run(id);\n this.db.prepare('DELETE FROM kb_documents WHERE id = ?').run(id);\n }\n\n async addChunks(chunks: KBChunk[]): Promise<void> {\n const insertChunk = this.db.prepare(`\n INSERT OR REPLACE INTO kb_chunks (id, document_id, content, chunk_index, metadata)\n VALUES (?, ?, ?, ?, ?)\n `);\n const insertVec = this.db.prepare(`\n INSERT OR REPLACE INTO vec_chunks (chunk_id, embedding)\n VALUES (?, ?)\n `);\n const deleteFts = this.db.prepare(`\n DELETE FROM fts_chunks WHERE chunk_id = ?\n `);\n const insertFts = this.db.prepare(`\n INSERT INTO fts_chunks (chunk_id, content)\n VALUES (?, ?)\n `);\n\n const transaction = this.db.transaction((items: KBChunk[]) => {\n for (const chunk of items) {\n insertChunk.run(\n chunk.id,\n chunk.documentId,\n chunk.content,\n chunk.chunkIndex,\n chunk.metadata ? JSON.stringify(chunk.metadata) : null,\n );\n if (chunk.embedding) {\n if (chunk.embedding.length !== this.dimensions && !this.dimensionWarned) {\n this.dimensionWarned = true;\n console.warn(\n `[KB] Dimension mismatch: store expects ${this.dimensions}d vectors but received ${chunk.embedding.length}d. ` +\n `This will cause search errors. Re-ingest your documents after switching embedding providers, ` +\n `or set the correct dimensions when creating the store.`\n );\n }\n insertVec.run(chunk.id, new Float32Array(chunk.embedding));\n }\n // FTS5: delete-then-insert for idempotent upsert (FTS5 has no OR REPLACE)\n deleteFts.run(chunk.id);\n insertFts.run(chunk.id, chunk.content);\n }\n });\n\n transaction(chunks);\n }\n\n getChunkCount(documentId: string): number {\n const row = this.db.prepare(\n 'SELECT COUNT(*) as count FROM kb_chunks WHERE document_id = ?'\n ).get(documentId) as any;\n return row?.count || 0;\n }\n\n async search(query: string, embedding: number[], options?: KBSearchOptions): Promise<KBSearchResult[]> {\n return this.searchByEmbedding(embedding, options);\n }\n\n async searchByEmbedding(embedding: number[], options?: KBSearchOptions): Promise<KBSearchResult[]> {\n const limit = options?.limit || 5;\n // When filtering by sourceTypes post-query, over-fetch to avoid missing\n // results that would be filtered out (e.g., FAQ fast-path requesting limit=1\n // but the closest vector is a non-FAQ document chunk).\n const fetchLimit = options?.sourceTypes ? 
Math.min(limit * 10, 100) : limit;\n\n const vecRows = this.db.prepare(`\n SELECT chunk_id, distance\n FROM vec_chunks\n WHERE embedding MATCH ?\n ORDER BY distance\n LIMIT ?\n `).all(new Float32Array(embedding), fetchLimit) as any[];\n\n if (vecRows.length === 0) return [];\n\n const results: KBSearchResult[] = [];\n for (const vecRow of vecRows) {\n const distance = vecRow.distance as number;\n const score = 1 / (1 + distance);\n\n if (options?.scoreThreshold && score < options.scoreThreshold) continue;\n\n const chunk = this.db.prepare('SELECT * FROM kb_chunks WHERE id = ?').get(vecRow.chunk_id) as any;\n if (!chunk) continue;\n\n const doc = this.db.prepare('SELECT * FROM kb_documents WHERE id = ?').get(chunk.document_id) as any;\n if (!doc) continue;\n\n if (options?.sourceTypes && !options.sourceTypes.includes(doc.source_type)) continue;\n\n results.push({\n chunk: {\n id: chunk.id,\n documentId: chunk.document_id,\n content: chunk.content,\n chunkIndex: chunk.chunk_index,\n metadata: chunk.metadata ? JSON.parse(chunk.metadata) : undefined,\n },\n document: this.rowToDocument(doc),\n score,\n distance,\n });\n }\n\n return results.slice(0, limit);\n }\n\n async searchByKeyword(query: string, options?: KBSearchOptions): Promise<KBSearchResult[]> {\n const limit = options?.limit || 5;\n\n // FTS5 MATCH query — escape double quotes in user input\n const safeQuery = query.replace(/\"/g, '\"\"');\n let ftsRows: any[];\n try {\n ftsRows = this.db.prepare(`\n SELECT chunk_id, rank\n FROM fts_chunks\n WHERE fts_chunks MATCH ?\n ORDER BY rank\n LIMIT ?\n `).all(safeQuery, limit * 2) as any[]; // fetch extra to allow post-filtering\n } catch {\n // FTS5 can throw on malformed queries (e.g. special chars)\n return [];\n }\n\n if (ftsRows.length === 0) return [];\n\n const results: KBSearchResult[] = [];\n for (const ftsRow of ftsRows) {\n if (results.length >= limit) break;\n\n const chunk = this.db.prepare('SELECT * FROM kb_chunks WHERE id = ?').get(ftsRow.chunk_id) as any;\n if (!chunk) continue;\n\n const doc = this.db.prepare('SELECT * FROM kb_documents WHERE id = ?').get(chunk.document_id) as any;\n if (!doc) continue;\n\n if (options?.sourceTypes && !options.sourceTypes.includes(doc.source_type)) continue;\n\n // BM25 rank is negative (lower = better), convert to a positive score\n const bm25Score = -ftsRow.rank;\n\n if (options?.scoreThreshold && bm25Score < options.scoreThreshold) continue;\n\n results.push({\n chunk: {\n id: chunk.id,\n documentId: chunk.document_id,\n content: chunk.content,\n chunkIndex: chunk.chunk_index,\n metadata: chunk.metadata ? 
JSON.parse(chunk.metadata) : undefined,\n },\n document: this.rowToDocument(doc),\n score: bm25Score,\n distance: 0,\n });\n }\n\n return results;\n }\n\n /**\n * Get all chunks from kb_chunks (text content only, no embeddings).\n * Used by rebuild to re-embed all content.\n */\n getAllChunks(): { id: string; documentId: string; content: string; chunkIndex: number; metadata?: string }[] {\n return this.db.prepare(\n 'SELECT id, document_id AS documentId, content, chunk_index AS chunkIndex, metadata FROM kb_chunks ORDER BY document_id, chunk_index'\n ).all() as any[];\n }\n\n /**\n * Drop and recreate the vec_chunks virtual table with new dimensions.\n * Preserves kb_chunks, kb_documents, and fts_chunks — only vector data is affected.\n */\n rebuildVecTable(newDimensions: number): void {\n this.db.exec('DROP TABLE IF EXISTS vec_chunks');\n this.db.exec(`\n CREATE VIRTUAL TABLE vec_chunks USING vec0(\n chunk_id TEXT PRIMARY KEY,\n embedding float[${newDimensions}]\n );\n `);\n this.dimensions = newDimensions;\n this.dimensionWarned = false;\n }\n\n /**\n * Batch-insert embeddings into vec_chunks.\n * Expects an array of { chunkId, embedding } pairs.\n */\n batchInsertEmbeddings(items: { chunkId: string; embedding: number[] }[]): void {\n const insert = this.db.prepare(\n 'INSERT OR REPLACE INTO vec_chunks (chunk_id, embedding) VALUES (?, ?)'\n );\n const tx = this.db.transaction((batch: { chunkId: string; embedding: number[] }[]) => {\n for (const item of batch) {\n insert.run(item.chunkId, new Float32Array(item.embedding));\n }\n });\n tx(items);\n }\n\n async getStats(): Promise<KBStats> {\n const docCount = this.db.prepare('SELECT COUNT(*) as count FROM kb_documents').get() as any;\n const chunkCount = this.db.prepare('SELECT COUNT(*) as count FROM kb_chunks').get() as any;\n let dbSizeBytes = 0;\n try {\n dbSizeBytes = statSync(this.dbPath).size;\n } catch {}\n return {\n documentCount: docCount.count,\n chunkCount: chunkCount.count,\n embeddingDimensions: this.dimensions,\n dbSizeBytes,\n };\n }\n\n async findBySourceUrl(url: string): Promise<KBDocument | null> {\n const row = this.db.prepare('SELECT * FROM kb_documents WHERE source_url = ?').get(url) as any;\n return row ? this.rowToDocument(row) : null;\n }\n\n async findByContentHash(hash: string): Promise<KBDocument | null> {\n const row = this.db.prepare('SELECT * FROM kb_documents WHERE content_hash = ?').get(hash) as any;\n return row ? 
this.rowToDocument(row) : null;\n }\n\n async updateDocument(id: string, updates: { content?: string; title?: string; contentHash?: string; priority?: number; metadata?: Record<string, any> }): Promise<void> {\n const sets: string[] = [];\n const values: any[] = [];\n if (updates.content !== undefined) { sets.push('content = ?'); values.push(updates.content); }\n if (updates.title !== undefined) { sets.push('title = ?'); values.push(updates.title); }\n if (updates.contentHash !== undefined) { sets.push('content_hash = ?'); values.push(updates.contentHash); }\n if (updates.priority !== undefined) { sets.push('priority = ?'); values.push(updates.priority); }\n if (updates.metadata !== undefined) { sets.push('metadata = ?'); values.push(JSON.stringify(updates.metadata)); }\n sets.push('updated_at = ?'); values.push(Date.now());\n values.push(id);\n this.db.prepare(`UPDATE kb_documents SET ${sets.join(', ')} WHERE id = ?`).run(...values);\n }\n\n async findSimilarFaq(embedding: number[], threshold: number): Promise<KBSearchResult | null> {\n const results = await this.searchByEmbedding(embedding, { sourceTypes: ['faq'], limit: 1 });\n if (results.length > 0 && results[0].score >= threshold) return results[0];\n return null;\n }\n\n private rowToDocument(row: any): KBDocument {\n return {\n id: row.id,\n sourceType: row.source_type,\n sourceUrl: row.source_url || undefined,\n fileName: row.file_name || undefined,\n title: row.title || undefined,\n content: row.content,\n metadata: row.metadata ? JSON.parse(row.metadata) : undefined,\n createdAt: row.created_at,\n updatedAt: row.updated_at,\n priority: row.priority ?? 2,\n contentHash: row.content_hash || undefined,\n };\n }\n}\n","import { RecursiveCharacterTextSplitter, MarkdownTextSplitter } from '@langchain/textsplitters';\n\nexport interface ChunkOptions {\n chunkSize?: number;\n chunkOverlap?: number;\n}\n\nexport class TextChunker {\n private defaultChunkSize: number;\n private defaultChunkOverlap: number;\n\n constructor(options?: ChunkOptions) {\n this.defaultChunkSize = options?.chunkSize || 3200;\n this.defaultChunkOverlap = options?.chunkOverlap || 200;\n }\n\n async chunk(text: string, options?: ChunkOptions): Promise<string[]> {\n const splitter = new RecursiveCharacterTextSplitter({\n chunkSize: options?.chunkSize || this.defaultChunkSize,\n chunkOverlap: options?.chunkOverlap || this.defaultChunkOverlap,\n });\n const docs = await splitter.createDocuments([text]);\n return docs.map((d) => d.pageContent);\n }\n\n async chunkMarkdown(markdown: string, options?: ChunkOptions): Promise<string[]> {\n const splitter = new MarkdownTextSplitter({\n chunkSize: options?.chunkSize || this.defaultChunkSize,\n chunkOverlap: options?.chunkOverlap || this.defaultChunkOverlap,\n });\n const docs = await splitter.createDocuments([markdown]);\n return docs.map((d) => d.pageContent);\n }\n}\n","/**\n * Query normalization for improved KB retrieval.\n * Expands chat abbreviations and normalizes whitespace before embedding.\n */\n\nconst ABBREVIATIONS: [RegExp, string][] = [\n [/\\bu\\b/gi, 'you'],\n [/\\bur\\b/gi, 'your'],\n [/\\br\\b/gi, 'are'],\n [/\\bpls\\b/gi, 'please'],\n [/\\bplz\\b/gi, 'please'],\n [/\\bthx\\b/gi, 'thanks'],\n [/\\bthnx\\b/gi, 'thanks'],\n [/\\bty\\b/gi, 'thank you'],\n [/\\bwat\\b/gi, 'what'],\n [/\\bbc\\b/gi, 'because'],\n [/\\bcuz\\b/gi, 'because'],\n [/\\bgonna\\b/gi, 'going to'],\n [/\\bwanna\\b/gi, 'want to'],\n [/\\bgotta\\b/gi, 'got to'],\n [/\\blemme\\b/gi, 'let me'],\n [/\\bgimme\\b/gi, 'give me'],\n 
[/\\bdunno\\b/gi, 'do not know'],\n [/\\bhrs\\b/gi, 'hours'],\n [/\\bmins\\b/gi, 'minutes'],\n [/\\bmsg\\b/gi, 'message'],\n [/\\bmsgs\\b/gi, 'messages'],\n [/\\binfo\\b/gi, 'information'],\n [/\\btmr\\b/gi, 'tomorrow'],\n [/\\btmrw\\b/gi, 'tomorrow'],\n [/\\bw\\/\\b/gi, 'with'],\n [/\\bw\\/o\\b/gi, 'without'],\n [/\\bidk\\b/gi, 'I do not know'],\n [/\\bimo\\b/gi, 'in my opinion'],\n [/\\bbtw\\b/gi, 'by the way'],\n [/\\basap\\b/gi, 'as soon as possible'],\n // Digit substitutions — word-boundary aware\n [/\\b4\\b/g, 'for'],\n [/\\b2\\b/g, 'to'],\n];\n\n/**\n * Normalize a user query for better embedding similarity.\n * - Expands chat abbreviations with word-boundary awareness\n * - Lowercases\n * - Collapses whitespace\n */\nexport function normalizeQuery(query: string): string {\n let normalized = query.toLowerCase();\n\n for (const [pattern, replacement] of ABBREVIATIONS) {\n normalized = normalized.replace(pattern, replacement);\n }\n\n // Collapse whitespace\n normalized = normalized.replace(/\\s+/g, ' ').trim();\n\n return normalized;\n}\n","import { randomUUID, createHash } from 'node:crypto';\nimport type { KBDocument, KBChunk, KnowledgeStore } from './types.js';\nimport type { EmbeddingService } from './EmbeddingService.js';\nimport type { TextChunker } from './TextChunker.js';\nimport type { SQLiteKnowledgeStore } from './SQLiteKnowledgeStore.js';\nimport { normalizeQuery } from './QueryNormalizer.js';\n\nexport interface ContentReformatter {\n complete(messages: { role: 'system' | 'user'; content: string }[]): Promise<{ text: string }>;\n}\n\nexport interface IngestInput {\n sourceType: KBDocument['sourceType'];\n content: string;\n title?: string;\n sourceUrl?: string;\n fileName?: string;\n metadata?: Record<string, any>;\n isMarkdown?: boolean;\n /** Opt-in to LLM Q&A extraction (expensive). Default: false (chunking path). */\n extractQA?: boolean;\n /** Document priority: 1=official, 2=supplementary, 3=archived. Auto-assigned if omitted. */\n priority?: number;\n}\n\nexport interface IngestFaqOptions {\n sourceUrl?: string;\n [key: string]: any;\n}\n\nexport interface IngestFaqResult extends KBDocument {\n existingMatch?: { id: string; question: string; answer: string; score: number };\n}\n\nexport interface RebuildResult {\n documentsRebuilt: number;\n chunksRebuilt: number;\n oldDimensions: number;\n newDimensions: number;\n}\n\nexport class IngestionPipeline {\n private store: KnowledgeStore;\n private embedder: EmbeddingService;\n private chunker: TextChunker;\n private llmProvider?: ContentReformatter;\n\n constructor(store: KnowledgeStore, embedder: EmbeddingService, chunker: TextChunker, llmProvider?: ContentReformatter) {\n this.store = store;\n this.embedder = embedder;\n this.chunker = chunker;\n this.llmProvider = llmProvider;\n }\n\n private cleanContent(text: string): string {\n return text\n .replace(/!\\[.*?\\]\\(.*?\\)/g, '') // strip image markdown\n .replace(/[ \\t]+/g, ' ') // collapse horizontal whitespace\n .replace(/(\\n\\s*){3,}/g, '\\n\\n') // collapse 3+ newlines to 2\n .split('\\n').filter((line, i, arr) => i === 0 || line !== arr[i - 1]).join('\\n') // dedup consecutive identical lines\n .trim()\n .slice(0, 15000);\n }\n\n private async extractQAPairs(content: string, title?: string): Promise<Array<{ question: string; answer: string }>> {\n const cleaned = this.cleanContent(content);\n const titleHint = title ? 
`\\nPage title: \"${title}\"` : '';\n const response = await this.llmProvider!.complete([\n {\n role: 'system',\n content: `You extract self-contained Q&A pairs from web page content. Each answer must include ALL relevant details (names, numbers, prices, dates) so it can be understood without the original page. Output ONLY a JSON array of {\"question\":\"...\",\"answer\":\"...\"} objects. No markdown fences.`,\n },\n {\n role: 'user',\n content: `Extract Q&A pairs from this content.${titleHint}\\n\\n${cleaned}`,\n },\n ]);\n\n try {\n // Strip markdown fences if present\n const text = response.text.replace(/^```(?:json)?\\s*/m, '').replace(/\\s*```\\s*$/m, '').trim();\n const parsed = JSON.parse(text);\n if (Array.isArray(parsed)) return parsed.filter((p: any) => p.question && p.answer);\n } catch {\n // Try to find JSON array in response\n const match = response.text.match(/\\[[\\s\\S]*\\]/);\n if (match) {\n try {\n const parsed = JSON.parse(match[0]);\n if (Array.isArray(parsed)) return parsed.filter((p: any) => p.question && p.answer);\n } catch { /* fall through */ }\n }\n }\n return [];\n }\n\n private computeHash(content: string): string {\n return createHash('sha256').update(content).digest('hex');\n }\n\n async ingest(input: IngestInput): Promise<KBDocument> {\n if (!input.content || input.content.trim().length === 0) {\n throw new Error(`No content to ingest for \"${input.title || input.sourceUrl || 'unknown'}\"`);\n }\n\n const sqliteStore = this.store as SQLiteKnowledgeStore;\n const cleaned = this.cleanContent(input.content);\n const contentHash = this.computeHash(cleaned);\n\n // Auto-assign priority by source type if not specified\n const priority = input.priority ?? (input.sourceType === 'faq' ? 1 : 2);\n\n // Dedup: check by source URL first\n if (input.sourceUrl && sqliteStore.findBySourceUrl) {\n const existing = await sqliteStore.findBySourceUrl(input.sourceUrl);\n if (existing) {\n // Update existing document instead of duplicating\n await sqliteStore.updateDocument(existing.id, {\n content: input.content,\n title: input.title,\n contentHash,\n priority,\n metadata: input.metadata,\n });\n // Delete old chunks and re-chunk\n await this.store.deleteDocument(existing.id);\n // Re-add the updated document (deleteDocument removes everything)\n // Fall through to create new doc with same content\n }\n }\n\n // Dedup: check by content hash\n if (sqliteStore.findByContentHash) {\n const existing = await sqliteStore.findByContentHash(contentHash);\n if (existing) {\n console.log(`[KB] Duplicate content detected (hash match), skipping: \"${input.title || input.sourceUrl || 'unknown'}\"`);\n return existing;\n }\n }\n\n // LLM Q&A extraction: only when explicitly opted in\n if (input.extractQA && this.llmProvider) {\n const pairs = await this.extractQAPairs(input.content, input.title);\n if (pairs.length > 0) {\n for (const pair of pairs) {\n await this.ingestFaq(pair.question, pair.answer, { sourceUrl: input.sourceUrl });\n }\n const now = Date.now();\n const parentDoc: KBDocument = {\n id: randomUUID(),\n sourceType: input.sourceType,\n sourceUrl: input.sourceUrl,\n fileName: input.fileName,\n title: input.title,\n content: `Extracted ${pairs.length} Q&A pairs`,\n metadata: { ...input.metadata, faqCount: pairs.length },\n createdAt: now,\n updatedAt: now,\n priority,\n contentHash,\n };\n await this.store.addDocument(parentDoc);\n return parentDoc;\n }\n }\n\n // Default path: chunk content → embed → store\n const now = Date.now();\n const doc: KBDocument = {\n id: 
randomUUID(),\n sourceType: input.sourceType,\n sourceUrl: input.sourceUrl,\n fileName: input.fileName,\n title: input.title,\n content: input.content,\n metadata: input.metadata,\n createdAt: now,\n updatedAt: now,\n priority,\n contentHash,\n };\n\n await this.store.addDocument(doc);\n\n // Use MarkdownTextSplitter for URL content or explicit markdown\n const useMarkdown = input.isMarkdown || input.sourceType === 'url';\n const texts = useMarkdown\n ? await this.chunker.chunkMarkdown(input.content)\n : await this.chunker.chunk(input.content);\n const embeddings = await this.embedder.embedMany(texts);\n\n const chunks: KBChunk[] = texts.map((text, i) => ({\n id: randomUUID(),\n documentId: doc.id,\n content: text,\n chunkIndex: i,\n embedding: embeddings[i],\n metadata: input.metadata,\n }));\n\n await this.store.addChunks(chunks);\n\n if (this.store.getChunkCount) {\n const storedCount = this.store.getChunkCount(doc.id);\n if (storedCount === 0) {\n console.warn(`[KB] WARNING: Document \"${input.title || doc.id}\" was saved but NO vector embeddings were stored.`);\n }\n }\n\n return doc;\n }\n\n async ingestFaq(question: string, answer: string, metadata?: Record<string, any> & { forceReplace?: boolean }): Promise<IngestFaqResult> {\n const embedding = await this.embedder.embed(normalizeQuery(question));\n const sqliteStore = this.store as SQLiteKnowledgeStore;\n\n // FAQ dedup: check for similar existing FAQ\n if (sqliteStore.findSimilarFaq && !metadata?.forceReplace) {\n const match = await sqliteStore.findSimilarFaq(embedding, 0.90);\n if (match) {\n const existingQ = match.chunk.metadata?.question || match.document.title;\n const existingA = match.chunk.metadata?.answer;\n // Return the new doc with existingMatch info so caller can decide\n const now = Date.now();\n const content = `Q: ${question}\\nA: ${answer}`;\n const doc: IngestFaqResult = {\n id: randomUUID(),\n sourceType: 'faq',\n sourceUrl: metadata?.sourceUrl,\n title: question,\n content,\n metadata: { ...metadata, question, answer },\n priority: 1,\n createdAt: now,\n updatedAt: now,\n existingMatch: { id: match.document.id, question: existingQ, answer: existingA, score: match.score },\n };\n return doc;\n }\n }\n\n // If forceReplace, delete the existing FAQ first (caller provides the ID via metadata)\n if (metadata?.forceReplace && metadata?.replaceId) {\n await this.store.deleteDocument(metadata.replaceId);\n }\n\n const now = Date.now();\n const content = `Q: ${question}\\nA: ${answer}`;\n const doc: IngestFaqResult = {\n id: randomUUID(),\n sourceType: 'faq',\n sourceUrl: metadata?.sourceUrl,\n title: question,\n content,\n metadata: { ...metadata, question, answer },\n priority: 1,\n createdAt: now,\n updatedAt: now,\n };\n\n await this.store.addDocument(doc);\n\n const chunk: KBChunk = {\n id: randomUUID(),\n documentId: doc.id,\n content,\n chunkIndex: 0,\n embedding,\n metadata: { question, answer },\n };\n\n await this.store.addChunks([chunk]);\n return doc;\n }\n\n /**\n * Rebuild all vector embeddings using the current embedding provider.\n * Preserves all document content, chunks, and FTS data — only replaces vectors.\n *\n * Requires the store to be a SQLiteKnowledgeStore (uses rebuild-specific methods).\n */\n async rebuild(onProgress?: (current: number, total: number, docTitle: string) => void): Promise<RebuildResult> {\n const sqliteStore = this.store as SQLiteKnowledgeStore;\n if (!sqliteStore.getAllChunks || !sqliteStore.rebuildVecTable || !sqliteStore.batchInsertEmbeddings) {\n throw new Error('Rebuild 
requires a SQLiteKnowledgeStore with rebuild methods.');\n }\n\n const oldDimensions = sqliteStore.getDimensions();\n const newDimensions = this.embedder.dimensions;\n\n // Get all documents (for sourceType lookup) and all chunks\n const documents = await this.store.listDocuments();\n const docMap = new Map(documents.map(d => [d.id, d]));\n const allChunks = sqliteStore.getAllChunks();\n\n if (allChunks.length === 0) {\n return { documentsRebuilt: 0, chunksRebuilt: 0, oldDimensions, newDimensions };\n }\n\n // Drop and recreate vec_chunks with new dimensions\n sqliteStore.rebuildVecTable(newDimensions);\n\n // Group chunks by document for progress reporting\n const chunksByDoc = new Map<string, typeof allChunks>();\n for (const chunk of allChunks) {\n const list = chunksByDoc.get(chunk.documentId) || [];\n list.push(chunk);\n chunksByDoc.set(chunk.documentId, list);\n }\n\n let processedDocs = 0;\n const totalDocs = chunksByDoc.size;\n let totalChunksRebuilt = 0;\n\n for (const [docId, chunks] of chunksByDoc) {\n const doc = docMap.get(docId);\n const docTitle = doc?.title || docId.slice(0, 8);\n\n onProgress?.(processedDocs, totalDocs, docTitle);\n\n // Determine what text to embed per chunk\n const textsToEmbed: string[] = [];\n for (const chunk of chunks) {\n if (doc?.sourceType === 'faq') {\n // FAQ: embed the normalized question for consistent matching\n const meta = chunk.metadata ? JSON.parse(chunk.metadata) : null;\n const question = meta?.question || doc.title || chunk.content;\n textsToEmbed.push(normalizeQuery(question));\n } else {\n textsToEmbed.push(chunk.content);\n }\n }\n\n // Embed all chunks for this document in one batch\n const embeddings = await this.embedder.embedMany(textsToEmbed);\n\n // Insert new embeddings\n const items = chunks.map((chunk, i) => ({\n chunkId: chunk.id,\n embedding: embeddings[i],\n }));\n sqliteStore.batchInsertEmbeddings(items);\n\n totalChunksRebuilt += chunks.length;\n processedDocs++;\n }\n\n onProgress?.(totalDocs, totalDocs, 'done');\n\n return {\n documentsRebuilt: totalDocs,\n chunksRebuilt: totalChunksRebuilt,\n oldDimensions,\n newDimensions,\n };\n }\n}\n","/**\n * Reciprocal Rank Fusion (RRF) for combining multiple ranked result sets.\n * Standard technique for hybrid search (vector + keyword).\n */\n\n/**\n * Fuse multiple ranked result sets using Reciprocal Rank Fusion.\n *\n * @param resultSets - Array of Maps where key = item ID, value = rank (0-based)\n * @param k - Smoothing constant (default 60, industry standard)\n * @returns Map of item ID → fused RRF score, sorted descending by score\n */\nexport function reciprocalRankFusion(\n resultSets: Map<string, number>[],\n k: number = 60,\n): Map<string, number> {\n const scores = new Map<string, number>();\n\n for (const rankMap of resultSets) {\n for (const [id, rank] of rankMap) {\n const prev = scores.get(id) ?? 
0;\n scores.set(id, prev + 1 / (k + rank));\n }\n }\n\n // Sort by score descending\n const sorted = new Map(\n [...scores.entries()].sort((a, b) => b[1] - a[1]),\n );\n\n return sorted;\n}\n\n/**\n * Weighted Score Fusion: combine vector and keyword scores using weighted average.\n * BM25 scores are min-max normalized to 0-1 before combining.\n *\n * @returns Map of item ID → fused score, sorted descending\n */\nexport function weightedScoreFusion(\n vectorResults: { id: string; score: number }[],\n keywordResults: { id: string; score: number }[],\n vectorWeight: number = 0.7,\n keywordWeight: number = 0.3,\n): Map<string, number> {\n // Min-max normalize BM25 scores to 0-1\n const bm25Scores = new Map<string, number>();\n if (keywordResults.length > 0) {\n const scores = keywordResults.map(r => r.score);\n const min = Math.min(...scores);\n const max = Math.max(...scores);\n const range = max - min || 1;\n for (const r of keywordResults) {\n bm25Scores.set(r.id, (r.score - min) / range);\n }\n }\n\n const vecScores = new Map<string, number>();\n for (const r of vectorResults) vecScores.set(r.id, r.score);\n\n // Combine all IDs\n const allIds = new Set([...vecScores.keys(), ...bm25Scores.keys()]);\n const fused = new Map<string, number>();\n\n for (const id of allIds) {\n const vs = vecScores.get(id) ?? 0;\n const ks = bm25Scores.get(id) ?? 0;\n fused.set(id, vectorWeight * vs + keywordWeight * ks);\n }\n\n return new Map([...fused.entries()].sort((a, b) => b[1] - a[1]));\n}\n","import type { KBSearchResult, KBSearchOptions, KnowledgeStore } from './types.js';\nimport type { EmbeddingService } from './EmbeddingService.js';\nimport type { QueryRewriter } from './QueryRewriter.js';\nimport { normalizeQuery } from './QueryNormalizer.js';\nimport { reciprocalRankFusion, weightedScoreFusion } from './RankFusion.js';\n\nexport interface RetrievalResult {\n results: KBSearchResult[];\n context: string;\n isFaqMatch: boolean;\n rewritten?: string;\n /** Raw FAQ answer extracted from metadata (only set when isFaqMatch is true). */\n faqAnswer?: string;\n /** Raw FAQ question extracted from metadata (only set when isFaqMatch is true). */\n faqQuestion?: string;\n /** Multiple FAQ matches from compound query splitting. */\n faqMatches?: Array<{ faqQuestion: string; faqAnswer: string; score: number }>;\n}\n\nexport interface RetrievalPipelineOptions {\n faqThreshold?: number;\n faqLowThreshold?: number;\n faqScoreGap?: number;\n useHybridSearch?: boolean;\n queryRewriter?: QueryRewriter;\n rewriteHighThreshold?: number;\n rewriteLowThreshold?: number;\n fusionStrategy?: 'rrf' | 'weighted';\n}\n\n/**\n * Heuristic splitter for compound questions. 
No LLM call — zero latency cost.\n * Splits on \"?\" followed by more text, or \" and \" when both sides are >3 chars.\n * Returns the original query in a single-element array if no split detected.\n * Capped at 4 sub-queries max.\n */\nexport function splitCompoundQuery(query: string): string[] {\n // Strategy 1: Split on \"?\" followed by more text\n const qParts = query.split(/\\?\\s*/).filter(p => p.trim().length > 3);\n if (qParts.length > 1) {\n return qParts.slice(0, 4).map(p => p.trim());\n }\n\n // Strategy 2: Split on \" and \" when both sides are >3 chars\n // Use word boundary to avoid splitting \"android\", \"band\", etc.\n const andParts = query.split(/\\s+and\\s+/i).filter(p => p.trim().length > 3);\n if (andParts.length > 1) {\n return andParts.slice(0, 4).map(p => p.trim());\n }\n\n return [query];\n}\n\nexport class RetrievalPipeline {\n private store: KnowledgeStore;\n private embedder: EmbeddingService;\n private faqThreshold: number;\n private faqLowThreshold: number;\n private faqScoreGap: number;\n private useHybridSearch: boolean;\n private queryRewriter?: QueryRewriter;\n private rewriteHighThreshold: number;\n private rewriteLowThreshold: number;\n private fusionStrategy: 'rrf' | 'weighted';\n\n constructor(store: KnowledgeStore, embedder: EmbeddingService, faqThreshold?: number);\n constructor(store: KnowledgeStore, embedder: EmbeddingService, options?: RetrievalPipelineOptions);\n constructor(\n store: KnowledgeStore,\n embedder: EmbeddingService,\n thresholdOrOptions?: number | RetrievalPipelineOptions,\n ) {\n this.store = store;\n this.embedder = embedder;\n\n if (typeof thresholdOrOptions === 'number') {\n this.faqThreshold = thresholdOrOptions;\n this.faqLowThreshold = 0.70;\n this.faqScoreGap = 0.15;\n this.useHybridSearch = true;\n this.rewriteHighThreshold = 0.70;\n this.rewriteLowThreshold = 0.50;\n this.fusionStrategy = 'rrf';\n } else {\n const opts = thresholdOrOptions ?? {};\n this.faqThreshold = opts.faqThreshold ?? 0.85;\n this.faqLowThreshold = opts.faqLowThreshold ?? 0.70;\n this.faqScoreGap = opts.faqScoreGap ?? 0.15;\n this.useHybridSearch = opts.useHybridSearch ?? true;\n this.queryRewriter = opts.queryRewriter;\n this.rewriteHighThreshold = opts.rewriteHighThreshold ?? 0.70;\n this.rewriteLowThreshold = opts.rewriteLowThreshold ?? 0.50;\n this.fusionStrategy = opts.fusionStrategy ?? 'rrf';\n }\n }\n\n async retrieve(query: string, options?: KBSearchOptions): Promise<RetrievalResult> {\n const subQueries = splitCompoundQuery(query);\n\n // Single query — use existing path\n if (subQueries.length <= 1) {\n return this.retrieveSingle(query, options);\n }\n\n // Compound query — retrieve each sub-query independently\n const subResults = await Promise.all(\n subQueries.map(sq => this.retrieveSingle(sq, options))\n );\n\n // Collect all FAQ matches, deduplicate by document ID\n const seen = new Set<string>();\n const faqMatches: Array<{ faqQuestion: string; faqAnswer: string; score: number }> = [];\n\n for (const sr of subResults) {\n if (sr.isFaqMatch && sr.faqAnswer && sr.faqQuestion) {\n const docId = sr.results[0]?.document?.id;\n if (docId && !seen.has(docId)) {\n seen.add(docId);\n faqMatches.push({\n faqQuestion: sr.faqQuestion,\n faqAnswer: sr.faqAnswer,\n score: sr.results[0]?.score ?? 
0,\n });\n }\n }\n }\n\n // If 2+ FAQ matches, return combined result\n if (faqMatches.length >= 2) {\n const allResults = subResults.flatMap(sr => sr.results);\n // Deduplicate results by chunk ID\n const seenChunks = new Set<string>();\n const dedupedResults = allResults.filter(r => {\n if (seenChunks.has(r.chunk.id)) return false;\n seenChunks.add(r.chunk.id);\n return true;\n });\n\n return {\n results: dedupedResults,\n context: this.formatContext(dedupedResults),\n isFaqMatch: true,\n faqMatches,\n // Use first match's answer for backward compat\n faqAnswer: faqMatches[0].faqAnswer,\n faqQuestion: faqMatches[0].faqQuestion,\n };\n }\n\n // 0-1 FAQ matches — fall back to single-query retrieval with original query\n return this.retrieveSingle(query, options);\n }\n\n private async retrieveSingle(query: string, options?: KBSearchOptions): Promise<RetrievalResult> {\n // Layer 1: Normalize query (expand abbreviations, lowercase, collapse whitespace)\n const normalized = normalizeQuery(query);\n const embedding = await this.embedder.embed(normalized);\n\n // FAQ fast-path: search FAQ docs first (top 2 for score gap analysis)\n const faqResults = await this.store.searchByEmbedding(embedding, {\n ...options,\n sourceTypes: ['faq'],\n limit: 2,\n });\n\n if (faqResults.length > 0) {\n // FAQ freshness tiebreak: when top 2 are within 0.02, prefer the newer one\n let top = faqResults[0];\n if (faqResults.length > 1) {\n const scoreDiff = top.score - faqResults[1].score;\n if (scoreDiff <= 0.02 && (faqResults[1].document.updatedAt ?? 0) > (top.document.updatedAt ?? 0)) {\n top = faqResults[1];\n }\n }\n\n const faqAnswer = top.chunk.metadata?.answer || top.document.metadata?.answer;\n const faqQuestion = top.chunk.metadata?.question || top.document.metadata?.question;\n\n // Layer 2: Score gap analysis\n // High confidence: score >= 0.85\n if (top.score >= this.faqThreshold) {\n return {\n results: [top],\n context: this.formatContext([top]),\n isFaqMatch: true,\n faqAnswer,\n faqQuestion,\n };\n }\n\n // Medium confidence with clear standout: score >= 0.70 && gap > 0.15\n if (top.score >= this.faqLowThreshold) {\n const gap = faqResults.length > 1 ? top.score - faqResults[1].score : 1;\n if (gap > this.faqScoreGap) {\n return {\n results: [top],\n context: this.formatContext([top]),\n isFaqMatch: true,\n faqAnswer,\n faqQuestion,\n };\n }\n }\n }\n\n // Layer 3: Hybrid search (vector + FTS5 keyword) with RRF\n const results = await this.hybridSearch(normalized, embedding, options);\n\n // Layer 4: Conditional LLM query rewrite\n // Only attempt if: rewriter is configured, top score is in the \"uncertain\" band,\n // and there's something in the KB worth re-matching against.\n const topScore = results.length > 0 ? results[0].score : 0;\n if (\n this.queryRewriter &&\n topScore >= this.rewriteLowThreshold &&\n topScore < this.rewriteHighThreshold\n ) {\n try {\n const rewriteResult = await this.queryRewriter.rewrite(normalized);\n const rewrittenEmbedding = await this.embedder.embed(rewriteResult.rewritten);\n\n // Re-run FAQ fast-path with rewritten query\n const rewrittenFaqResults = await this.store.searchByEmbedding(rewrittenEmbedding, {\n ...options,\n sourceTypes: ['faq'],\n limit: 2,\n });\n\n if (rewrittenFaqResults.length > 0 && rewrittenFaqResults[0].score >= this.faqLowThreshold) {\n const top = rewrittenFaqResults[0];\n const gap = rewrittenFaqResults.length > 1 ? 
top.score - rewrittenFaqResults[1].score : 1;\n if (top.score >= this.faqThreshold || gap > this.faqScoreGap) {\n const faqAnswer = top.chunk.metadata?.answer || top.document.metadata?.answer;\n const faqQuestion = top.chunk.metadata?.question || top.document.metadata?.question;\n return {\n results: [top],\n context: this.formatContext([top]),\n isFaqMatch: true,\n rewritten: rewriteResult.rewritten,\n faqAnswer,\n faqQuestion,\n };\n }\n }\n\n // Re-run hybrid search with rewritten query\n const rewrittenResults = await this.hybridSearch(rewriteResult.rewritten, rewrittenEmbedding, options);\n if (rewrittenResults.length > 0 && rewrittenResults[0].score > topScore) {\n return {\n results: rewrittenResults,\n context: this.formatContext(rewrittenResults),\n isFaqMatch: false,\n rewritten: rewriteResult.rewritten,\n };\n }\n } catch {\n // Rewrite failed — fall through with original results\n }\n }\n\n return {\n results,\n context: this.formatContext(results),\n isFaqMatch: false,\n };\n }\n\n private async hybridSearch(\n query: string,\n embedding: number[],\n options?: KBSearchOptions,\n ): Promise<KBSearchResult[]> {\n const limit = options?.limit || 5;\n\n // If hybrid search is disabled or store doesn't support keyword search, vector-only\n if (!this.useHybridSearch || !this.store.searchByKeyword) {\n const vecResults = await this.store.searchByEmbedding(embedding, {\n ...options,\n limit,\n });\n return this.applyBoosts(vecResults.slice(0, limit));\n }\n\n // Run vector search and FTS5 keyword search in parallel\n const searchOpts = { ...options, limit: limit * 2 };\n const [vecResults, ftsResults] = await Promise.all([\n this.store.searchByEmbedding(embedding, searchOpts),\n this.store.searchByKeyword(query, searchOpts),\n ]);\n\n // If FTS returned nothing, fall back to vector-only\n if (ftsResults.length === 0) {\n return this.applyBoosts(vecResults.slice(0, limit));\n }\n\n let fusedResults: KBSearchResult[];\n\n if (this.fusionStrategy === 'weighted') {\n // Weighted score fusion: combine actual scores\n const vecItems = vecResults.map(r => ({ id: r.chunk.id, score: r.score }));\n const ftsItems = ftsResults.map(r => ({ id: r.chunk.id, score: r.score }));\n const fused = weightedScoreFusion(vecItems, ftsItems);\n\n const resultMap = new Map<string, KBSearchResult>();\n for (const r of vecResults) resultMap.set(r.chunk.id, r);\n for (const r of ftsResults) {\n if (!resultMap.has(r.chunk.id)) resultMap.set(r.chunk.id, r);\n }\n\n fusedResults = [];\n for (const [chunkId, fusedScore] of fused) {\n if (fusedResults.length >= limit) break;\n const result = resultMap.get(chunkId);\n if (result) {\n // Use fused score as the result score for weighted strategy\n fusedResults.push({ ...result, score: fusedScore });\n }\n }\n } else {\n // RRF: rank-based fusion (default)\n const vecRanks = new Map<string, number>();\n vecResults.forEach((r, i) => vecRanks.set(r.chunk.id, i));\n\n const ftsRanks = new Map<string, number>();\n ftsResults.forEach((r, i) => ftsRanks.set(r.chunk.id, i));\n\n const fused = reciprocalRankFusion([vecRanks, ftsRanks]);\n\n const resultMap = new Map<string, KBSearchResult>();\n for (const r of vecResults) resultMap.set(r.chunk.id, r);\n for (const r of ftsResults) {\n if (!resultMap.has(r.chunk.id)) resultMap.set(r.chunk.id, r);\n }\n\n // RRF preserves original vector similarity scores for downstream thresholds\n fusedResults = [];\n for (const [chunkId, _rrfScore] of fused) {\n if (fusedResults.length >= limit) break;\n const result = resultMap.get(chunkId);\n 
if (result) {\n fusedResults.push(result);\n }\n }\n }\n\n return this.applyBoosts(fusedResults);\n }\n\n /**\n * Apply freshness and priority boosts to search results, then re-sort.\n */\n private applyBoosts(results: KBSearchResult[]): KBSearchResult[] {\n if (results.length === 0) return results;\n\n const thirtyDaysAgo = Date.now() - (30 * 24 * 60 * 60 * 1000);\n\n const boosted = results.map(r => {\n let score = r.score;\n\n // Freshness boost: +0.05 for docs updated within 30 days\n if (r.document.updatedAt && r.document.updatedAt > thirtyDaysAgo) {\n score += 0.05;\n }\n\n // Priority boost\n const priority = (r.document as any).priority ?? 2;\n if (priority === 1) score += 0.03;\n else if (priority === 3) score -= 0.02;\n\n return { ...r, score };\n });\n\n // Re-sort by boosted score\n boosted.sort((a, b) => b.score - a.score);\n return boosted;\n }\n\n private formatContext(results: KBSearchResult[]): string {\n if (results.length === 0) return '';\n\n const sections = results.map((r, i) => {\n const source = r.document.title || r.document.sourceUrl || r.document.fileName || 'Unknown';\n return `### Source ${i + 1}: ${source} (score: ${r.score.toFixed(2)})\\n${r.chunk.content}`;\n });\n\n return `## Knowledge Base Context\\n\\n${sections.join('\\n\\n')}`;\n }\n}\n","import type { LanguageModelV1 } from 'ai';\nimport { generateText } from 'ai';\n\nconst SYSTEM_PROMPT =\n 'You are a query normalizer. Rewrite the following informal/casual query into a clear, well-formed question. Only output the rewritten question, nothing else.';\n\nconst MAX_CACHE_SIZE = 1000;\n\nexport interface QueryRewriterOptions {\n model: LanguageModelV1;\n maxCacheSize?: number;\n}\n\nexport interface RewriteResult {\n original: string;\n rewritten: string;\n cached: boolean;\n tokenUsage?: { prompt: number; completion: number };\n}\n\nexport class QueryRewriter {\n private model: LanguageModelV1;\n private cache: Map<string, string>;\n private maxCacheSize: number;\n\n constructor(options: QueryRewriterOptions) {\n this.model = options.model;\n this.cache = new Map();\n this.maxCacheSize = options.maxCacheSize ?? MAX_CACHE_SIZE;\n }\n\n async rewrite(query: string): Promise<RewriteResult> {\n const cacheKey = query.toLowerCase().trim();\n\n // Check cache first\n const cached = this.cache.get(cacheKey);\n if (cached) {\n return { original: query, rewritten: cached, cached: true };\n }\n\n const { text, usage } = await generateText({\n model: this.model,\n system: SYSTEM_PROMPT,\n prompt: query,\n });\n\n const rewritten = text.trim();\n\n // LRU eviction: delete oldest entry if at capacity\n if (this.cache.size >= this.maxCacheSize) {\n const oldest = this.cache.keys().next().value!;\n this.cache.delete(oldest);\n }\n this.cache.set(cacheKey, rewritten);\n\n return {\n original: query,\n rewritten,\n cached: false,\n tokenUsage: usage\n ? 
{ prompt: usage.promptTokens, completion: usage.completionTokens }\n : undefined,\n };\n }\n\n get cacheSize(): number {\n return this.cache.size;\n }\n\n clearCache(): void {\n this.cache.clear();\n }\n}\n","import { Readability } from '@mozilla/readability';\nimport { parseHTML } from 'linkedom';\n\nexport interface FetchContentOptions {\n crawl4aiUrl?: string;\n}\n\nexport interface FetchContentResult {\n title: string;\n content: string;\n isMarkdown: boolean;\n}\n\n// Cache Crawl4AI health check result for 5 minutes\nlet crawl4aiHealthy: boolean | null = null;\nlet crawl4aiHealthCheckedAt = 0;\nconst HEALTH_CACHE_MS = 5 * 60 * 1000;\n\n/** Reset the Crawl4AI health check cache. Used in tests. */\nexport function resetCrawl4aiHealthCache(): void {\n crawl4aiHealthy = null;\n crawl4aiHealthCheckedAt = 0;\n}\n\n/**\n * Fetch raw HTML from a URL. Used for link extraction, sitemaps, etc.\n */\nexport async function fetchHtml(url: string): Promise<string> {\n const response = await fetch(url, {\n headers: { 'User-Agent': 'Operor-KB/1.0' },\n });\n if (!response.ok) throw new Error(`Failed to fetch ${url}: ${response.status}`);\n return response.text();\n}\n\n/**\n * Smart content fetch: tries Crawl4AI first (if configured), falls back to Readability.\n */\nexport async function fetchContent(url: string, options?: FetchContentOptions): Promise<FetchContentResult> {\n if (options?.crawl4aiUrl) {\n try {\n if (await isCrawl4aiHealthy(options.crawl4aiUrl)) {\n return await fetchViaCrawl4AI(url, options.crawl4aiUrl);\n }\n } catch {\n // Fall through to Readability\n }\n }\n\n const html = await fetchHtml(url);\n const { title, content } = extractFromHtml(html, url);\n return { title, content, isMarkdown: false };\n}\n\n/**\n * Extract readable content from pre-fetched HTML using @mozilla/readability.\n */\nexport function extractFromHtml(html: string, url: string): { title: string; content: string } {\n const { document } = parseHTML(html);\n const reader = new Readability(document as any, { url });\n const article = reader.parse();\n return {\n title: article?.title || '',\n content: article?.textContent?.trim() || '',\n };\n}\n\n/**\n * Extract same-domain links from HTML.\n */\nexport function extractLinks(html: string, baseUrl: string): string[] {\n const { document } = parseHTML(html);\n const links: string[] = [];\n const base = new URL(baseUrl);\n\n for (const a of document.querySelectorAll('a[href]')) {\n try {\n const href = (a as any).getAttribute('href');\n if (!href) continue;\n const resolved = new URL(href, baseUrl);\n if (resolved.hostname === base.hostname && resolved.protocol.startsWith('http')) {\n links.push(resolved.href.split('#')[0]);\n }\n } catch {\n // Skip invalid URLs\n }\n }\n\n return [...new Set(links)];\n}\n\nasync function isCrawl4aiHealthy(baseUrl: string): Promise<boolean> {\n if (crawl4aiHealthy !== null && Date.now() - crawl4aiHealthCheckedAt < HEALTH_CACHE_MS) {\n return crawl4aiHealthy;\n }\n try {\n const res = await fetch(`${baseUrl}/health`, { signal: AbortSignal.timeout(2000) });\n crawl4aiHealthy = res.ok;\n } catch {\n crawl4aiHealthy = false;\n }\n crawl4aiHealthCheckedAt = Date.now();\n return crawl4aiHealthy;\n}\n\nasync function fetchViaCrawl4AI(url: string, baseUrl: string): Promise<FetchContentResult> {\n const res = await fetch(`${baseUrl}/crawl`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({\n urls: [url],\n browser_config: { type: 'BrowserConfig', params: { headless: true } },\n 
crawler_config: {\n type: 'CrawlerRunConfig',\n params: {\n cache_mode: 'bypass',\n markdown_generator: {\n type: 'DefaultMarkdownGenerator',\n params: {\n content_filter: { type: 'PruningContentFilter', params: { threshold: 0.48 } },\n },\n },\n },\n },\n }),\n signal: AbortSignal.timeout(30000),\n });\n\n if (!res.ok) throw new Error(`Crawl4AI error: ${res.status}`);\n const data = await res.json() as any;\n const result = data.results?.[0];\n if (!result) throw new Error('Crawl4AI returned no results');\n\n const markdown = result.markdown?.fit_markdown || result.markdown?.raw_markdown || '';\n if (!markdown) throw new Error('Crawl4AI returned empty markdown');\n\n // Extract title from first heading\n const titleMatch = markdown.match(/^#\\s+(.+)$/m);\n const title = titleMatch?.[1] || '';\n\n return { title, content: markdown, isMarkdown: true };\n}\n","import { fetchHtml, fetchContent, extractLinks } from './content-fetcher.js';\nimport type { FetchContentOptions } from './content-fetcher.js';\nimport type { IngestionPipeline } from '../IngestionPipeline.js';\nimport type { KBDocument } from '../types.js';\n\nexport interface CrawlOptions {\n maxPages?: number;\n maxDepth?: number;\n}\n\nexport interface UrlIngestorOptions {\n crawl4aiUrl?: string;\n}\n\nexport class UrlIngestor {\n private pipeline: IngestionPipeline;\n private fetchOptions: FetchContentOptions;\n\n constructor(pipeline: IngestionPipeline, options?: UrlIngestorOptions) {\n this.pipeline = pipeline;\n this.fetchOptions = { crawl4aiUrl: options?.crawl4aiUrl };\n }\n\n async ingestUrl(url: string, options?: { priority?: number; extractQA?: boolean }): Promise<KBDocument> {\n const { title, content, isMarkdown } = await fetchContent(url, this.fetchOptions);\n\n return this.pipeline.ingest({\n sourceType: 'url',\n sourceUrl: url,\n title,\n content,\n isMarkdown,\n priority: options?.priority,\n extractQA: options?.extractQA,\n });\n }\n\n async ingestSitemap(sitemapUrl: string, options?: CrawlOptions): Promise<KBDocument[]> {\n const maxPages = options?.maxPages || 50;\n const xml = await fetchHtml(sitemapUrl);\n const urls = this.parseSitemapUrls(xml).slice(0, maxPages);\n\n const docs: KBDocument[] = [];\n for (const url of urls) {\n try {\n const doc = await this.ingestUrl(url);\n docs.push(doc);\n } catch {\n // Skip failed URLs\n }\n }\n return docs;\n }\n\n async crawl(startUrl: string, options?: CrawlOptions): Promise<KBDocument[]> {\n const maxPages = options?.maxPages || 20;\n const maxDepth = options?.maxDepth || 2;\n const visited = new Set<string>();\n const docs: KBDocument[] = [];\n\n const queue: { url: string; depth: number }[] = [{ url: startUrl, depth: 0 }];\n\n while (queue.length > 0 && docs.length < maxPages) {\n const item = queue.shift()!;\n if (visited.has(item.url) || item.depth > maxDepth) continue;\n visited.add(item.url);\n\n try {\n const html = await fetchHtml(item.url);\n const { title, content, isMarkdown } = await fetchContent(item.url, this.fetchOptions);\n\n const doc = await this.pipeline.ingest({\n sourceType: 'url',\n sourceUrl: item.url,\n title,\n content,\n isMarkdown,\n });\n docs.push(doc);\n\n if (item.depth < maxDepth) {\n const links = extractLinks(html, item.url);\n for (const link of links) {\n if (!visited.has(link)) {\n queue.push({ url: link, depth: item.depth + 1 });\n }\n }\n }\n } catch {\n // Skip failed URLs\n }\n }\n\n return docs;\n }\n\n private parseSitemapUrls(xml: string): string[] {\n const urls: string[] = [];\n const locRegex = /<loc>(.*?)<\\/loc>/g;\n let 
match;\n while ((match = locRegex.exec(xml)) !== null) {\n urls.push(match[1]);\n }\n return urls.filter(u => !u.endsWith('.xml'));\n }\n}\n","import { readFile } from 'node:fs/promises';\nimport { extname } from 'node:path';\nimport type { IngestionPipeline } from '../IngestionPipeline.js';\nimport type { KBDocument } from '../types.js';\n\nexport class FileIngestor {\n private pipeline: IngestionPipeline;\n\n constructor(pipeline: IngestionPipeline) {\n this.pipeline = pipeline;\n }\n\n async ingestFile(filePath: string, title?: string, options?: { priority?: number }): Promise<KBDocument> {\n const ext = extname(filePath).toLowerCase();\n const content = await this.extractContent(filePath, ext);\n const fileName = filePath.split('/').pop() || filePath;\n\n return this.pipeline.ingest({\n sourceType: 'file',\n fileName,\n title: title || fileName,\n content,\n priority: options?.priority,\n });\n }\n\n private async extractContent(filePath: string, ext: string): Promise<string> {\n switch (ext) {\n case '.pdf':\n return this.extractPdf(filePath);\n case '.docx':\n return this.extractDocx(filePath);\n case '.xlsx':\n case '.xls':\n return this.extractXlsx(filePath);\n case '.csv':\n case '.txt':\n case '.md':\n return readFile(filePath, 'utf-8');\n case '.html':\n case '.htm':\n return this.extractHtml(filePath);\n default:\n return readFile(filePath, 'utf-8');\n }\n }\n\n private async extractPdf(filePath: string): Promise<string> {\n const { getDocumentProxy, extractText } = await import('unpdf');\n const buffer = await readFile(filePath);\n const pdf = await getDocumentProxy(new Uint8Array(buffer));\n const { text } = await extractText(pdf, { mergePages: true });\n return text;\n }\n\n private async extractDocx(filePath: string): Promise<string> {\n const mammoth = await import('mammoth');\n const buffer = await readFile(filePath);\n const result = await mammoth.extractRawText({ buffer });\n return result.value;\n }\n\n private async extractXlsx(filePath: string): Promise<string> {\n const XLSX = await import('xlsx');\n const buffer = await readFile(filePath);\n const workbook = XLSX.read(buffer, { type: 'buffer' });\n const lines: string[] = [];\n for (const sheetName of workbook.SheetNames) {\n const sheet = workbook.Sheets[sheetName];\n const csv = XLSX.utils.sheet_to_csv(sheet);\n lines.push(`## ${sheetName}\\n${csv}`);\n }\n return lines.join('\\n\\n');\n }\n\n private async extractHtml(filePath: string): Promise<string> {\n const { parseHTML } = await import('linkedom');\n const { Readability } = await import('@mozilla/readability');\n const html = await readFile(filePath, 'utf-8');\n const { document } = parseHTML(html);\n const reader = new Readability(document as any);\n const article = reader.parse();\n return article?.textContent?.trim() || html;\n }\n}\n","import { fetchHtml, fetchContent, extractLinks } from './content-fetcher.js';\nimport type { FetchContentOptions } from './content-fetcher.js';\nimport type { IngestionPipeline } from '../IngestionPipeline.js';\nimport type { KBDocument } from '../types.js';\n\nexport interface SiteCrawlOptions {\n maxDepth?: number;\n maxPages?: number;\n useSitemap?: boolean;\n delayMs?: number;\n onProgress?: (crawled: number, discovered: number, url: string) => void;\n}\n\nexport interface SiteCrawlerOptions {\n crawl4aiUrl?: string;\n}\n\nexport class SiteCrawler {\n private pipeline: IngestionPipeline;\n private fetchOptions: FetchContentOptions;\n\n constructor(pipeline: IngestionPipeline, options?: SiteCrawlerOptions) {\n 
this.pipeline = pipeline;\n this.fetchOptions = { crawl4aiUrl: options?.crawl4aiUrl };\n }\n\n async crawlSite(startUrl: string, options: SiteCrawlOptions = {}): Promise<KBDocument[]> {\n const {\n maxDepth = 2,\n maxPages = 50,\n useSitemap = true,\n delayMs = 500,\n onProgress,\n } = options;\n\n const docs: KBDocument[] = [];\n const visited = new Set<string>();\n\n if (useSitemap) {\n const sitemapUrls = await this.tryFetchSitemap(startUrl);\n if (sitemapUrls.length > 0) {\n const urlsToIngest = sitemapUrls.slice(0, maxPages);\n for (const url of urlsToIngest) {\n if (visited.has(url)) continue;\n visited.add(url);\n try {\n onProgress?.(docs.length + 1, urlsToIngest.length, url);\n const doc = await this.ingestPage(url);\n docs.push(doc);\n if (delayMs > 0) await this.delay(delayMs);\n } catch {\n // Skip failed URLs\n }\n if (docs.length >= maxPages) break;\n }\n return docs;\n }\n }\n\n const queue: { url: string; depth: number }[] = [{ url: startUrl, depth: 0 }];\n\n while (queue.length > 0 && docs.length < maxPages) {\n const item = queue.shift()!;\n if (visited.has(item.url) || item.depth > maxDepth) continue;\n visited.add(item.url);\n\n try {\n onProgress?.(docs.length + 1, docs.length + queue.length + 1, item.url);\n const html = await fetchHtml(item.url);\n const { title, content, isMarkdown } = await fetchContent(item.url, this.fetchOptions);\n\n const doc = await this.pipeline.ingest({\n sourceType: 'url',\n sourceUrl: item.url,\n title,\n content,\n isMarkdown,\n });\n docs.push(doc);\n\n if (delayMs > 0) await this.delay(delayMs);\n\n if (item.depth < maxDepth) {\n for (const link of extractLinks(html, item.url)) {\n if (!visited.has(link)) {\n queue.push({ url: link, depth: item.depth + 1 });\n }\n }\n }\n } catch {\n // Skip failed URLs\n }\n }\n\n return docs;\n }\n\n private async tryFetchSitemap(baseUrl: string): Promise<string[]> {\n try {\n const url = new URL(baseUrl);\n const sitemapUrl = `${url.protocol}//${url.hostname}/sitemap.xml`;\n const xml = await fetchHtml(sitemapUrl);\n return await this.parseSitemapUrls(xml);\n } catch {\n return [];\n }\n }\n\n private async parseSitemapUrls(xml: string): Promise<string[]> {\n const urls: string[] = [];\n const locRegex = /<loc>(.*?)<\\/loc>/g;\n let match;\n while ((match = locRegex.exec(xml)) !== null) {\n urls.push(match[1]);\n }\n\n if (xml.includes('<sitemapindex') || urls.every(u => u.endsWith('.xml'))) {\n const pageUrls: string[] = [];\n for (const childSitemapUrl of urls) {\n try {\n const childXml = await fetchHtml(childSitemapUrl);\n const childUrls: string[] = [];\n const childRegex = /<loc>(.*?)<\\/loc>/g;\n let childMatch;\n while ((childMatch = childRegex.exec(childXml)) !== null) {\n childUrls.push(childMatch[1]);\n }\n pageUrls.push(...childUrls.filter(u => !u.endsWith('.xml')));\n } catch {\n // Skip unreachable child sitemaps\n }\n }\n return pageUrls;\n }\n\n return urls.filter(u => !u.endsWith('.xml'));\n }\n\n private async ingestPage(url: string): Promise<KBDocument> {\n const { title, content, isMarkdown } = await fetchContent(url, this.fetchOptions);\n return this.pipeline.ingest({\n sourceType: 'url',\n sourceUrl: url,\n title,\n content,\n isMarkdown,\n });\n }\n\n private delay(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n }\n}\n","import type { IngestionPipeline } from '../IngestionPipeline.js';\nimport type { KBDocument } from '../types.js';\n\nexport interface FaqPair {\n question: string;\n answer: string;\n}\n\nexport interface 
WatiFaqSyncOptions {\n minAnswerLength?: number;\n maxPairs?: number;\n}\n\nexport class WatiFaqSync {\n private pipeline: IngestionPipeline;\n private llmExtract?: (conversation: string) => Promise<FaqPair[]>;\n\n constructor(\n pipeline: IngestionPipeline,\n llmExtract?: (conversation: string) => Promise<FaqPair[]>,\n ) {\n this.pipeline = pipeline;\n this.llmExtract = llmExtract;\n }\n\n async syncFromConversations(\n conversations: string[],\n options?: WatiFaqSyncOptions,\n ): Promise<KBDocument[]> {\n const minLen = options?.minAnswerLength || 20;\n const maxPairs = options?.maxPairs || 100;\n\n if (!this.llmExtract) {\n throw new Error('LLM extract function required for FAQ extraction');\n }\n\n const allPairs: FaqPair[] = [];\n for (const convo of conversations) {\n const pairs = await this.llmExtract(convo);\n allPairs.push(...pairs);\n if (allPairs.length >= maxPairs) break;\n }\n\n // Quality filtering\n const filtered = allPairs\n .filter((p) => p.answer.length >= minLen && p.question.trim().length > 0)\n .slice(0, maxPairs);\n\n const docs: KBDocument[] = [];\n for (const pair of filtered) {\n const doc = await this.pipeline.ingestFaq(pair.question, pair.answer, {\n source: 'wati-sync',\n });\n docs.push(doc);\n }\n\n return docs;\n }\n\n async syncFromPairs(pairs: FaqPair[], options?: WatiFaqSyncOptions): Promise<KBDocument[]> {\n const minLen = options?.minAnswerLength || 20;\n const filtered = pairs.filter(\n (p) => p.answer.length >= minLen && p.question.trim().length > 0,\n );\n\n const docs: KBDocument[] = [];\n for (const pair of filtered) {\n const doc = await this.pipeline.ingestFaq(pair.question, pair.answer, {\n source: 'wati-sync',\n });\n docs.push(doc);\n }\n return docs;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAcA,IAAa,mBAAb,MAAa,iBAAiB;CAC5B,AAAQ;CAER,YAAY,QAAgC;AAC1C,OAAK,SAAS;;CAGhB,AAAQ,WAAW;EACjB,MAAM,EAAE,UAAU,QAAQ,SAAS,UAAU,KAAK;AAElD,UAAQ,UAAR;GACE,KAAK,SAEH,QADe,aAAa;IAAE;IAAQ;IAAS,CAAC,CAClC,UAAU,SAAS,0BAA0B,EACzD,YAAY,KAAK,OAAO,YACzB,CAAC;GAEJ,KAAK,SAEH,QADe,yBAAyB;IAAE;IAAQ;IAAS,CAAC,CAC9C,mBAAmB,SAAS,qBAAqB;GAEjE,KAAK,UACH,QAAO,QAAQ,UAAU,SAAS,iBAAiB,EAAE,QAAQ,CAAC;GAEhE,KAAK,SACH,QAAO,OAAO,UAAU,SAAS,sBAAsB,EAAE,QAAQ,CAAC;GAEpE,KAAK,SAKH,QAJe,aAAa;IAC1B,QAAQ,UAAU;IAClB,SAAS,WAAW;IACrB,CAAC,CACY,UAAU,SAAS,mBAAmB;GAEtD,QACE,OAAM,IAAI,MAAM,+BAA+B,WAAW;;;CAIhE,IAAI,WAAmB;AACrB,SAAO,KAAK,OAAO;;CAGrB,IAAI,aAAqB;AACvB,MAAI,KAAK,OAAO,WAAY,QAAO,KAAK,OAAO;AAC/C,SAAO,iBAAiB,kBAAkB,KAAK,OAAO,UAAU,KAAK,OAAO,MAAM;;CAGpF,OAAO,kBAAkB,UAAkB,OAAwB;AACjE,UAAQ,UAAR;GACE,KAAK,SACH,QAAO;GACT,KAAK,SACH,QAAO;GACT,KAAK,UACH,QAAO;GACT,KAAK,SACH,QAAO;GACT,KAAK,SACH,QAAO;GACT,QACE,QAAO;;;CAIb,MAAM,MAAM,MAAiC;AAG3C,UADe,MAAM,MAAM;GAAE,OADf,KAAK,UAAU;GACO,OAAO;GAAM,CAAC,EACpC;;CAGhB,MAAM,UAAU,OAAsC;AACpD,MAAI,MAAM,WAAW,EAAG,QAAO,EAAE;AAGjC,UADe,MAAM,UAAU;GAAE,OADnB,KAAK,UAAU;GACW,QAAQ;GAAO,CAAC,EAC1C;;;;;;ACpFlB,IAAa,uBAAb,MAA4D;CAC1D,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ,kBAAkB;CAE1B,YAAY,SAAiB,kBAAkB,aAAqB,MAAM;AACxE,OAAK,KAAK,IAAI,SAAS,OAAO;AAC9B,OAAK,SAAS;AACd,OAAK,aAAa;AAClB,OAAK,GAAG,OAAO,qBAAqB;AACpC,OAAK,GAAG,OAAO,oBAAoB;AACnC,YAAU,KAAK,KAAK,GAAG;;CAGzB,gBAAwB;AACtB,SAAO,KAAK;;CAGd,MAAM,aAA4B;AAChC,OAAK,GAAG,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;MAyBX;AAGF,MAAI;AAAE,QAAK,GAAG,KAAK,iEAAiE;UAAU;AAC9F,MAAI;AAAE,QAAK,GAAG,KAAK,wDAAwD;UAAU;AACrF,MAAI;AAAE,QAAK,GAAG,KAAK,kFAAkF;UAAU;AAE/G,OAAK,GAAG,KAAK;;;0BAGS,KAAK,WAAW;;MAEpC;AAEF,OAAK,GAAG,KAAK;;;;;;MAMX;;CAGJ,MAAM,QAAuB;AAC3B,OAAK,GAAG,OAAO;;CAGjB,MAAM,YAAY,KAAgC;AAChD,OAAK,GAAG,QAAQ;;;MAGd,CAAC,IACD,IAAI,IACJ,IAAI,YACJ,IAAI,aAAa,MACjB,
IAAI,YAAY,MAChB,IAAI,SAAS,MACb,IAAI,SACJ,IAAI,WAAW,KAAK,UAAU,IAAI,SAAS,GAAG,MAC9C,IAAI,WACJ,IAAI,WACJ,IAAI,YAAY,GAChB,IAAI,eAAe,KACpB;;CAGH,MAAM,YAAY,IAAwC;EACxD,MAAM,MAAM,KAAK,GAAG,QAAQ,0CAA0C,CAAC,IAAI,GAAG;AAC9E,SAAO,MAAM,KAAK,cAAc,IAAI,GAAG;;CAGzC,MAAM,gBAAuC;AAE3C,SADa,KAAK,GAAG,QAAQ,sDAAsD,CAAC,KAAK,CAC7E,KAAK,MAAM,KAAK,cAAc,EAAE,CAAC;;CAG/C,MAAM,eAAe,IAA2B;EAC9C,MAAM,SAAS,KAAK,GAAG,QAAQ,iDAAiD,CAAC,IAAI,GAAG;AACxF,OAAK,MAAM,SAAS,QAAQ;AAC1B,QAAK,GAAG,QAAQ,4CAA4C,CAAC,IAAI,MAAM,GAAG;AAC1E,QAAK,GAAG,QAAQ,4CAA4C,CAAC,IAAI,MAAM,GAAG;;AAE5E,OAAK,GAAG,QAAQ,8CAA8C,CAAC,IAAI,GAAG;AACtE,OAAK,GAAG,QAAQ,wCAAwC,CAAC,IAAI,GAAG;;CAGlE,MAAM,UAAU,QAAkC;EAChD,MAAM,cAAc,KAAK,GAAG,QAAQ;;;MAGlC;EACF,MAAM,YAAY,KAAK,GAAG,QAAQ;;;MAGhC;EACF,MAAM,YAAY,KAAK,GAAG,QAAQ;;MAEhC;EACF,MAAM,YAAY,KAAK,GAAG,QAAQ;;;MAGhC;AA4BF,EA1BoB,KAAK,GAAG,aAAa,UAAqB;AAC5D,QAAK,MAAM,SAAS,OAAO;AACzB,gBAAY,IACV,MAAM,IACN,MAAM,YACN,MAAM,SACN,MAAM,YACN,MAAM,WAAW,KAAK,UAAU,MAAM,SAAS,GAAG,KACnD;AACD,QAAI,MAAM,WAAW;AACnB,SAAI,MAAM,UAAU,WAAW,KAAK,cAAc,CAAC,KAAK,iBAAiB;AACvE,WAAK,kBAAkB;AACvB,cAAQ,KACN,0CAA0C,KAAK,WAAW,yBAAyB,MAAM,UAAU,OAAO,wJAG3G;;AAEH,eAAU,IAAI,MAAM,IAAI,IAAI,aAAa,MAAM,UAAU,CAAC;;AAG5D,cAAU,IAAI,MAAM,GAAG;AACvB,cAAU,IAAI,MAAM,IAAI,MAAM,QAAQ;;IAExC,CAEU,OAAO;;CAGrB,cAAc,YAA4B;AAIxC,SAHY,KAAK,GAAG,QAClB,gEACD,CAAC,IAAI,WAAW,EACL,SAAS;;CAGvB,MAAM,OAAO,OAAe,WAAqB,SAAsD;AACrG,SAAO,KAAK,kBAAkB,WAAW,QAAQ;;CAGnD,MAAM,kBAAkB,WAAqB,SAAsD;EACjG,MAAM,QAAQ,SAAS,SAAS;EAIhC,MAAM,aAAa,SAAS,cAAc,KAAK,IAAI,QAAQ,IAAI,IAAI,GAAG;EAEtE,MAAM,UAAU,KAAK,GAAG,QAAQ;;;;;;MAM9B,CAAC,IAAI,IAAI,aAAa,UAAU,EAAE,WAAW;AAE/C,MAAI,QAAQ,WAAW,EAAG,QAAO,EAAE;EAEnC,MAAM,UAA4B,EAAE;AACpC,OAAK,MAAM,UAAU,SAAS;GAC5B,MAAM,WAAW,OAAO;GACxB,MAAM,QAAQ,KAAK,IAAI;AAEvB,OAAI,SAAS,kBAAkB,QAAQ,QAAQ,eAAgB;GAE/D,MAAM,QAAQ,KAAK,GAAG,QAAQ,uCAAuC,CAAC,IAAI,OAAO,SAAS;AAC1F,OAAI,CAAC,MAAO;GAEZ,MAAM,MAAM,KAAK,GAAG,QAAQ,0CAA0C,CAAC,IAAI,MAAM,YAAY;AAC7F,OAAI,CAAC,IAAK;AAEV,OAAI,SAAS,eAAe,CAAC,QAAQ,YAAY,SAAS,IAAI,YAAY,CAAE;AAE5E,WAAQ,KAAK;IACX,OAAO;KACL,IAAI,MAAM;KACV,YAAY,MAAM;KAClB,SAAS,MAAM;KACf,YAAY,MAAM;KAClB,UAAU,MAAM,WAAW,KAAK,MAAM,MAAM,SAAS,GAAG;KACzD;IACD,UAAU,KAAK,cAAc,IAAI;IACjC;IACA;IACD,CAAC;;AAGJ,SAAO,QAAQ,MAAM,GAAG,MAAM;;CAGhC,MAAM,gBAAgB,OAAe,SAAsD;EACzF,MAAM,QAAQ,SAAS,SAAS;EAGhC,MAAM,YAAY,MAAM,QAAQ,MAAM,OAAK;EAC3C,IAAI;AACJ,MAAI;AACF,aAAU,KAAK,GAAG,QAAQ;;;;;;QAMxB,CAAC,IAAI,WAAW,QAAQ,EAAE;UACtB;AAEN,UAAO,EAAE;;AAGX,MAAI,QAAQ,WAAW,EAAG,QAAO,EAAE;EAEnC,MAAM,UAA4B,EAAE;AACpC,OAAK,MAAM,UAAU,SAAS;AAC5B,OAAI,QAAQ,UAAU,MAAO;GAE7B,MAAM,QAAQ,KAAK,GAAG,QAAQ,uCAAuC,CAAC,IAAI,OAAO,SAAS;AAC1F,OAAI,CAAC,MAAO;GAEZ,MAAM,MAAM,KAAK,GAAG,QAAQ,0CAA0C,CAAC,IAAI,MAAM,YAAY;AAC7F,OAAI,CAAC,IAAK;AAEV,OAAI,SAAS,eAAe,CAAC,QAAQ,YAAY,SAAS,IAAI,YAAY,CAAE;GAG5E,MAAM,YAAY,CAAC,OAAO;AAE1B,OAAI,SAAS,kBAAkB,YAAY,QAAQ,eAAgB;AAEnE,WAAQ,KAAK;IACX,OAAO;KACL,IAAI,MAAM;KACV,YAAY,MAAM;KAClB,SAAS,MAAM;KACf,YAAY,MAAM;KAClB,UAAU,MAAM,WAAW,KAAK,MAAM,MAAM,SAAS,GAAG;KACzD;IACD,UAAU,KAAK,cAAc,IAAI;IACjC,OAAO;IACP,UAAU;IACX,CAAC;;AAGJ,SAAO;;;;;;CAOT,eAA6G;AAC3G,SAAO,KAAK,GAAG,QACb,sIACD,CAAC,KAAK;;;;;;CAOT,gBAAgB,eAA6B;AAC3C,OAAK,GAAG,KAAK,kCAAkC;AAC/C,OAAK,GAAG,KAAK;;;0BAGS,cAAc;;MAElC;AACF,OAAK,aAAa;AAClB,OAAK,kBAAkB;;;;;;CAOzB,sBAAsB,OAAyD;EAC7E,MAAM,SAAS,KAAK,GAAG,QACrB,wEACD;AAMD,EALW,KAAK,GAAG,aAAa,UAAsD;AACpF,QAAK,MAAM,QAAQ,MACjB,QAAO,IAAI,KAAK,SAAS,IAAI,aAAa,KAAK,UAAU,CAAC;IAE5D,CACC,MAAM;;CAGX,MAAM,WAA6B;EACjC,MAAM,WAAW,KAAK,GAAG,QAAQ,6CAA6C,CAAC,KAAK;EACpF,MAAM,aAAa,KAAK,GAAG,QAAQ,0CAA0C,CAAC,KAAK;EACnF,IAAI,cAAc;AAClB,MAAI;AACF,iBAAc,SAAS,KAAK,OAAO,CAAC;UAC9B;AACR,SAAO;GACL,eAAe,SAAS;GACxB,YAAY,WAAW;GACvB,qBA
AqB,KAAK;GAC1B;GACD;;CAGH,MAAM,gBAAgB,KAAyC;EAC7D,MAAM,MAAM,KAAK,GAAG,QAAQ,kDAAkD,CAAC,IAAI,IAAI;AACvF,SAAO,MAAM,KAAK,cAAc,IAAI,GAAG;;CAGzC,MAAM,kBAAkB,MAA0C;EAChE,MAAM,MAAM,KAAK,GAAG,QAAQ,oDAAoD,CAAC,IAAI,KAAK;AAC1F,SAAO,MAAM,KAAK,cAAc,IAAI,GAAG;;CAGzC,MAAM,eAAe,IAAY,SAAuI;EACtK,MAAM,OAAiB,EAAE;EACzB,MAAM,SAAgB,EAAE;AACxB,MAAI,QAAQ,YAAY,QAAW;AAAE,QAAK,KAAK,cAAc;AAAE,UAAO,KAAK,QAAQ,QAAQ;;AAC3F,MAAI,QAAQ,UAAU,QAAW;AAAE,QAAK,KAAK,YAAY;AAAE,UAAO,KAAK,QAAQ,MAAM;;AACrF,MAAI,QAAQ,gBAAgB,QAAW;AAAE,QAAK,KAAK,mBAAmB;AAAE,UAAO,KAAK,QAAQ,YAAY;;AACxG,MAAI,QAAQ,aAAa,QAAW;AAAE,QAAK,KAAK,eAAe;AAAE,UAAO,KAAK,QAAQ,SAAS;;AAC9F,MAAI,QAAQ,aAAa,QAAW;AAAE,QAAK,KAAK,eAAe;AAAE,UAAO,KAAK,KAAK,UAAU,QAAQ,SAAS,CAAC;;AAC9G,OAAK,KAAK,iBAAiB;AAAE,SAAO,KAAK,KAAK,KAAK,CAAC;AACpD,SAAO,KAAK,GAAG;AACf,OAAK,GAAG,QAAQ,2BAA2B,KAAK,KAAK,KAAK,CAAC,eAAe,CAAC,IAAI,GAAG,OAAO;;CAG3F,MAAM,eAAe,WAAqB,WAAmD;EAC3F,MAAM,UAAU,MAAM,KAAK,kBAAkB,WAAW;GAAE,aAAa,CAAC,MAAM;GAAE,OAAO;GAAG,CAAC;AAC3F,MAAI,QAAQ,SAAS,KAAK,QAAQ,GAAG,SAAS,UAAW,QAAO,QAAQ;AACxE,SAAO;;CAGT,AAAQ,cAAc,KAAsB;AAC1C,SAAO;GACL,IAAI,IAAI;GACR,YAAY,IAAI;GAChB,WAAW,IAAI,cAAc;GAC7B,UAAU,IAAI,aAAa;GAC3B,OAAO,IAAI,SAAS;GACpB,SAAS,IAAI;GACb,UAAU,IAAI,WAAW,KAAK,MAAM,IAAI,SAAS,GAAG;GACpD,WAAW,IAAI;GACf,WAAW,IAAI;GACf,UAAU,IAAI,YAAY;GAC1B,aAAa,IAAI,gBAAgB;GAClC;;;;;;ACjXL,IAAa,cAAb,MAAyB;CACvB,AAAQ;CACR,AAAQ;CAER,YAAY,SAAwB;AAClC,OAAK,mBAAmB,SAAS,aAAa;AAC9C,OAAK,sBAAsB,SAAS,gBAAgB;;CAGtD,MAAM,MAAM,MAAc,SAA2C;AAMnE,UADa,MAJI,IAAI,+BAA+B;GAClD,WAAW,SAAS,aAAa,KAAK;GACtC,cAAc,SAAS,gBAAgB,KAAK;GAC7C,CAAC,CAC0B,gBAAgB,CAAC,KAAK,CAAC,EACvC,KAAK,MAAM,EAAE,YAAY;;CAGvC,MAAM,cAAc,UAAkB,SAA2C;AAM/E,UADa,MAJI,IAAI,qBAAqB;GACxC,WAAW,SAAS,aAAa,KAAK;GACtC,cAAc,SAAS,gBAAgB,KAAK;GAC7C,CAAC,CAC0B,gBAAgB,CAAC,SAAS,CAAC,EAC3C,KAAK,MAAM,EAAE,YAAY;;;;;;;;;;AC1BzC,MAAM,gBAAoC;CACxC,CAAC,WAAW,MAAM;CAClB,CAAC,YAAY,OAAO;CACpB,CAAC,WAAW,MAAM;CAClB,CAAC,aAAa,SAAS;CACvB,CAAC,aAAa,SAAS;CACvB,CAAC,aAAa,SAAS;CACvB,CAAC,cAAc,SAAS;CACxB,CAAC,YAAY,YAAY;CACzB,CAAC,aAAa,OAAO;CACrB,CAAC,YAAY,UAAU;CACvB,CAAC,aAAa,UAAU;CACxB,CAAC,eAAe,WAAW;CAC3B,CAAC,eAAe,UAAU;CAC1B,CAAC,eAAe,SAAS;CACzB,CAAC,eAAe,SAAS;CACzB,CAAC,eAAe,UAAU;CAC1B,CAAC,eAAe,cAAc;CAC9B,CAAC,aAAa,QAAQ;CACtB,CAAC,cAAc,UAAU;CACzB,CAAC,aAAa,UAAU;CACxB,CAAC,cAAc,WAAW;CAC1B,CAAC,cAAc,cAAc;CAC7B,CAAC,aAAa,WAAW;CACzB,CAAC,cAAc,WAAW;CAC1B,CAAC,aAAa,OAAO;CACrB,CAAC,cAAc,UAAU;CACzB,CAAC,aAAa,gBAAgB;CAC9B,CAAC,aAAa,gBAAgB;CAC9B,CAAC,aAAa,aAAa;CAC3B,CAAC,cAAc,sBAAsB;CAErC,CAAC,UAAU,MAAM;CACjB,CAAC,UAAU,KAAK;CACjB;;;;;;;AAQD,SAAgB,eAAe,OAAuB;CACpD,IAAI,aAAa,MAAM,aAAa;AAEpC,MAAK,MAAM,CAAC,SAAS,gBAAgB,cACnC,cAAa,WAAW,QAAQ,SAAS,YAAY;AAIvD,cAAa,WAAW,QAAQ,QAAQ,IAAI,CAAC,MAAM;AAEnD,QAAO;;;;;AChBT,IAAa,oBAAb,MAA+B;CAC7B,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CAER,YAAY,OAAuB,UAA4B,SAAsB,aAAkC;AACrH,OAAK,QAAQ;AACb,OAAK,WAAW;AAChB,OAAK,UAAU;AACf,OAAK,cAAc;;CAGrB,AAAQ,aAAa,MAAsB;AACzC,SAAO,KACJ,QAAQ,oBAAoB,GAAG,CAC/B,QAAQ,WAAW,IAAI,CACvB,QAAQ,gBAAgB,OAAO,CAC/B,MAAM,KAAK,CAAC,QAAQ,MAAM,GAAG,QAAQ,MAAM,KAAK,SAAS,IAAI,IAAI,GAAG,CAAC,KAAK,KAAK,CAC/E,MAAM,CACN,MAAM,GAAG,KAAM;;CAGpB,MAAc,eAAe,SAAiB,OAAsE;EAClH,MAAM,UAAU,KAAK,aAAa,QAAQ;EAC1C,MAAM,YAAY,QAAQ,kBAAkB,MAAM,KAAK;EACvD,MAAM,WAAW,MAAM,KAAK,YAAa,SAAS,CAChD;GACE,MAAM;GACN,SAAS;GACV,EACD;GACE,MAAM;GACN,SAAS,uCAAuC,UAAU,MAAM;GACjE,CACF,CAAC;AAEF,MAAI;GAEF,MAAM,OAAO,SAAS,KAAK,QAAQ,qBAAqB,GAAG,CAAC,QAAQ,eAAe,GAAG,CAAC,MAAM;GAC7F,MAAM,SAAS,KAAK,MAAM,KAAK;AAC/B,OAAI,MAAM,QAAQ,OAAO,CAAE,QAAO,OAAO,QAAQ,MAAW,EAAE,YAAY,EAAE,OAAO;UAC7E;GAEN,MAAM,QAAQ,SAAS,KAAK,MAAM,cAAc;AAChD,OAAI,MACF,KAAI;IACF,MAAM,SAAS,KAAK,MAAM,MAAM,GAAG;AACnC,QAAI,MAAM,QAAQ,OAAO,CAAE
,QAAO,OAAO,QAAQ,MAAW,EAAE,YAAY,EAAE,OAAO;WAC7E;;AAGZ,SAAO,EAAE;;CAGX,AAAQ,YAAY,SAAyB;AAC3C,SAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;;CAG3D,MAAM,OAAO,OAAyC;AACpD,MAAI,CAAC,MAAM,WAAW,MAAM,QAAQ,MAAM,CAAC,WAAW,EACpD,OAAM,IAAI,MAAM,6BAA6B,MAAM,SAAS,MAAM,aAAa,UAAU,GAAG;EAG9F,MAAM,cAAc,KAAK;EACzB,MAAM,UAAU,KAAK,aAAa,MAAM,QAAQ;EAChD,MAAM,cAAc,KAAK,YAAY,QAAQ;EAG7C,MAAM,WAAW,MAAM,aAAa,MAAM,eAAe,QAAQ,IAAI;AAGrE,MAAI,MAAM,aAAa,YAAY,iBAAiB;GAClD,MAAM,WAAW,MAAM,YAAY,gBAAgB,MAAM,UAAU;AACnE,OAAI,UAAU;AAEZ,UAAM,YAAY,eAAe,SAAS,IAAI;KAC5C,SAAS,MAAM;KACf,OAAO,MAAM;KACb;KACA;KACA,UAAU,MAAM;KACjB,CAAC;AAEF,UAAM,KAAK,MAAM,eAAe,SAAS,GAAG;;;AAOhD,MAAI,YAAY,mBAAmB;GACjC,MAAM,WAAW,MAAM,YAAY,kBAAkB,YAAY;AACjE,OAAI,UAAU;AACZ,YAAQ,IAAI,4DAA4D,MAAM,SAAS,MAAM,aAAa,UAAU,GAAG;AACvH,WAAO;;;AAKX,MAAI,MAAM,aAAa,KAAK,aAAa;GACvC,MAAM,QAAQ,MAAM,KAAK,eAAe,MAAM,SAAS,MAAM,MAAM;AACnE,OAAI,MAAM,SAAS,GAAG;AACpB,SAAK,MAAM,QAAQ,MACjB,OAAM,KAAK,UAAU,KAAK,UAAU,KAAK,QAAQ,EAAE,WAAW,MAAM,WAAW,CAAC;IAElF,MAAM,MAAM,KAAK,KAAK;IACtB,MAAM,YAAwB;KAC5B,IAAI,YAAY;KAChB,YAAY,MAAM;KAClB,WAAW,MAAM;KACjB,UAAU,MAAM;KAChB,OAAO,MAAM;KACb,SAAS,aAAa,MAAM,OAAO;KACnC,UAAU;MAAE,GAAG,MAAM;MAAU,UAAU,MAAM;MAAQ;KACvD,WAAW;KACX,WAAW;KACX;KACA;KACD;AACD,UAAM,KAAK,MAAM,YAAY,UAAU;AACvC,WAAO;;;EAKX,MAAM,MAAM,KAAK,KAAK;EACtB,MAAM,MAAkB;GACtB,IAAI,YAAY;GAChB,YAAY,MAAM;GAClB,WAAW,MAAM;GACjB,UAAU,MAAM;GAChB,OAAO,MAAM;GACb,SAAS,MAAM;GACf,UAAU,MAAM;GAChB,WAAW;GACX,WAAW;GACX;GACA;GACD;AAED,QAAM,KAAK,MAAM,YAAY,IAAI;EAIjC,MAAM,QADc,MAAM,cAAc,MAAM,eAAe,QAEzD,MAAM,KAAK,QAAQ,cAAc,MAAM,QAAQ,GAC/C,MAAM,KAAK,QAAQ,MAAM,MAAM,QAAQ;EAC3C,MAAM,aAAa,MAAM,KAAK,SAAS,UAAU,MAAM;EAEvD,MAAM,SAAoB,MAAM,KAAK,MAAM,OAAO;GAChD,IAAI,YAAY;GAChB,YAAY,IAAI;GAChB,SAAS;GACT,YAAY;GACZ,WAAW,WAAW;GACtB,UAAU,MAAM;GACjB,EAAE;AAEH,QAAM,KAAK,MAAM,UAAU,OAAO;AAElC,MAAI,KAAK,MAAM,eAEb;OADoB,KAAK,MAAM,cAAc,IAAI,GAAG,KAChC,EAClB,SAAQ,KAAK,2BAA2B,MAAM,SAAS,IAAI,GAAG,mDAAmD;;AAIrH,SAAO;;CAGT,MAAM,UAAU,UAAkB,QAAgB,UAAuF;EACvI,MAAM,YAAY,MAAM,KAAK,SAAS,MAAM,eAAe,SAAS,CAAC;EACrE,MAAM,cAAc,KAAK;AAGzB,MAAI,YAAY,kBAAkB,CAAC,UAAU,cAAc;GACzD,MAAM,QAAQ,MAAM,YAAY,eAAe,WAAW,GAAK;AAC/D,OAAI,OAAO;IACT,MAAM,YAAY,MAAM,MAAM,UAAU,YAAY,MAAM,SAAS;IACnE,MAAM,YAAY,MAAM,MAAM,UAAU;IAExC,MAAM,MAAM,KAAK,KAAK;IACtB,MAAM,UAAU,MAAM,SAAS,OAAO;AAatC,WAZ6B;KAC3B,IAAI,YAAY;KAChB,YAAY;KACZ,WAAW,UAAU;KACrB,OAAO;KACP;KACA,UAAU;MAAE,GAAG;MAAU;MAAU;MAAQ;KAC3C,UAAU;KACV,WAAW;KACX,WAAW;KACX,eAAe;MAAE,IAAI,MAAM,SAAS;MAAI,UAAU;MAAW,QAAQ;MAAW,OAAO,MAAM;MAAO;KACrG;;;AAML,MAAI,UAAU,gBAAgB,UAAU,UACtC,OAAM,KAAK,MAAM,eAAe,SAAS,UAAU;EAGrD,MAAM,MAAM,KAAK,KAAK;EACtB,MAAM,UAAU,MAAM,SAAS,OAAO;EACtC,MAAM,MAAuB;GAC3B,IAAI,YAAY;GAChB,YAAY;GACZ,WAAW,UAAU;GACrB,OAAO;GACP;GACA,UAAU;IAAE,GAAG;IAAU;IAAU;IAAQ;GAC3C,UAAU;GACV,WAAW;GACX,WAAW;GACZ;AAED,QAAM,KAAK,MAAM,YAAY,IAAI;EAEjC,MAAM,QAAiB;GACrB,IAAI,YAAY;GAChB,YAAY,IAAI;GAChB;GACA,YAAY;GACZ;GACA,UAAU;IAAE;IAAU;IAAQ;GAC/B;AAED,QAAM,KAAK,MAAM,UAAU,CAAC,MAAM,CAAC;AACnC,SAAO;;;;;;;;CAST,MAAM,QAAQ,YAAiG;EAC7G,MAAM,cAAc,KAAK;AACzB,MAAI,CAAC,YAAY,gBAAgB,CAAC,YAAY,mBAAmB,CAAC,YAAY,sBAC5E,OAAM,IAAI,MAAM,gEAAgE;EAGlF,MAAM,gBAAgB,YAAY,eAAe;EACjD,MAAM,gBAAgB,KAAK,SAAS;EAGpC,MAAM,YAAY,MAAM,KAAK,MAAM,eAAe;EAClD,MAAM,SAAS,IAAI,IAAI,UAAU,KAAI,MAAK,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC;EACrD,MAAM,YAAY,YAAY,cAAc;AAE5C,MAAI,UAAU,WAAW,EACvB,QAAO;GAAE,kBAAkB;GAAG,eAAe;GAAG;GAAe;GAAe;AAIhF,cAAY,gBAAgB,cAAc;EAG1C,MAAM,8BAAc,IAAI,KAA+B;AACvD,OAAK,MAAM,SAAS,WAAW;GAC7B,MAAM,OAAO,YAAY,IAAI,MAAM,WAAW,IAAI,EAAE;AACpD,QAAK,KAAK,MAAM;AAChB,eAAY,IAAI,MAAM,YAAY,KAAK;;EAGzC,IAAI,gBAAgB;EACpB,MAAM,YAAY,YAAY;EAC9B,IAAI,qBAAqB;AAEzB,OAAK,MAAM,CAAC,OAAO,WAAW,aAAa;GACzC,MA
AM,MAAM,OAAO,IAAI,MAAM;GAC7B,MAAM,WAAW,KAAK,SAAS,MAAM,MAAM,GAAG,EAAE;AAEhD,gBAAa,eAAe,WAAW,SAAS;GAGhD,MAAM,eAAyB,EAAE;AACjC,QAAK,MAAM,SAAS,OAClB,KAAI,KAAK,eAAe,OAAO;IAG7B,MAAM,YADO,MAAM,WAAW,KAAK,MAAM,MAAM,SAAS,GAAG,OACpC,YAAY,IAAI,SAAS,MAAM;AACtD,iBAAa,KAAK,eAAe,SAAS,CAAC;SAE3C,cAAa,KAAK,MAAM,QAAQ;GAKpC,MAAM,aAAa,MAAM,KAAK,SAAS,UAAU,aAAa;GAG9D,MAAM,QAAQ,OAAO,KAAK,OAAO,OAAO;IACtC,SAAS,MAAM;IACf,WAAW,WAAW;IACvB,EAAE;AACH,eAAY,sBAAsB,MAAM;AAExC,yBAAsB,OAAO;AAC7B;;AAGF,eAAa,WAAW,WAAW,OAAO;AAE1C,SAAO;GACL,kBAAkB;GAClB,eAAe;GACf;GACA;GACD;;;;;;;;;;;;;;;;;ACtVL,SAAgB,qBACd,YACA,IAAY,IACS;CACrB,MAAM,yBAAS,IAAI,KAAqB;AAExC,MAAK,MAAM,WAAW,WACpB,MAAK,MAAM,CAAC,IAAI,SAAS,SAAS;EAChC,MAAM,OAAO,OAAO,IAAI,GAAG,IAAI;AAC/B,SAAO,IAAI,IAAI,OAAO,KAAK,IAAI,MAAM;;AASzC,QAJe,IAAI,IACjB,CAAC,GAAG,OAAO,SAAS,CAAC,CAAC,MAAM,GAAG,MAAM,EAAE,KAAK,EAAE,GAAG,CAClD;;;;;;;;AAWH,SAAgB,oBACd,eACA,gBACA,eAAuB,IACvB,gBAAwB,IACH;CAErB,MAAM,6BAAa,IAAI,KAAqB;AAC5C,KAAI,eAAe,SAAS,GAAG;EAC7B,MAAM,SAAS,eAAe,KAAI,MAAK,EAAE,MAAM;EAC/C,MAAM,MAAM,KAAK,IAAI,GAAG,OAAO;EAE/B,MAAM,QADM,KAAK,IAAI,GAAG,OAAO,GACX,OAAO;AAC3B,OAAK,MAAM,KAAK,eACd,YAAW,IAAI,EAAE,KAAK,EAAE,QAAQ,OAAO,MAAM;;CAIjD,MAAM,4BAAY,IAAI,KAAqB;AAC3C,MAAK,MAAM,KAAK,cAAe,WAAU,IAAI,EAAE,IAAI,EAAE,MAAM;CAG3D,MAAM,SAAS,IAAI,IAAI,CAAC,GAAG,UAAU,MAAM,EAAE,GAAG,WAAW,MAAM,CAAC,CAAC;CACnE,MAAM,wBAAQ,IAAI,KAAqB;AAEvC,MAAK,MAAM,MAAM,QAAQ;EACvB,MAAM,KAAK,UAAU,IAAI,GAAG,IAAI;EAChC,MAAM,KAAK,WAAW,IAAI,GAAG,IAAI;AACjC,QAAM,IAAI,IAAI,eAAe,KAAK,gBAAgB,GAAG;;AAGvD,QAAO,IAAI,IAAI,CAAC,GAAG,MAAM,SAAS,CAAC,CAAC,MAAM,GAAG,MAAM,EAAE,KAAK,EAAE,GAAG,CAAC;;;;;;;;;;;AClClE,SAAgB,mBAAmB,OAAyB;CAE1D,MAAM,SAAS,MAAM,MAAM,QAAQ,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC,SAAS,EAAE;AACpE,KAAI,OAAO,SAAS,EAClB,QAAO,OAAO,MAAM,GAAG,EAAE,CAAC,KAAI,MAAK,EAAE,MAAM,CAAC;CAK9C,MAAM,WAAW,MAAM,MAAM,aAAa,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC,SAAS,EAAE;AAC3E,KAAI,SAAS,SAAS,EACpB,QAAO,SAAS,MAAM,GAAG,EAAE,CAAC,KAAI,MAAK,EAAE,MAAM,CAAC;AAGhD,QAAO,CAAC,MAAM;;AAGhB,IAAa,oBAAb,MAA+B;CAC7B,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CAIR,YACE,OACA,UACA,oBACA;AACA,OAAK,QAAQ;AACb,OAAK,WAAW;AAEhB,MAAI,OAAO,uBAAuB,UAAU;AAC1C,QAAK,eAAe;AACpB,QAAK,kBAAkB;AACvB,QAAK,cAAc;AACnB,QAAK,kBAAkB;AACvB,QAAK,uBAAuB;AAC5B,QAAK,sBAAsB;AAC3B,QAAK,iBAAiB;SACjB;GACL,MAAM,OAAO,sBAAsB,EAAE;AACrC,QAAK,eAAe,KAAK,gBAAgB;AACzC,QAAK,kBAAkB,KAAK,mBAAmB;AAC/C,QAAK,cAAc,KAAK,eAAe;AACvC,QAAK,kBAAkB,KAAK,mBAAmB;AAC/C,QAAK,gBAAgB,KAAK;AAC1B,QAAK,uBAAuB,KAAK,wBAAwB;AACzD,QAAK,sBAAsB,KAAK,uBAAuB;AACvD,QAAK,iBAAiB,KAAK,kBAAkB;;;CAIjD,MAAM,SAAS,OAAe,SAAqD;EACjF,MAAM,aAAa,mBAAmB,MAAM;AAG5C,MAAI,WAAW,UAAU,EACvB,QAAO,KAAK,eAAe,OAAO,QAAQ;EAI5C,MAAM,aAAa,MAAM,QAAQ,IAC/B,WAAW,KAAI,OAAM,KAAK,eAAe,IAAI,QAAQ,CAAC,CACvD;EAGD,MAAM,uBAAO,IAAI,KAAa;EAC9B,MAAM,aAA+E,EAAE;AAEvF,OAAK,MAAM,MAAM,WACf,KAAI,GAAG,cAAc,GAAG,aAAa,GAAG,aAAa;GACnD,MAAM,QAAQ,GAAG,QAAQ,IAAI,UAAU;AACvC,OAAI,SAAS,CAAC,KAAK,IAAI,MAAM,EAAE;AAC7B,SAAK,IAAI,MAAM;AACf,eAAW,KAAK;KACd,aAAa,GAAG;KAChB,WAAW,GAAG;KACd,OAAO,GAAG,QAAQ,IAAI,SAAS;KAChC,CAAC;;;AAMR,MAAI,WAAW,UAAU,GAAG;GAC1B,MAAM,aAAa,WAAW,SAAQ,OAAM,GAAG,QAAQ;GAEvD,MAAM,6BAAa,IAAI,KAAa;GACpC,MAAM,iBAAiB,WAAW,QAAO,MAAK;AAC5C,QAAI,WAAW,IAAI,EAAE,MAAM,GAAG,CAAE,QAAO;AACvC,eAAW,IAAI,EAAE,MAAM,GAAG;AAC1B,WAAO;KACP;AAEF,UAAO;IACL,SAAS;IACT,SAAS,KAAK,cAAc,eAAe;IAC3C,YAAY;IACZ;IAEA,WAAW,WAAW,GAAG;IACzB,aAAa,WAAW,GAAG;IAC5B;;AAIH,SAAO,KAAK,eAAe,OAAO,QAAQ;;CAG5C,MAAc,eAAe,OAAe,SAAqD;EAE/F,MAAM,aAAa,eAAe,MAAM;EACxC,MAAM,YAAY,MAAM,KAAK,SAAS,MAAM,WAAW;EAGvD,MAAM,aAAa,MAAM,KAAK,MAAM,kBAAkB,WAAW;GAC/D,GAAG;GACH,aAAa,CAAC,MAAM;GACpB,OAAO;GACR,CAAC;A
AEF,MAAI,WAAW,SAAS,GAAG;GAEzB,IAAI,MAAM,WAAW;AACrB,OAAI,WAAW,SAAS,GAEtB;QADkB,IAAI,QAAQ,WAAW,GAAG,SAC3B,QAAS,WAAW,GAAG,SAAS,aAAa,MAAM,IAAI,SAAS,aAAa,GAC5F,OAAM,WAAW;;GAIrB,MAAM,YAAY,IAAI,MAAM,UAAU,UAAU,IAAI,SAAS,UAAU;GACvE,MAAM,cAAc,IAAI,MAAM,UAAU,YAAY,IAAI,SAAS,UAAU;AAI3E,OAAI,IAAI,SAAS,KAAK,aACpB,QAAO;IACL,SAAS,CAAC,IAAI;IACd,SAAS,KAAK,cAAc,CAAC,IAAI,CAAC;IAClC,YAAY;IACZ;IACA;IACD;AAIH,OAAI,IAAI,SAAS,KAAK,iBAEpB;SADY,WAAW,SAAS,IAAI,IAAI,QAAQ,WAAW,GAAG,QAAQ,KAC5D,KAAK,YACb,QAAO;KACL,SAAS,CAAC,IAAI;KACd,SAAS,KAAK,cAAc,CAAC,IAAI,CAAC;KAClC,YAAY;KACZ;KACA;KACD;;;EAMP,MAAM,UAAU,MAAM,KAAK,aAAa,YAAY,WAAW,QAAQ;EAKvE,MAAM,WAAW,QAAQ,SAAS,IAAI,QAAQ,GAAG,QAAQ;AACzD,MACE,KAAK,iBACL,YAAY,KAAK,uBACjB,WAAW,KAAK,qBAEhB,KAAI;GACF,MAAM,gBAAgB,MAAM,KAAK,cAAc,QAAQ,WAAW;GAClE,MAAM,qBAAqB,MAAM,KAAK,SAAS,MAAM,cAAc,UAAU;GAG7E,MAAM,sBAAsB,MAAM,KAAK,MAAM,kBAAkB,oBAAoB;IACjF,GAAG;IACH,aAAa,CAAC,MAAM;IACpB,OAAO;IACR,CAAC;AAEF,OAAI,oBAAoB,SAAS,KAAK,oBAAoB,GAAG,SAAS,KAAK,iBAAiB;IAC1F,MAAM,MAAM,oBAAoB;IAChC,MAAM,MAAM,oBAAoB,SAAS,IAAI,IAAI,QAAQ,oBAAoB,GAAG,QAAQ;AACxF,QAAI,IAAI,SAAS,KAAK,gBAAgB,MAAM,KAAK,aAAa;KAC5D,MAAM,YAAY,IAAI,MAAM,UAAU,UAAU,IAAI,SAAS,UAAU;KACvE,MAAM,cAAc,IAAI,MAAM,UAAU,YAAY,IAAI,SAAS,UAAU;AAC3E,YAAO;MACL,SAAS,CAAC,IAAI;MACd,SAAS,KAAK,cAAc,CAAC,IAAI,CAAC;MAClC,YAAY;MACZ,WAAW,cAAc;MACzB;MACA;MACD;;;GAKL,MAAM,mBAAmB,MAAM,KAAK,aAAa,cAAc,WAAW,oBAAoB,QAAQ;AACtG,OAAI,iBAAiB,SAAS,KAAK,iBAAiB,GAAG,QAAQ,SAC7D,QAAO;IACL,SAAS;IACT,SAAS,KAAK,cAAc,iBAAiB;IAC7C,YAAY;IACZ,WAAW,cAAc;IAC1B;UAEG;AAKV,SAAO;GACL;GACA,SAAS,KAAK,cAAc,QAAQ;GACpC,YAAY;GACb;;CAGH,MAAc,aACZ,OACA,WACA,SAC2B;EAC3B,MAAM,QAAQ,SAAS,SAAS;AAGhC,MAAI,CAAC,KAAK,mBAAmB,CAAC,KAAK,MAAM,iBAAiB;GACxD,MAAM,aAAa,MAAM,KAAK,MAAM,kBAAkB,WAAW;IAC/D,GAAG;IACH;IACD,CAAC;AACF,UAAO,KAAK,YAAY,WAAW,MAAM,GAAG,MAAM,CAAC;;EAIrD,MAAM,aAAa;GAAE,GAAG;GAAS,OAAO,QAAQ;GAAG;EACnD,MAAM,CAAC,YAAY,cAAc,MAAM,QAAQ,IAAI,CACjD,KAAK,MAAM,kBAAkB,WAAW,WAAW,EACnD,KAAK,MAAM,gBAAgB,OAAO,WAAW,CAC9C,CAAC;AAGF,MAAI,WAAW,WAAW,EACxB,QAAO,KAAK,YAAY,WAAW,MAAM,GAAG,MAAM,CAAC;EAGrD,IAAI;AAEJ,MAAI,KAAK,mBAAmB,YAAY;GAItC,MAAM,QAAQ,oBAFG,WAAW,KAAI,OAAM;IAAE,IAAI,EAAE,MAAM;IAAI,OAAO,EAAE;IAAO,EAAE,EACzD,WAAW,KAAI,OAAM;IAAE,IAAI,EAAE,MAAM;IAAI,OAAO,EAAE;IAAO,EAAE,CACrB;GAErD,MAAM,4BAAY,IAAI,KAA6B;AACnD,QAAK,MAAM,KAAK,WAAY,WAAU,IAAI,EAAE,MAAM,IAAI,EAAE;AACxD,QAAK,MAAM,KAAK,WACd,KAAI,CAAC,UAAU,IAAI,EAAE,MAAM,GAAG,CAAE,WAAU,IAAI,EAAE,MAAM,IAAI,EAAE;AAG9D,kBAAe,EAAE;AACjB,QAAK,MAAM,CAAC,SAAS,eAAe,OAAO;AACzC,QAAI,aAAa,UAAU,MAAO;IAClC,MAAM,SAAS,UAAU,IAAI,QAAQ;AACrC,QAAI,OAEF,cAAa,KAAK;KAAE,GAAG;KAAQ,OAAO;KAAY,CAAC;;SAGlD;GAEL,MAAM,2BAAW,IAAI,KAAqB;AAC1C,cAAW,SAAS,GAAG,MAAM,SAAS,IAAI,EAAE,MAAM,IAAI,EAAE,CAAC;GAEzD,MAAM,2BAAW,IAAI,KAAqB;AAC1C,cAAW,SAAS,GAAG,MAAM,SAAS,IAAI,EAAE,MAAM,IAAI,EAAE,CAAC;GAEzD,MAAM,QAAQ,qBAAqB,CAAC,UAAU,SAAS,CAAC;GAExD,MAAM,4BAAY,IAAI,KAA6B;AACnD,QAAK,MAAM,KAAK,WAAY,WAAU,IAAI,EAAE,MAAM,IAAI,EAAE;AACxD,QAAK,MAAM,KAAK,WACd,KAAI,CAAC,UAAU,IAAI,EAAE,MAAM,GAAG,CAAE,WAAU,IAAI,EAAE,MAAM,IAAI,EAAE;AAI9D,kBAAe,EAAE;AACjB,QAAK,MAAM,CAAC,SAAS,cAAc,OAAO;AACxC,QAAI,aAAa,UAAU,MAAO;IAClC,MAAM,SAAS,UAAU,IAAI,QAAQ;AACrC,QAAI,OACF,cAAa,KAAK,OAAO;;;AAK/B,SAAO,KAAK,YAAY,aAAa;;;;;CAMvC,AAAQ,YAAY,SAA6C;AAC/D,MAAI,QAAQ,WAAW,EAAG,QAAO;EAEjC,MAAM,gBAAgB,KAAK,KAAK,GAAI,MAAU,KAAK,KAAK;EAExD,MAAM,UAAU,QAAQ,KAAI,MAAK;GAC/B,IAAI,QAAQ,EAAE;AAGd,OAAI,EAAE,SAAS,aAAa,EAAE,SAAS,YAAY,cACjD,UAAS;GAIX,MAAM,WAAY,EAAE,SAAiB,YAAY;AACjD,OAAI,aAAa,EAAG,UAAS;YACpB,aAAa,EAAG,UAAS;AAElC,UAAO;IAAE,GAAG;IAAG;IAAO;IACtB;AAGF,UAAQ,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AACzC,SAAO;;CAGT,AAAQ,cAAc,SAAmC;AACvD,MAAI,QAAQ,WAAW,EAAG,QAAO;AAOjC,SAA
O,gCALU,QAAQ,KAAK,GAAG,MAAM;GACrC,MAAM,SAAS,EAAE,SAAS,SAAS,EAAE,SAAS,aAAa,EAAE,SAAS,YAAY;AAClF,UAAO,cAAc,IAAI,EAAE,IAAI,OAAO,WAAW,EAAE,MAAM,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM;IACjF,CAE8C,KAAK,OAAO;;;;;;AC9XhE,MAAM,gBACJ;AAEF,MAAM,iBAAiB;AAcvB,IAAa,gBAAb,MAA2B;CACzB,AAAQ;CACR,AAAQ;CACR,AAAQ;CAER,YAAY,SAA+B;AACzC,OAAK,QAAQ,QAAQ;AACrB,OAAK,wBAAQ,IAAI,KAAK;AACtB,OAAK,eAAe,QAAQ,gBAAgB;;CAG9C,MAAM,QAAQ,OAAuC;EACnD,MAAM,WAAW,MAAM,aAAa,CAAC,MAAM;EAG3C,MAAM,SAAS,KAAK,MAAM,IAAI,SAAS;AACvC,MAAI,OACF,QAAO;GAAE,UAAU;GAAO,WAAW;GAAQ,QAAQ;GAAM;EAG7D,MAAM,EAAE,MAAM,UAAU,MAAM,aAAa;GACzC,OAAO,KAAK;GACZ,QAAQ;GACR,QAAQ;GACT,CAAC;EAEF,MAAM,YAAY,KAAK,MAAM;AAG7B,MAAI,KAAK,MAAM,QAAQ,KAAK,cAAc;GACxC,MAAM,SAAS,KAAK,MAAM,MAAM,CAAC,MAAM,CAAC;AACxC,QAAK,MAAM,OAAO,OAAO;;AAE3B,OAAK,MAAM,IAAI,UAAU,UAAU;AAEnC,SAAO;GACL,UAAU;GACV;GACA,QAAQ;GACR,YAAY,QACR;IAAE,QAAQ,MAAM;IAAc,YAAY,MAAM;IAAkB,GAClE;GACL;;CAGH,IAAI,YAAoB;AACtB,SAAO,KAAK,MAAM;;CAGpB,aAAmB;AACjB,OAAK,MAAM,OAAO;;;;;;ACxDtB,IAAI,kBAAkC;AACtC,IAAI,0BAA0B;AAC9B,MAAM,kBAAkB,MAAS;;AAGjC,SAAgB,2BAAiC;AAC/C,mBAAkB;AAClB,2BAA0B;;;;;AAM5B,eAAsB,UAAU,KAA8B;CAC5D,MAAM,WAAW,MAAM,MAAM,KAAK,EAChC,SAAS,EAAE,cAAc,iBAAiB,EAC3C,CAAC;AACF,KAAI,CAAC,SAAS,GAAI,OAAM,IAAI,MAAM,mBAAmB,IAAI,IAAI,SAAS,SAAS;AAC/E,QAAO,SAAS,MAAM;;;;;AAMxB,eAAsB,aAAa,KAAa,SAA4D;AAC1G,KAAI,SAAS,YACX,KAAI;AACF,MAAI,MAAM,kBAAkB,QAAQ,YAAY,CAC9C,QAAO,MAAM,iBAAiB,KAAK,QAAQ,YAAY;SAEnD;CAMV,MAAM,EAAE,OAAO,YAAY,gBADd,MAAM,UAAU,IAAI,EACgB,IAAI;AACrD,QAAO;EAAE;EAAO;EAAS,YAAY;EAAO;;;;;AAM9C,SAAgB,gBAAgB,MAAc,KAAiD;CAC7F,MAAM,EAAE,aAAa,UAAU,KAAK;CAEpC,MAAM,UADS,IAAI,YAAY,UAAiB,EAAE,KAAK,CAAC,CACjC,OAAO;AAC9B,QAAO;EACL,OAAO,SAAS,SAAS;EACzB,SAAS,SAAS,aAAa,MAAM,IAAI;EAC1C;;;;;AAMH,SAAgB,aAAa,MAAc,SAA2B;CACpE,MAAM,EAAE,aAAa,UAAU,KAAK;CACpC,MAAM,QAAkB,EAAE;CAC1B,MAAM,OAAO,IAAI,IAAI,QAAQ;AAE7B,MAAK,MAAM,KAAK,SAAS,iBAAiB,UAAU,CAClD,KAAI;EACF,MAAM,OAAQ,EAAU,aAAa,OAAO;AAC5C,MAAI,CAAC,KAAM;EACX,MAAM,WAAW,IAAI,IAAI,MAAM,QAAQ;AACvC,MAAI,SAAS,aAAa,KAAK,YAAY,SAAS,SAAS,WAAW,OAAO,CAC7E,OAAM,KAAK,SAAS,KAAK,MAAM,IAAI,CAAC,GAAG;SAEnC;AAKV,QAAO,CAAC,GAAG,IAAI,IAAI,MAAM,CAAC;;AAG5B,eAAe,kBAAkB,SAAmC;AAClE,KAAI,oBAAoB,QAAQ,KAAK,KAAK,GAAG,0BAA0B,gBACrE,QAAO;AAET,KAAI;AAEF,qBADY,MAAM,MAAM,GAAG,QAAQ,UAAU,EAAE,QAAQ,YAAY,QAAQ,IAAK,EAAE,CAAC,EAC7D;SAChB;AACN,oBAAkB;;AAEpB,2BAA0B,KAAK,KAAK;AACpC,QAAO;;AAGT,eAAe,iBAAiB,KAAa,SAA8C;CACzF,MAAM,MAAM,MAAM,MAAM,GAAG,QAAQ,SAAS;EAC1C,QAAQ;EACR,SAAS,EAAE,gBAAgB,oBAAoB;EAC/C,MAAM,KAAK,UAAU;GACnB,MAAM,CAAC,IAAI;GACX,gBAAgB;IAAE,MAAM;IAAiB,QAAQ,EAAE,UAAU,MAAM;IAAE;GACrE,gBAAgB;IACd,MAAM;IACN,QAAQ;KACN,YAAY;KACZ,oBAAoB;MAClB,MAAM;MACN,QAAQ,EACN,gBAAgB;OAAE,MAAM;OAAwB,QAAQ,EAAE,WAAW,KAAM;OAAE,EAC9E;MACF;KACF;IACF;GACF,CAAC;EACF,QAAQ,YAAY,QAAQ,IAAM;EACnC,CAAC;AAEF,KAAI,CAAC,IAAI,GAAI,OAAM,IAAI,MAAM,mBAAmB,IAAI,SAAS;CAE7D,MAAM,UADO,MAAM,IAAI,MAAM,EACT,UAAU;AAC9B,KAAI,CAAC,OAAQ,OAAM,IAAI,MAAM,+BAA+B;CAE5D,MAAM,WAAW,OAAO,UAAU,gBAAgB,OAAO,UAAU,gBAAgB;AACnF,KAAI,CAAC,SAAU,OAAM,IAAI,MAAM,mCAAmC;AAMlE,QAAO;EAAE,OAHU,SAAS,MAAM,cAAc,GACrB,MAAM;EAEjB,SAAS;EAAU,YAAY;EAAM;;;;;AC9HvD,IAAa,cAAb,MAAyB;CACvB,AAAQ;CACR,AAAQ;CAER,YAAY,UAA6B,SAA8B;AACrE,OAAK,WAAW;AAChB,OAAK,eAAe,EAAE,aAAa,SAAS,aAAa;;CAG3D,MAAM,UAAU,KAAa,SAA2E;EACtG,MAAM,EAAE,OAAO,SAAS,eAAe,MAAM,aAAa,KAAK,KAAK,aAAa;AAEjF,SAAO,KAAK,SAAS,OAAO;GAC1B,YAAY;GACZ,WAAW;GACX;GACA;GACA;GACA,UAAU,SAAS;GACnB,WAAW,SAAS;GACrB,CAAC;;CAGJ,MAAM,cAAc,YAAoB,SAA+C;EACrF,MAAM,WAAW,SAAS,YAAY;EACtC,MAAM,MAAM,MAAM,UAAU,WAAW;EACvC,MAAM,OAAO,KAAK,iBAAiB,IAAI,CAAC,MAAM,GAAG,SAAS;EAE1D,MAAM,OAAqB,EAAE;AAC7B,OAAK,MAAM,OAAO,KAChB,KAAI;GACF,MAAM,MAAM,MAAM,KAAK,UAAU,IAAI;AACrC,QAAK,KAAK,IAAI;UACR;AAIV,SAAO
;;CAGT,MAAM,MAAM,UAAkB,SAA+C;EAC3E,MAAM,WAAW,SAAS,YAAY;EACtC,MAAM,WAAW,SAAS,YAAY;EACtC,MAAM,0BAAU,IAAI,KAAa;EACjC,MAAM,OAAqB,EAAE;EAE7B,MAAM,QAA0C,CAAC;GAAE,KAAK;GAAU,OAAO;GAAG,CAAC;AAE7E,SAAO,MAAM,SAAS,KAAK,KAAK,SAAS,UAAU;GACjD,MAAM,OAAO,MAAM,OAAO;AAC1B,OAAI,QAAQ,IAAI,KAAK,IAAI,IAAI,KAAK,QAAQ,SAAU;AACpD,WAAQ,IAAI,KAAK,IAAI;AAErB,OAAI;IACF,MAAM,OAAO,MAAM,UAAU,KAAK,IAAI;IACtC,MAAM,EAAE,OAAO,SAAS,eAAe,MAAM,aAAa,KAAK,KAAK,KAAK,aAAa;IAEtF,MAAM,MAAM,MAAM,KAAK,SAAS,OAAO;KACrC,YAAY;KACZ,WAAW,KAAK;KAChB;KACA;KACA;KACD,CAAC;AACF,SAAK,KAAK,IAAI;AAEd,QAAI,KAAK,QAAQ,UAAU;KACzB,MAAM,QAAQ,aAAa,MAAM,KAAK,IAAI;AAC1C,UAAK,MAAM,QAAQ,MACjB,KAAI,CAAC,QAAQ,IAAI,KAAK,CACpB,OAAM,KAAK;MAAE,KAAK;MAAM,OAAO,KAAK,QAAQ;MAAG,CAAC;;WAIhD;;AAKV,SAAO;;CAGT,AAAQ,iBAAiB,KAAuB;EAC9C,MAAM,OAAiB,EAAE;EACzB,MAAM,WAAW;EACjB,IAAI;AACJ,UAAQ,QAAQ,SAAS,KAAK,IAAI,MAAM,KACtC,MAAK,KAAK,MAAM,GAAG;AAErB,SAAO,KAAK,QAAO,MAAK,CAAC,EAAE,SAAS,OAAO,CAAC;;;;;;AClGhD,IAAa,eAAb,MAA0B;CACxB,AAAQ;CAER,YAAY,UAA6B;AACvC,OAAK,WAAW;;CAGlB,MAAM,WAAW,UAAkB,OAAgB,SAAsD;EACvG,MAAM,MAAM,QAAQ,SAAS,CAAC,aAAa;EAC3C,MAAM,UAAU,MAAM,KAAK,eAAe,UAAU,IAAI;EACxD,MAAM,WAAW,SAAS,MAAM,IAAI,CAAC,KAAK,IAAI;AAE9C,SAAO,KAAK,SAAS,OAAO;GAC1B,YAAY;GACZ;GACA,OAAO,SAAS;GAChB;GACA,UAAU,SAAS;GACpB,CAAC;;CAGJ,MAAc,eAAe,UAAkB,KAA8B;AAC3E,UAAQ,KAAR;GACE,KAAK,OACH,QAAO,KAAK,WAAW,SAAS;GAClC,KAAK,QACH,QAAO,KAAK,YAAY,SAAS;GACnC,KAAK;GACL,KAAK,OACH,QAAO,KAAK,YAAY,SAAS;GACnC,KAAK;GACL,KAAK;GACL,KAAK,MACH,QAAO,SAAS,UAAU,QAAQ;GACpC,KAAK;GACL,KAAK,OACH,QAAO,KAAK,YAAY,SAAS;GACnC,QACE,QAAO,SAAS,UAAU,QAAQ;;;CAIxC,MAAc,WAAW,UAAmC;EAC1D,MAAM,EAAE,kBAAkB,gBAAgB,MAAM,OAAO;EACvD,MAAM,SAAS,MAAM,SAAS,SAAS;EAEvC,MAAM,EAAE,SAAS,MAAM,YADX,MAAM,iBAAiB,IAAI,WAAW,OAAO,CAAC,EAClB,EAAE,YAAY,MAAM,CAAC;AAC7D,SAAO;;CAGT,MAAc,YAAY,UAAmC;EAC3D,MAAM,UAAU,MAAM,OAAO;EAC7B,MAAM,SAAS,MAAM,SAAS,SAAS;AAEvC,UADe,MAAM,QAAQ,eAAe,EAAE,QAAQ,CAAC,EACzC;;CAGhB,MAAc,YAAY,UAAmC;EAC3D,MAAM,OAAO,MAAM,OAAO;EAC1B,MAAM,SAAS,MAAM,SAAS,SAAS;EACvC,MAAM,WAAW,KAAK,KAAK,QAAQ,EAAE,MAAM,UAAU,CAAC;EACtD,MAAM,QAAkB,EAAE;AAC1B,OAAK,MAAM,aAAa,SAAS,YAAY;GAC3C,MAAM,QAAQ,SAAS,OAAO;GAC9B,MAAM,MAAM,KAAK,MAAM,aAAa,MAAM;AAC1C,SAAM,KAAK,MAAM,UAAU,IAAI,MAAM;;AAEvC,SAAO,MAAM,KAAK,OAAO;;CAG3B,MAAc,YAAY,UAAmC;EAC3D,MAAM,EAAE,cAAc,MAAM,OAAO;EACnC,MAAM,EAAE,gBAAgB,MAAM,OAAO;EACrC,MAAM,OAAO,MAAM,SAAS,UAAU,QAAQ;EAC9C,MAAM,EAAE,aAAa,UAAU,KAAK;AAGpC,SAFe,IAAI,YAAY,SAAgB,CACxB,OAAO,EACd,aAAa,MAAM,IAAI;;;;;;ACjE3C,IAAa,cAAb,MAAyB;CACvB,AAAQ;CACR,AAAQ;CAER,YAAY,UAA6B,SAA8B;AACrE,OAAK,WAAW;AAChB,OAAK,eAAe,EAAE,aAAa,SAAS,aAAa;;CAG3D,MAAM,UAAU,UAAkB,UAA4B,EAAE,EAAyB;EACvF,MAAM,EACJ,WAAW,GACX,WAAW,IACX,aAAa,MACb,UAAU,KACV,eACE;EAEJ,MAAM,OAAqB,EAAE;EAC7B,MAAM,0BAAU,IAAI,KAAa;AAEjC,MAAI,YAAY;GACd,MAAM,cAAc,MAAM,KAAK,gBAAgB,SAAS;AACxD,OAAI,YAAY,SAAS,GAAG;IAC1B,MAAM,eAAe,YAAY,MAAM,GAAG,SAAS;AACnD,SAAK,MAAM,OAAO,cAAc;AAC9B,SAAI,QAAQ,IAAI,IAAI,CAAE;AACtB,aAAQ,IAAI,IAAI;AAChB,SAAI;AACF,mBAAa,KAAK,SAAS,GAAG,aAAa,QAAQ,IAAI;MACvD,MAAM,MAAM,MAAM,KAAK,WAAW,IAAI;AACtC,WAAK,KAAK,IAAI;AACd,UAAI,UAAU,EAAG,OAAM,KAAK,MAAM,QAAQ;aACpC;AAGR,SAAI,KAAK,UAAU,SAAU;;AAE/B,WAAO;;;EAIX,MAAM,QAA0C,CAAC;GAAE,KAAK;GAAU,OAAO;GAAG,CAAC;AAE7E,SAAO,MAAM,SAAS,KAAK,KAAK,SAAS,UAAU;GACjD,MAAM,OAAO,MAAM,OAAO;AAC1B,OAAI,QAAQ,IAAI,KAAK,IAAI,IAAI,KAAK,QAAQ,SAAU;AACpD,WAAQ,IAAI,KAAK,IAAI;AAErB,OAAI;AACF,iBAAa,KAAK,SAAS,GAAG,KAAK,SAAS,MAAM,SAAS,GAAG,KAAK,IAAI;IACvE,MAAM,OAAO,MAAM,UAAU,KAAK,IAAI;IACtC,MAAM,EAAE,OAAO,SAAS,eAAe,MAAM,aAAa,KAAK,KAAK,KAAK,aAAa;IAEtF,MAAM,MAAM,MAAM,KAAK,SAAS,OAAO;KACrC,YAAY;KACZ,WAAW,KAAK;KAChB;KACA;KACA;KACD,CAAC;AACF,SAAK,KAAK,IAAI;AAEd,QAAI,UAAU,EAAG,OAAM,KAAK,MAAM,QAAQ;AAE1C,QAAI,KAAK,Q
AAQ,UACf;UAAK,MAAM,QAAQ,aAAa,MAAM,KAAK,IAAI,CAC7C,KAAI,CAAC,QAAQ,IAAI,KAAK,CACpB,OAAM,KAAK;MAAE,KAAK;MAAM,OAAO,KAAK,QAAQ;MAAG,CAAC;;WAIhD;;AAKV,SAAO;;CAGT,MAAc,gBAAgB,SAAoC;AAChE,MAAI;GACF,MAAM,MAAM,IAAI,IAAI,QAAQ;GAE5B,MAAM,MAAM,MAAM,UADC,GAAG,IAAI,SAAS,IAAI,IAAI,SAAS,cACb;AACvC,UAAO,MAAM,KAAK,iBAAiB,IAAI;UACjC;AACN,UAAO,EAAE;;;CAIb,MAAc,iBAAiB,KAAgC;EAC7D,MAAM,OAAiB,EAAE;EACzB,MAAM,WAAW;EACjB,IAAI;AACJ,UAAQ,QAAQ,SAAS,KAAK,IAAI,MAAM,KACtC,MAAK,KAAK,MAAM,GAAG;AAGrB,MAAI,IAAI,SAAS,gBAAgB,IAAI,KAAK,OAAM,MAAK,EAAE,SAAS,OAAO,CAAC,EAAE;GACxE,MAAM,WAAqB,EAAE;AAC7B,QAAK,MAAM,mBAAmB,KAC5B,KAAI;IACF,MAAM,WAAW,MAAM,UAAU,gBAAgB;IACjD,MAAM,YAAsB,EAAE;IAC9B,MAAM,aAAa;IACnB,IAAI;AACJ,YAAQ,aAAa,WAAW,KAAK,SAAS,MAAM,KAClD,WAAU,KAAK,WAAW,GAAG;AAE/B,aAAS,KAAK,GAAG,UAAU,QAAO,MAAK,CAAC,EAAE,SAAS,OAAO,CAAC,CAAC;WACtD;AAIV,UAAO;;AAGT,SAAO,KAAK,QAAO,MAAK,CAAC,EAAE,SAAS,OAAO,CAAC;;CAG9C,MAAc,WAAW,KAAkC;EACzD,MAAM,EAAE,OAAO,SAAS,eAAe,MAAM,aAAa,KAAK,KAAK,aAAa;AACjF,SAAO,KAAK,SAAS,OAAO;GAC1B,YAAY;GACZ,WAAW;GACX;GACA;GACA;GACD,CAAC;;CAGJ,AAAQ,MAAM,IAA2B;AACvC,SAAO,IAAI,SAAS,YAAY,WAAW,SAAS,GAAG,CAAC;;;;;;ACzI5D,IAAa,cAAb,MAAyB;CACvB,AAAQ;CACR,AAAQ;CAER,YACE,UACA,YACA;AACA,OAAK,WAAW;AAChB,OAAK,aAAa;;CAGpB,MAAM,sBACJ,eACA,SACuB;EACvB,MAAM,SAAS,SAAS,mBAAmB;EAC3C,MAAM,WAAW,SAAS,YAAY;AAEtC,MAAI,CAAC,KAAK,WACR,OAAM,IAAI,MAAM,mDAAmD;EAGrE,MAAM,WAAsB,EAAE;AAC9B,OAAK,MAAM,SAAS,eAAe;GACjC,MAAM,QAAQ,MAAM,KAAK,WAAW,MAAM;AAC1C,YAAS,KAAK,GAAG,MAAM;AACvB,OAAI,SAAS,UAAU,SAAU;;EAInC,MAAM,WAAW,SACd,QAAQ,MAAM,EAAE,OAAO,UAAU,UAAU,EAAE,SAAS,MAAM,CAAC,SAAS,EAAE,CACxE,MAAM,GAAG,SAAS;EAErB,MAAM,OAAqB,EAAE;AAC7B,OAAK,MAAM,QAAQ,UAAU;GAC3B,MAAM,MAAM,MAAM,KAAK,SAAS,UAAU,KAAK,UAAU,KAAK,QAAQ,EACpE,QAAQ,aACT,CAAC;AACF,QAAK,KAAK,IAAI;;AAGhB,SAAO;;CAGT,MAAM,cAAc,OAAkB,SAAqD;EACzF,MAAM,SAAS,SAAS,mBAAmB;EAC3C,MAAM,WAAW,MAAM,QACpB,MAAM,EAAE,OAAO,UAAU,UAAU,EAAE,SAAS,MAAM,CAAC,SAAS,EAChE;EAED,MAAM,OAAqB,EAAE;AAC7B,OAAK,MAAM,QAAQ,UAAU;GAC3B,MAAM,MAAM,MAAM,KAAK,SAAS,UAAU,KAAK,UAAU,KAAK,QAAQ,EACpE,QAAQ,aACT,CAAC;AACF,QAAK,KAAK,IAAI;;AAEhB,SAAO"}
package/package.json
ADDED
@@ -0,0 +1,42 @@
+{
+  "name": "@operor/knowledge",
+  "version": "0.1.0",
+  "description": "Knowledge base with vector search, retrieval pipeline, and document ingestors for Agent OS",
+  "type": "module",
+  "main": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.ts",
+      "import": "./dist/index.js"
+    }
+  },
+  "dependencies": {
+    "@ai-sdk/cohere": "^3.0.0",
+    "@ai-sdk/google": "^3.0.0",
+    "@ai-sdk/mistral": "^3.0.0",
+    "@ai-sdk/openai": "^3.0.0",
+    "@langchain/textsplitters": "^1.0.1",
+    "@mozilla/readability": "^0.6.0",
+    "ai": "^6.0.0",
+    "better-sqlite3": "^12.0.0",
+    "linkedom": "^0.18.12",
+    "mammoth": "^1.11.0",
+    "sqlite-vec": "^0.1.7-alpha.2",
+    "unpdf": "^1.4.0",
+    "xlsx": "^0.18.5",
+    "@operor/core": "0.1.0"
+  },
+  "devDependencies": {
+    "@types/better-sqlite3": "^7.6.13",
+    "@types/node": "^22.0.0",
+    "tsdown": "^0.20.3",
+    "typescript": "^5.7.0",
+    "vitest": "^4.0.0"
+  },
+  "scripts": {
+    "build": "tsdown",
+    "test": "vitest run",
+    "test:watch": "vitest"
+  }
+}
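
This manifest declares an ESM-only package with a single entry point: `main`, `types`, and the `exports` map all resolve to `dist/index.js` / `dist/index.d.ts`. A minimal consumer sketch follows, assuming `EmbeddingService` and `SQLiteKnowledgeStore` are among the names re-exported from `src/index.ts` (the actual export list is not shown in this section):

```ts
// Hypothetical usage; the exact export names are an assumption.
import { EmbeddingService, SQLiteKnowledgeStore } from '@operor/knowledge';

// Store dimensions should match the embedding provider's output size.
const store = new SQLiteKnowledgeStore('./knowledge.db', 1536);
const embeddings = new EmbeddingService({
  provider: 'openai',
  apiKey: process.env.OPENAI_API_KEY,
});
```
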
package/src/EmbeddingService.ts
ADDED
@@ -0,0 +1,92 @@
+import { embed, embedMany } from 'ai';
+import { createOpenAI } from '@ai-sdk/openai';
+import { createGoogleGenerativeAI } from '@ai-sdk/google';
+import { mistral } from '@ai-sdk/mistral';
+import { cohere } from '@ai-sdk/cohere';
+
+export interface EmbeddingServiceConfig {
+  provider: 'openai' | 'google' | 'mistral' | 'cohere' | 'ollama';
+  apiKey?: string;
+  model?: string;
+  baseURL?: string;
+  dimensions?: number;
+}
+
+export class EmbeddingService {
+  private config: EmbeddingServiceConfig;
+
+  constructor(config: EmbeddingServiceConfig) {
+    this.config = config;
+  }
+
+  private getModel() {
+    const { provider, apiKey, baseURL, model } = this.config;
+
+    switch (provider) {
+      case 'openai': {
+        const openai = createOpenAI({ apiKey, baseURL });
+        return openai.embedding(model || 'text-embedding-3-small', {
+          dimensions: this.config.dimensions,
+        });
+      }
+      case 'google': {
+        const google = createGoogleGenerativeAI({ apiKey, baseURL });
+        return google.textEmbeddingModel(model || 'text-embedding-004');
+      }
+      case 'mistral': {
+        return mistral.embedding(model || 'mistral-embed', { apiKey });
+      }
+      case 'cohere': {
+        return cohere.embedding(model || 'embed-english-v3.0', { apiKey });
+      }
+      case 'ollama': {
+        const ollama = createOpenAI({
+          apiKey: apiKey || 'ollama',
+          baseURL: baseURL || 'http://localhost:11434/v1',
+        });
+        return ollama.embedding(model || 'nomic-embed-text');
+      }
+      default:
+        throw new Error(`Unknown embedding provider: ${provider}`);
+    }
+  }
+
+  get provider(): string {
+    return this.config.provider;
+  }
+
+  get dimensions(): number {
+    if (this.config.dimensions) return this.config.dimensions;
+    return EmbeddingService.defaultDimensions(this.config.provider, this.config.model);
+  }
+
+  static defaultDimensions(provider: string, model?: string): number {
+    switch (provider) {
+      case 'openai':
+        return 1536; // text-embedding-3-small
+      case 'google':
+        return 768; // text-embedding-004
+      case 'mistral':
+        return 1024; // mistral-embed
+      case 'cohere':
+        return 1024; // embed-english-v3.0
+      case 'ollama':
+        return 768; // nomic-embed-text (varies by model)
+      default:
+        return 1536;
+    }
+  }
+
+  async embed(text: string): Promise<number[]> {
+    const model = this.getModel();
+    const result = await embed({ model, value: text });
+    return result.embedding;
+  }
+
+  async embedMany(texts: string[]): Promise<number[][]> {
+    if (texts.length === 0) return [];
+    const model = this.getModel();
+    const result = await embedMany({ model, values: texts });
+    return result.embeddings;
+  }
+}
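
For context, a minimal usage sketch of the service above, again assuming it is re-exported from the package root; the defaults shown (model names, the local Ollama base URL, the dimension values) all come from `getModel()` and `defaultDimensions()`:

```ts
import { EmbeddingService } from '@operor/knowledge'; // export path assumed

// Sketch only: a local Ollama instance is assumed to be running at the
// default baseURL (http://localhost:11434/v1).
const embeddings = new EmbeddingService({ provider: 'ollama' });

console.log(embeddings.dimensions); // 768 (nomic-embed-text default)

const vector = await embeddings.embed('How do I reset my password?');
console.log(vector.length);

// embedMany() embeds a batch of texts in one call via the ai SDK.
const vectors = await embeddings.embedMany(['first chunk', 'second chunk']);
console.log(vectors.length); // 2
```

One detail worth noting from the code: the `dimensions` config option only changes the actual embedding size for the OpenAI provider (it is passed to `openai.embedding()`); for the other providers it merely overrides what the `dimensions` getter reports, so a vector store sized from that getter should use a model whose true output matches.
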