@vectororm/core 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/index.d.mts +4 -4
- package/dist/index.d.ts +4 -4
- package/dist/index.js.map +1 -1
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -3
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/metadata/constants.ts","../src/metadata/builder.ts","../src/filters/translator.ts","../src/adapters/vector-db-adapter.ts","../src/query/filter-builder.ts","../src/query/rag-query-composer.ts","../src/embedders/embedder.ts","../src/llm/llm-client.ts","../src/llm/mock-llm.ts","../src/enrichment/classifiers/keyword-classifier.ts","../src/enrichment/classifiers/zero-shot-classifier.ts","../src/enrichment/classifiers/embedding-classifier.ts","../src/enrichment/classifiers/llm-classifier.ts","../src/enrichment/enrichment-pipeline.ts","../src/ingestion/chunkers/text-chunker.ts","../src/ingestion/chunkers/recursive-chunker.ts","../src/ingestion/ingestion-pipeline.ts","../src/ingestion/loaders/text-loader.ts","../src/ingestion/loaders/pdf-loader.ts","../src/ingestion/loaders/docx-loader.ts","../src/ingestion/loaders/html-loader.ts","../src/ingestion/loaders/loader-registry.ts","../src/ingestion/chunkers/fixed-chunker.ts","../src/ingestion/chunkers/sentence-chunker.ts","../src/client/rag-client.ts"],"sourcesContent":["/**\n * Metadata field prefixes for the three axes of Glyph's schema.\n *\n * These prefixes separate framework fields from user-defined metadata:\n * - __v_: Vertical axis (document identity)\n * - __h_: Horizontal axis (content/theme identity)\n * - __s_: Structural axis (position/hierarchy)\n */\nexport const METADATA_PREFIXES = {\n VERTICAL: '__v_',\n HORIZONTAL: '__h_',\n STRUCTURAL: '__s_',\n} as const;\n\n/**\n * Vertical axis fields - identify WHICH document a chunk belongs to.\n */\nexport const VerticalFields = {\n /** Unique document identifier */\n DOC_ID: '__v_doc_id',\n\n /** Original source path/URL */\n SOURCE: '__v_source',\n\n /** Logical partition key (for filtering by document subsets) */\n PARTITION: '__v_partition',\n\n /** Document type classification */\n DOC_TYPE: '__v_doc_type',\n\n /** Arbitrary vertical tags */\n TAGS: '__v_tags',\n} as const;\n\n/**\n * Horizontal axis fields - identify WHAT topic/theme a chunk covers.\n */\nexport const HorizontalFields = {\n /** Primary theme classification */\n THEME: '__h_theme',\n\n /** Multiple themes (if applicable) */\n THEMES: '__h_themes',\n\n /** Classification confidence score */\n THEME_CONFIDENCE: '__h_theme_confidence',\n\n /** Hierarchical section path (e.g., \"Chapter 3/Pricing/Rates\") */\n SECTION_PATH: '__h_section_path',\n\n /** Depth level in hierarchy (0 = root) */\n SECTION_LEVEL: '__h_section_level',\n\n /** Section header text */\n SECTION_TITLE: '__h_section_title',\n} as const;\n\n/**\n * Structural axis fields - track chunk position and relationships.\n */\nexport const StructuralFields = {\n /** Position in document (0-indexed) */\n CHUNK_INDEX: '__s_chunk_index',\n\n /** Parent chunk ID (for hierarchical chunking) */\n PARENT_ID: '__s_parent_id',\n\n /** Whether this chunk has children */\n HAS_CHILDREN: '__s_has_children',\n\n /** Total chunks in this document */\n TOTAL_CHUNKS: '__s_total_chunks',\n} as const;\n","import { METADATA_PREFIXES } from './constants';\n\n/**\n * MetadataBuilder provides a fluent API for constructing metadata objects\n * with proper V/H/S prefixes and type safety.\n *\n * Example:\n * ```typescript\n * const metadata = new MetadataBuilder()\n * .vertical({ doc_id: 'doc123', source: 'file.pdf' })\n * .horizontal({ theme: 'pricing' })\n * .structural({ chunk_index: 0, total_chunks: 10 })\n * .custom({ author: 'John Doe' })\n * .build();\n * ```\n *\n * Features:\n * - Fluent chaining API\n * - Automatic prefix application\n * - Skips 
undefined values\n * - Returns immutable copy on build()\n */\nexport class MetadataBuilder {\n private metadata: Record<string, any> = {};\n\n /**\n * Add vertical axis metadata (document identity).\n * Automatically prefixes fields with '__v_'.\n *\n * @param fields - Vertical metadata fields (doc_id, source, partition, etc.)\n * @returns This builder for chaining\n */\n vertical(fields: Record<string, any>): this {\n for (const [key, value] of Object.entries(fields)) {\n if (value !== undefined) {\n this.metadata[`${METADATA_PREFIXES.VERTICAL}${key}`] = value;\n }\n }\n return this;\n }\n\n /**\n * Add horizontal axis metadata (theme/section identity).\n * Automatically prefixes fields with '__h_'.\n *\n * @param fields - Horizontal metadata fields (theme, section_path, etc.)\n * @returns This builder for chaining\n */\n horizontal(fields: Record<string, any>): this {\n for (const [key, value] of Object.entries(fields)) {\n if (value !== undefined) {\n this.metadata[`${METADATA_PREFIXES.HORIZONTAL}${key}`] = value;\n }\n }\n return this;\n }\n\n /**\n * Add structural axis metadata (position/hierarchy).\n * Automatically prefixes fields with '__s_'.\n *\n * @param fields - Structural metadata fields (chunk_index, parent_id, etc.)\n * @returns This builder for chaining\n */\n structural(fields: Record<string, any>): this {\n for (const [key, value] of Object.entries(fields)) {\n if (value !== undefined) {\n this.metadata[`${METADATA_PREFIXES.STRUCTURAL}${key}`] = value;\n }\n }\n return this;\n }\n\n /**\n * Add custom user-defined metadata.\n * Fields are added as-is without any prefix.\n *\n * @param fields - Custom metadata fields\n * @returns This builder for chaining\n */\n custom(fields: Record<string, any>): this {\n for (const [key, value] of Object.entries(fields)) {\n if (value !== undefined) {\n this.metadata[key] = value;\n }\n }\n return this;\n }\n\n /**\n * Build and return the complete metadata object.\n * Returns a copy to prevent external modification.\n *\n * @returns Immutable copy of the metadata object\n */\n build(): Record<string, any> {\n return { ...this.metadata };\n }\n}\n","import type {\n UniversalFilter,\n FilterCondition,\n AndFilter,\n OrFilter,\n ShorthandFilter,\n FilterOperator\n} from './types';\n\n/**\n * Valid filter operators.\n */\nconst VALID_OPERATORS: FilterOperator[] = [\n 'eq', 'neq', 'in', 'nin', 'gt', 'gte', 'lt', 'lte', 'contains', 'exists'\n];\n\n/**\n * Translates filters between formats and validates structure.\n */\nexport class FilterTranslator {\n /**\n * Normalize any filter input to standard UniversalFilter format.\n *\n * Handles:\n * - Standard format (pass through)\n * - Shorthand format (convert to standard)\n * - Operator suffixes (field__op syntax)\n */\n static normalize(input: ShorthandFilter | UniversalFilter): UniversalFilter {\n // Already standard format\n if (this.isStandardFormat(input)) {\n return input as UniversalFilter;\n }\n\n // Convert shorthand\n return this.fromShorthand(input as ShorthandFilter);\n }\n\n /**\n * Validate filter structure and operators.\n *\n * Throws error if filter is invalid.\n */\n static validate(filter: UniversalFilter): void {\n if (this.isCompound(filter)) {\n const compound = filter as AndFilter | OrFilter;\n const conditions = 'and' in compound ? 
compound.and : compound.or;\n\n if (!Array.isArray(conditions) || conditions.length === 0) {\n throw new Error('Compound filter must have at least one condition');\n }\n\n conditions.forEach(c => this.validate(c));\n } else {\n const condition = filter as FilterCondition;\n\n if (!condition.field || typeof condition.field !== 'string') {\n throw new Error('Filter field must be a non-empty string');\n }\n\n if (!VALID_OPERATORS.includes(condition.op)) {\n throw new Error(`Invalid filter operator: ${condition.op}`);\n }\n\n if (condition.value === undefined) {\n throw new Error('Filter value is required');\n }\n }\n }\n\n /**\n * Check if filter is compound (AND/OR).\n */\n static isCompound(filter: UniversalFilter): boolean {\n return 'and' in filter || 'or' in filter;\n }\n\n /**\n * Check if input is already in standard format.\n */\n private static isStandardFormat(input: any): boolean {\n if (!input || typeof input !== 'object') {\n return false;\n }\n\n // Check for compound filter\n if ('and' in input || 'or' in input) {\n return true;\n }\n\n // Check for filter condition\n if ('field' in input && 'op' in input && 'value' in input) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Convert shorthand format to standard.\n */\n private static fromShorthand(shorthand: ShorthandFilter): UniversalFilter {\n const entries = Object.entries(shorthand);\n\n if (entries.length === 0) {\n throw new Error('Cannot convert empty shorthand filter object');\n }\n\n const conditions: FilterCondition[] = [];\n\n for (const [key, value] of entries) {\n // Parse field__op syntax\n let field: string;\n let op: FilterOperator;\n\n if (key.includes('__') && !key.startsWith('__')) {\n // Has operator suffix\n const lastIndex = key.lastIndexOf('__');\n field = key.substring(0, lastIndex);\n const extractedOp = key.substring(lastIndex + 2);\n\n if (!VALID_OPERATORS.includes(extractedOp as FilterOperator)) {\n throw new Error(`Invalid filter operator in shorthand: ${extractedOp}`);\n }\n\n op = extractedOp as FilterOperator;\n } else {\n // Implicit eq\n field = key;\n op = 'eq';\n }\n\n conditions.push({ field, op, value });\n }\n\n // Single condition - return as-is\n if (conditions.length === 1) {\n return conditions[0];\n }\n\n // Multiple conditions - wrap in AND\n return { and: conditions };\n }\n}\n","import type { VectorRecord, SearchResult } from '../types';\nimport type { UniversalFilter } from '../filters';\nimport type { CollectionStats, MetadataUpdate, DistanceMetric } from './types';\n\n/**\n * Abstract base class for all vector database adapters.\n *\n * This is the KEY abstraction that enables database-agnostic operations.\n * Each database (Pinecone, Chroma, Qdrant, etc.) implements this interface,\n * allowing the SDK to work with any vector database.\n *\n * Design principles:\n * 1. All methods are abstract (must be implemented by subclasses)\n * 2. Capability flags have default implementations (can be overridden)\n * 3. Universal filter translation is adapter-specific\n * 4. 
Async iteration enables efficient enrichment pipelines\n *\n * @abstract\n */\nexport abstract class VectorDBAdapter {\n // ============================================================================\n // CONNECTION MANAGEMENT\n // ============================================================================\n\n /**\n * Connect to the vector database.\n *\n * Initialize client, authenticate, verify connection.\n */\n abstract connect(): Promise<void>;\n\n /**\n * Disconnect from the vector database.\n *\n * Clean up resources, close connections.\n */\n abstract disconnect(): Promise<void>;\n\n /**\n * Check if currently connected to the database.\n */\n abstract isConnected(): Promise<boolean>;\n\n // ============================================================================\n // COLLECTION MANAGEMENT\n // ============================================================================\n\n /**\n * Create a new vector collection.\n *\n * @param name - Collection name\n * @param dimension - Vector dimension\n * @param metric - Distance metric (default: cosine)\n */\n abstract createCollection(\n name: string,\n dimension: number,\n metric?: DistanceMetric\n ): Promise<void>;\n\n /**\n * Delete a collection and all its vectors.\n *\n * @param name - Collection name\n */\n abstract deleteCollection(name: string): Promise<void>;\n\n /**\n * Check if a collection exists.\n *\n * @param name - Collection name\n */\n abstract collectionExists(name: string): Promise<boolean>;\n\n /**\n * Get statistics about a collection.\n *\n * @param name - Collection name\n */\n abstract getCollectionStats(name: string): Promise<CollectionStats>;\n\n // ============================================================================\n // VECTOR OPERATIONS\n // ============================================================================\n\n /**\n * Upsert (insert or update) vector records.\n *\n * This is the primary method for adding vectors to the database.\n * If a record with the same ID exists, it is updated.\n *\n * @param collection - Collection name\n * @param records - Vector records to upsert\n */\n abstract upsert(collection: string, records: VectorRecord[]): Promise<void>;\n\n /**\n * Fetch vector records by ID.\n *\n * @param collection - Collection name\n * @param ids - Record IDs to fetch\n * @returns Array of matching records (may be empty)\n */\n abstract fetch(collection: string, ids: string[]): Promise<VectorRecord[]>;\n\n /**\n * Delete vector records by ID.\n *\n * @param collection - Collection name\n * @param ids - Record IDs to delete\n */\n abstract delete(collection: string, ids: string[]): Promise<void>;\n\n // ============================================================================\n // METADATA OPERATIONS (CRITICAL FOR ENRICHMENT)\n // ============================================================================\n\n /**\n * Update metadata for existing records without re-uploading vectors.\n *\n * This is CRITICAL for enrichment pipelines where we need to:\n * 1. Insert initial vectors with basic metadata\n * 2. Later enrich with vertical/horizontal metadata\n * 3. 
Avoid re-uploading large embedding vectors\n *\n * @param collection - Collection name\n * @param updates - Metadata updates to apply\n */\n abstract updateMetadata(\n collection: string,\n updates: MetadataUpdate[]\n ): Promise<void>;\n\n // ============================================================================\n // SEARCH OPERATIONS\n // ============================================================================\n\n /**\n * Search for similar vectors.\n *\n * @param collection - Collection name\n * @param queryVector - Query vector to search with\n * @param options - Search options\n * @returns Search results\n */\n abstract search(\n collection: string,\n queryVector: number[],\n options?: {\n topK?: number;\n filter?: UniversalFilter;\n includeMetadata?: boolean;\n includeValues?: boolean;\n }\n ): Promise<SearchResult>;\n\n // ============================================================================\n // FILTER TRANSLATION (KEY FOR DB AGNOSTICISM)\n // ============================================================================\n\n /**\n * Translate universal filter to database-specific filter format.\n *\n * This is the KEY method that enables database-agnostic filtering.\n * Each adapter translates the universal filter to its native format:\n *\n * - Pinecone: {field: {$eq: value}}\n * - Qdrant: {must: [{key: field, match: {value}}]}\n * - Chroma: {field: value}\n *\n * @param filter - Universal filter\n * @returns Database-specific filter object\n */\n abstract translateFilter(filter: UniversalFilter): any;\n\n // ============================================================================\n // ITERATION (FOR ENRICHMENT PIPELINE)\n // ============================================================================\n\n /**\n * Iterate over all vectors in a collection in batches.\n *\n * This enables efficient enrichment pipelines:\n * 1. Fetch vectors in batches\n * 2. Enrich each batch with metadata\n * 3. 
Update metadata back to DB\n *\n * @param collection - Collection name\n * @param options - Iteration options\n * @yields Batches of vector records\n */\n abstract iterate(\n collection: string,\n options?: {\n batchSize?: number;\n filter?: UniversalFilter;\n }\n ): AsyncIterableIterator<VectorRecord[]>;\n\n // ============================================================================\n // CAPABILITY FLAGS (WITH DEFAULT IMPLEMENTATIONS)\n // ============================================================================\n\n /**\n * Whether this adapter supports metadata updates without re-uploading vectors.\n *\n * Default: false (must re-upload entire record)\n * Override to return true if your DB supports partial updates.\n */\n supportsMetadataUpdate(): boolean {\n return false;\n }\n\n /**\n * Whether this adapter supports filtering during search.\n *\n * Default: false (no filtering support)\n * Override to return true if your DB supports metadata filtering.\n */\n supportsFiltering(): boolean {\n return false;\n }\n\n /**\n * Whether this adapter supports batch operations efficiently.\n *\n * Default: false (single operations only)\n * Override to return true if your DB supports batch upsert/delete.\n */\n supportsBatchOperations(): boolean {\n return false;\n }\n}\n","import type { UniversalFilter, AndFilter } from '../filters/types';\nimport { FilterTranslator } from '../filters/translator';\n\n/**\n * FilterBuilder - Utility for combining multiple filters with fluent API.\n *\n * Provides a convenient way to combine vertical, horizontal, and custom filters\n * into a single UniversalFilter with AND logic.\n *\n * @example\n * ```typescript\n * const filter = new FilterBuilder()\n * .withVerticalFilter({ field: 'doc_id', op: 'eq', value: 'doc123' })\n * .withHorizontalFilter({ field: 'theme', op: 'eq', value: 'legal' })\n * .build();\n * ```\n */\nexport class FilterBuilder {\n private verticalFilter?: UniversalFilter;\n private horizontalFilter?: UniversalFilter;\n private customFilter?: UniversalFilter;\n\n /**\n * Add a vertical (document-level) filter.\n *\n * @param filter - The vertical filter to add (standard or shorthand format)\n * @returns This builder for method chaining\n */\n withVerticalFilter(filter: UniversalFilter | Record<string, any>): this {\n this.verticalFilter = FilterTranslator.normalize(filter);\n return this;\n }\n\n /**\n * Add a horizontal (theme-level) filter.\n *\n * @param filter - The horizontal filter to add (standard or shorthand format)\n * @returns This builder for method chaining\n */\n withHorizontalFilter(filter: UniversalFilter | Record<string, any>): this {\n this.horizontalFilter = FilterTranslator.normalize(filter);\n return this;\n }\n\n /**\n * Add a custom user-defined filter.\n *\n * @param filter - The custom filter to add (standard or shorthand format)\n * @returns This builder for method chaining\n */\n withCustomFilter(filter: UniversalFilter | Record<string, any>): this {\n this.customFilter = FilterTranslator.normalize(filter);\n return this;\n }\n\n /**\n * Build the combined filter.\n *\n * Combination logic:\n * - If no filters: returns undefined\n * - If single filter: returns it directly\n * - If multiple filters: combines with AND logic\n *\n * @returns The combined filter, or undefined if no filters were added\n */\n build(): UniversalFilter | undefined {\n const filters: UniversalFilter[] = [];\n\n if (this.verticalFilter) {\n filters.push(this.verticalFilter);\n }\n\n if (this.horizontalFilter) {\n 
filters.push(this.horizontalFilter);\n }\n\n if (this.customFilter) {\n filters.push(this.customFilter);\n }\n\n // No filters\n if (filters.length === 0) {\n return undefined;\n }\n\n // Single filter - return as-is\n if (filters.length === 1) {\n return filters[0];\n }\n\n // Multiple filters - combine with AND\n return { and: filters } as AndFilter;\n }\n}\n","import type { VectorDBAdapter } from '../adapters/vector-db-adapter';\nimport type { Embedder } from '../embedders/embedder';\nimport type { RetrievalParams, RetrievalResult } from './types';\nimport type { VectorRecord } from '../types/vector-record';\nimport { FilterBuilder } from './filter-builder';\nimport { VerticalFields, HorizontalFields } from '../metadata/constants';\n\n/**\n * RAGQueryComposer - Main orchestrator for retrieval operations.\n *\n * Coordinates between embedder and vector database adapter to perform\n * semantic search with filtering. Provides specialized methods for\n * grouping results by vertical (document) or horizontal (theme) dimensions.\n *\n * @example\n * ```typescript\n * const composer = new RAGQueryComposer(adapter, embedder);\n *\n * // Basic retrieval\n * const result = await composer.retrieve({\n * query: 'pricing information',\n * collection: 'documents',\n * topK: 10\n * });\n *\n * // Retrieval with filters\n * const filtered = await composer.retrieve({\n * query: 'pricing information',\n * collection: 'documents',\n * topK: 10,\n * verticalFilters: { doc_id: 'contract-123' },\n * horizontalFilters: { theme: 'legal' }\n * });\n *\n * // Grouped by document\n * const byDocument = await composer.retrieveVertical({\n * query: 'pricing information',\n * collection: 'documents',\n * topK: 10\n * });\n * ```\n */\nexport class RAGQueryComposer {\n /**\n * Create a new RAGQueryComposer.\n *\n * @param adapter - Vector database adapter for search operations\n * @param embedder - Embedder for converting text queries to vectors\n */\n constructor(\n private readonly adapter: VectorDBAdapter,\n private readonly embedder: Embedder\n ) {}\n\n /**\n * Main retrieval method.\n *\n * Performs semantic search with optional filtering:\n * 1. Embeds query text using embedder\n * 2. Builds combined filter using FilterBuilder\n * 3. Calls adapter.search() with query vector and filter\n * 4. Returns results with filter information\n *\n * @param params - Retrieval parameters\n * @returns Retrieval result with records and filter information\n */\n async retrieve(params: RetrievalParams): Promise<RetrievalResult> {\n // 1. Embed the query text\n const queryVector = await this.embedder.embed(params.query);\n\n // 2. Build combined filter using FilterBuilder\n const filterBuilder = new FilterBuilder();\n\n if (params.verticalFilters) {\n filterBuilder.withVerticalFilter(params.verticalFilters);\n }\n\n if (params.horizontalFilters) {\n filterBuilder.withHorizontalFilter(params.horizontalFilters);\n }\n\n if (params.customFilters) {\n filterBuilder.withCustomFilter(params.customFilters);\n }\n\n const combinedFilter = filterBuilder.build();\n\n // 3. Call adapter.search() with query vector and filter\n const searchResult = await this.adapter.search(\n params.collection,\n queryVector,\n {\n topK: params.topK,\n filter: combinedFilter,\n includeMetadata: true,\n includeValues: params.includeEmbeddings\n }\n );\n\n // 4. 
Return RetrievalResult with records and filters applied\n return {\n records: searchResult.records,\n query: params.query,\n filtersApplied: {\n ...(params.verticalFilters && { vertical: params.verticalFilters }),\n ...(params.horizontalFilters && { horizontal: params.horizontalFilters }),\n ...(params.customFilters && { custom: params.customFilters })\n }\n };\n }\n\n /**\n * Retrieve and group results by document ID.\n *\n * Calls retrieve() and organizes results into a Map keyed by __v_doc_id.\n * Records without a doc_id are excluded.\n *\n * @param params - Retrieval parameters\n * @returns Map of document ID to array of records\n */\n async retrieveVertical(\n params: RetrievalParams\n ): Promise<Map<string, VectorRecord[]>> {\n const result = await this.retrieve(params);\n\n const grouped = new Map<string, VectorRecord[]>();\n\n for (const record of result.records) {\n const docId = record.metadata[VerticalFields.DOC_ID];\n\n if (typeof docId === 'string') {\n if (!grouped.has(docId)) {\n grouped.set(docId, []);\n }\n grouped.get(docId)!.push(record);\n }\n }\n\n return grouped;\n }\n\n /**\n * Retrieve and group results by theme.\n *\n * Calls retrieve() and organizes results into a Map keyed by __h_theme.\n * Records without a theme are excluded.\n *\n * @param params - Retrieval parameters\n * @returns Map of theme to array of records\n */\n async retrieveHorizontal(\n params: RetrievalParams\n ): Promise<Map<string, VectorRecord[]>> {\n const result = await this.retrieve(params);\n\n const grouped = new Map<string, VectorRecord[]>();\n\n for (const record of result.records) {\n const theme = record.metadata[HorizontalFields.THEME];\n\n if (typeof theme === 'string') {\n if (!grouped.has(theme)) {\n grouped.set(theme, []);\n }\n grouped.get(theme)!.push(record);\n }\n }\n\n return grouped;\n }\n}\n","/**\n * Abstract base class for text embedding models.\n *\n * This abstraction allows the VectorORM to work with any embedding provider\n * (OpenAI, Cohere, HuggingFace, etc.) 
by implementing a consistent interface.\n *\n * Implementations must provide:\n * - `embed()`: Convert a single text string into a vector embedding\n * - `embedBatch()`: Convert multiple texts into embeddings efficiently\n * - `dimensions`: The size of the embedding vectors produced\n * - `modelName`: Identifier for the embedding model being used\n *\n * @example\n * ```typescript\n * class OpenAIEmbedder extends Embedder {\n * get dimensions(): number { return 1536; }\n * get modelName(): string { return 'text-embedding-ada-002'; }\n *\n * async embed(text: string): Promise<number[]> {\n * // Call OpenAI API\n * }\n *\n * async embedBatch(texts: string[]): Promise<number[][]> {\n * // Batch call to OpenAI API\n * }\n * }\n * ```\n */\nexport abstract class Embedder {\n /**\n * The dimensionality of embeddings produced by this model.\n * Must be consistent across all embeddings from the same model.\n */\n abstract get dimensions(): number;\n\n /**\n * Identifier for the embedding model.\n * Used for tracking which model generated embeddings.\n */\n abstract get modelName(): string;\n\n /**\n * Embed a single text string into a vector.\n *\n * @param text - The text to embed\n * @returns A promise that resolves to a number array representing the embedding\n */\n abstract embed(text: string): Promise<number[]>;\n\n /**\n * Embed multiple texts into vectors efficiently.\n * Implementations should maintain the order of input texts in the output.\n *\n * @param texts - Array of texts to embed\n * @returns A promise that resolves to an array of embeddings, one per input text\n */\n abstract embedBatch(texts: string[]): Promise<number[][]>;\n\n /**\n * Constructor is protected to prevent direct instantiation of abstract class.\n * Subclasses can call super() in their constructors.\n */\n protected constructor() {\n if (new.target === Embedder) {\n throw new Error('Cannot instantiate abstract class Embedder directly');\n }\n }\n}\n","import type { GenerateOptions } from './types';\n\n/**\n * Abstract base class for LLM (Large Language Model) clients.\n *\n * This abstraction allows the VectorORM to work with any LLM provider\n * (OpenAI, Anthropic, Google, etc.) 
by implementing a consistent interface.\n *\n * Implementations must provide:\n * - `generate()`: Generate text from a prompt\n * - `generateJSON<T>()`: Generate structured JSON output\n * - `generateBatch()`: Generate multiple responses efficiently\n * - `modelName`: Identifier for the LLM model being used\n * - `provider`: Name of the LLM provider\n *\n * @example\n * ```typescript\n * class OpenAIClient extends LLMClient {\n * get modelName(): string { return 'gpt-4'; }\n * get provider(): string { return 'openai'; }\n *\n * async generate(prompt: string, options?: GenerateOptions): Promise<string> {\n * // Call OpenAI API\n * }\n *\n * async generateJSON<T>(prompt: string, options?: GenerateOptions): Promise<T> {\n * // Call OpenAI API with JSON mode\n * }\n *\n * async generateBatch(prompts: string[], options?: GenerateOptions): Promise<string[]> {\n * // Batch call to OpenAI API\n * }\n * }\n * ```\n */\nexport abstract class LLMClient {\n /**\n * Identifier for the LLM model.\n * Used for tracking which model generated responses.\n */\n abstract get modelName(): string;\n\n /**\n * Name of the LLM provider.\n * Examples: 'openai', 'anthropic', 'google', 'mock'\n */\n abstract get provider(): string;\n\n /**\n * Generate text from a prompt.\n *\n * @param prompt - The text prompt to send to the LLM\n * @param options - Optional generation parameters\n * @returns A promise that resolves to the generated text\n */\n abstract generate(prompt: string, options?: GenerateOptions): Promise<string>;\n\n /**\n * Generate structured JSON output from a prompt.\n * The LLM will be instructed to return valid JSON that matches type T.\n *\n * @param prompt - The text prompt to send to the LLM\n * @param options - Optional generation parameters\n * @returns A promise that resolves to the parsed JSON object\n */\n abstract generateJSON<T>(\n prompt: string,\n options?: GenerateOptions\n ): Promise<T>;\n\n /**\n * Generate multiple responses efficiently.\n * Implementations should maintain the order of input prompts in the output.\n *\n * @param prompts - Array of prompts to process\n * @param options - Optional generation parameters\n * @returns A promise that resolves to an array of responses, one per input prompt\n */\n abstract generateBatch(\n prompts: string[],\n options?: GenerateOptions\n ): Promise<string[]>;\n\n /**\n * Constructor is protected to prevent direct instantiation of abstract class.\n * Subclasses can call super() in their constructors.\n */\n protected constructor() {\n if (new.target === LLMClient) {\n throw new Error('Cannot instantiate abstract class LLMClient directly');\n }\n }\n}\n","import { LLMClient } from './llm-client';\nimport type { GenerateOptions } from './types';\n\n/**\n * MockLLM for testing purposes only.\n * Returns canned responses that can be set programmatically.\n *\n * @example\n * ```typescript\n * const llm = new MockLLM();\n * llm.setResponse('Hello, world!');\n * const result = await llm.generate('Say hello'); // Returns 'Hello, world!'\n * ```\n */\nexport class MockLLM extends LLMClient {\n private _response: string = '';\n\n constructor() {\n super();\n }\n\n get modelName(): string {\n return 'mock-llm-v1';\n }\n\n get provider(): string {\n return 'mock';\n }\n\n /**\n * Set the canned response that will be returned by generate methods.\n *\n * @param response - The response text to return\n */\n setResponse(response: string): void {\n this._response = response;\n }\n\n async generate(\n prompt: string,\n options?: GenerateOptions\n ): 
Promise<string> {\n // Ignore prompt and options, return canned response\n return this._response;\n }\n\n async generateJSON<T>(\n prompt: string,\n options?: GenerateOptions\n ): Promise<T> {\n // Parse the canned response as JSON\n try {\n return JSON.parse(this._response) as T;\n } catch (error) {\n throw new Error(\n `Failed to parse mock response as JSON: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n async generateBatch(\n prompts: string[],\n options?: GenerateOptions\n ): Promise<string[]> {\n // Return the same canned response for all prompts\n return prompts.map(() => this._response);\n }\n}\n","/**\n * Fast, deterministic keyword-based theme classifier\n * Uses precompiled regex patterns with word boundaries for efficient matching\n */\n\nexport interface ThemeClassificationResult {\n theme: string;\n confidence: number;\n allScores?: Record<string, number>;\n}\n\nexport class KeywordThemeClassifier {\n private patterns: Map<string, RegExp[]>;\n private keywordCounts: Map<string, number>;\n\n /**\n * Creates a new KeywordThemeClassifier\n * @param themes - Array of theme names\n * @param keywords - Map of theme names to their keyword arrays\n * @param caseSensitive - Whether matching should be case sensitive (default: false)\n */\n constructor(\n private themes: string[],\n keywords: Record<string, string[]>,\n private caseSensitive: boolean = false\n ) {\n this.patterns = new Map();\n this.keywordCounts = new Map();\n\n // Precompile regex patterns for each theme\n for (const theme of themes) {\n const themeKeywords = keywords[theme] || [];\n this.keywordCounts.set(theme, themeKeywords.length);\n\n const patterns = themeKeywords.map((keyword) => {\n const escapedKeyword = this.escapeRegex(keyword);\n const flags = caseSensitive ? 
'g' : 'gi';\n return new RegExp(`\\\\b${escapedKeyword}\\\\b`, flags);\n });\n\n this.patterns.set(theme, patterns);\n }\n }\n\n /**\n * Classify a single text\n * @param text - Text to classify\n * @returns Classification result with theme, confidence, and all scores\n */\n classify(text: string): ThemeClassificationResult {\n if (!text || text.trim().length === 0) {\n return {\n theme: 'unknown',\n confidence: 0,\n allScores: {},\n };\n }\n\n const scores: Record<string, number> = {};\n let maxScore = 0;\n let winningTheme = 'unknown';\n\n // Count keyword matches for each theme\n for (const theme of this.themes) {\n const patterns = this.patterns.get(theme) || [];\n let matchCount = 0;\n\n for (const pattern of patterns) {\n const matches = text.match(pattern);\n if (matches) {\n matchCount += matches.length;\n }\n }\n\n scores[theme] = matchCount;\n\n // Track highest scoring theme (first theme wins ties)\n if (matchCount > maxScore) {\n maxScore = matchCount;\n winningTheme = theme;\n }\n }\n\n // No matches found\n if (maxScore === 0) {\n return {\n theme: 'unknown',\n confidence: 0,\n allScores: scores,\n };\n }\n\n // Normalize confidence: matches / total keywords for winning theme\n const totalKeywords = this.keywordCounts.get(winningTheme) || 1;\n const confidence = maxScore / totalKeywords;\n\n return {\n theme: winningTheme,\n confidence: Math.min(confidence, 1.0), // Cap at 1.0\n allScores: scores,\n };\n }\n\n /**\n * Classify multiple texts in batch\n * @param texts - Array of texts to classify\n * @returns Array of classification results\n */\n classifyBatch(texts: string[]): ThemeClassificationResult[] {\n return texts.map((text) => this.classify(text));\n }\n\n /**\n * Escape special regex characters in a string\n * @param str - String to escape\n * @returns Escaped string safe for use in regex\n */\n private escapeRegex(str: string): string {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n }\n}\n","/**\n * Zero-shot theme classifier using Transformers.js\n * Uses pre-trained models without requiring fine-tuning or training data\n */\n\nimport type { ThemeClassifier, ThemeClassification } from './theme-classifier';\n\n/**\n * Zero-shot classification using pre-trained transformer models.\n *\n * This classifier uses Hugging Face's zero-shot classification pipeline\n * to classify text into themes without requiring training data or fine-tuning.\n * The model is loaded lazily on the first classify() call to improve startup time.\n *\n * Features:\n * - No training data required\n * - Works with any set of theme labels\n * - Lazy model loading (loads on first classification)\n * - Sequential batch processing to avoid memory issues\n * - Handles empty text with uniform scores\n *\n * @example\n * ```typescript\n * const classifier = new ZeroShotThemeClassifier(['technology', 'sports', 'business']);\n * const result = await classifier.classify('Machine learning is transforming AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.95\n * ```\n */\nexport class ZeroShotThemeClassifier implements ThemeClassifier {\n private model: any = null;\n private modelName: string;\n private themes: string[];\n\n /**\n * Creates a new ZeroShotThemeClassifier\n *\n * @param themes - Array of theme labels to classify into\n * @param modelName - Name of the Hugging Face model to use (default: 'Xenova/distilbert-base-uncased-mnli')\n *\n * @example\n * ```typescript\n * // Use default model\n * const classifier = new 
ZeroShotThemeClassifier(['technology', 'sports', 'finance']);\n *\n * // Use custom model\n * const classifier = new ZeroShotThemeClassifier(\n * ['positive', 'negative'],\n * 'Xenova/distilbert-base-uncased-mnli'\n * );\n * ```\n */\n constructor(\n themes: string[],\n modelName: string = 'Xenova/distilbert-base-uncased-mnli'\n ) {\n this.themes = themes;\n this.modelName = modelName;\n }\n\n /**\n * Lazy loads the zero-shot classification model\n * Only loads once on first call, subsequent calls reuse the loaded model\n *\n * @returns Promise that resolves to the loaded pipeline\n */\n private async ensureModelLoaded(): Promise<any> {\n if (!this.model) {\n const { pipeline } = await import('@xenova/transformers');\n this.model = await pipeline('zero-shot-classification', this.modelName);\n }\n return this.model;\n }\n\n /**\n * Classify a single text into one of the provided themes\n *\n * @param text - The text content to classify\n * @returns A promise that resolves to the theme classification result\n *\n * @example\n * ```typescript\n * const classifier = new ZeroShotThemeClassifier(['technology', 'sports']);\n * const result = await classifier.classify('Machine learning and AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.92\n * console.log(result.allScores); // { technology: 0.92, sports: 0.08 }\n * ```\n */\n async classify(text: string): Promise<ThemeClassification> {\n // Handle empty text with uniform scores\n if (!text || text.trim().length === 0) {\n const uniformScore = 1.0 / this.themes.length;\n const allScores: Record<string, number> = {};\n\n for (const theme of this.themes) {\n allScores[theme] = uniformScore;\n }\n\n return {\n theme: this.themes[0], // Return first theme\n confidence: uniformScore,\n allScores,\n };\n }\n\n const model = await this.ensureModelLoaded();\n\n // Run zero-shot classification\n const result = await model(text, this.themes) as {\n labels: string[];\n scores: number[];\n };\n\n // Build scores map\n const allScores: Record<string, number> = {};\n for (let i = 0; i < result.labels.length; i++) {\n allScores[result.labels[i]] = result.scores[i];\n }\n\n // Return highest scoring theme (first in result)\n return {\n theme: result.labels[0],\n confidence: result.scores[0],\n allScores,\n };\n }\n\n /**\n * Classify multiple texts efficiently\n *\n * Processes texts sequentially to avoid memory issues with large batches.\n * The model is loaded once and reused for all texts.\n *\n * @param texts - Array of text contents to classify\n * @returns A promise that resolves to an array of theme classifications\n *\n * @example\n * ```typescript\n * const classifier = new ZeroShotThemeClassifier(['technology', 'sports', 'finance']);\n * const results = await classifier.classifyBatch([\n * 'Machine learning is transforming AI',\n * 'The football team won the championship',\n * 'Stock market hits record high'\n * ]);\n * // results[0].theme === 'technology'\n * // results[1].theme === 'sports'\n * // results[2].theme === 'finance'\n * ```\n */\n async classifyBatch(texts: string[]): Promise<ThemeClassification[]> {\n // Ensure model is loaded once\n await this.ensureModelLoaded();\n\n // Process sequentially to avoid memory issues\n const results: ThemeClassification[] = [];\n for (const text of texts) {\n const result = await this.classify(text);\n results.push(result);\n }\n\n return results;\n }\n}\n","/**\n * Embedding-based theme classifier using cosine similarity\n * Computes similarity between text 
embeddings and theme embeddings\n */\n\nimport type { ThemeClassifier, ThemeClassification } from './theme-classifier';\nimport type { Embedder } from '../../embedders/embedder';\n\n/**\n * Embedding-based classification using cosine similarity.\n *\n * This classifier computes embeddings for text and themes, then uses cosine\n * similarity to determine which theme is most similar to the text. Theme\n * embeddings are computed lazily on the first classify() call, or can be\n * provided precomputed in the constructor.\n *\n * Features:\n * - Lazy initialization: theme embeddings computed on first classify()\n * - Optional precomputed embeddings for faster startup\n * - Cosine similarity: dotProduct / (normA * normB)\n * - Normalize similarity [-1,1] to confidence [0,1]\n * - Handles empty text with uniform scores\n *\n * @example\n * ```typescript\n * const embedder = new OpenAIEmbedder();\n * const classifier = new EmbeddingThemeClassifier(['technology', 'sports', 'finance'], embedder);\n * const result = await classifier.classify('Machine learning is transforming AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.89\n * ```\n */\nexport class EmbeddingThemeClassifier implements ThemeClassifier {\n private themeEmbeddings: Record<string, number[]> | null = null;\n private embedder: Embedder;\n private themes: string[];\n\n /**\n * Creates a new EmbeddingThemeClassifier\n *\n * @param themes - Array of theme labels to classify into\n * @param embedder - Embedder instance to use for generating embeddings\n * @param precomputedEmbeddings - Optional precomputed theme embeddings for faster startup\n *\n * @example\n * ```typescript\n * // Lazy initialization\n * const classifier = new EmbeddingThemeClassifier(['technology', 'sports'], embedder);\n *\n * // With precomputed embeddings\n * const themeEmbeddings = {\n * technology: await embedder.embed('technology'),\n * sports: await embedder.embed('sports')\n * };\n * const classifier = new EmbeddingThemeClassifier(['technology', 'sports'], embedder, themeEmbeddings);\n * ```\n */\n constructor(\n themes: string[],\n embedder: Embedder,\n precomputedEmbeddings?: Record<string, number[]>\n ) {\n this.themes = themes;\n this.embedder = embedder;\n this.themeEmbeddings = precomputedEmbeddings || null;\n }\n\n /**\n * Lazy loads theme embeddings on first use\n * Computes embeddings for all theme labels if not already computed\n *\n * @returns Promise that resolves to the theme embeddings map\n */\n private async ensureThemeEmbeddings(): Promise<Record<string, number[]>> {\n if (!this.themeEmbeddings) {\n this.themeEmbeddings = {};\n\n // Compute embeddings for all themes\n const embeddings = await this.embedder.embedBatch(this.themes);\n\n for (let i = 0; i < this.themes.length; i++) {\n this.themeEmbeddings[this.themes[i]] = embeddings[i];\n }\n }\n\n return this.themeEmbeddings;\n }\n\n /**\n * Compute cosine similarity between two vectors\n *\n * Cosine similarity = dotProduct / (normA * normB)\n * Returns value in range [-1, 1] where:\n * - 1 means vectors point in the same direction\n * - 0 means vectors are orthogonal\n * - -1 means vectors point in opposite directions\n *\n * @param a - First vector\n * @param b - Second vector\n * @returns Cosine similarity between the vectors\n */\n private cosineSimilarity(a: number[], b: number[]): number {\n if (a.length !== b.length) {\n throw new Error('Vectors must have the same length for cosine similarity');\n }\n\n let dotProduct = 0;\n let normA = 0;\n 
let normB = 0;\n\n for (let i = 0; i < a.length; i++) {\n dotProduct += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n\n normA = Math.sqrt(normA);\n normB = Math.sqrt(normB);\n\n // Avoid division by zero\n if (normA === 0 || normB === 0) {\n return 0;\n }\n\n return dotProduct / (normA * normB);\n }\n\n /**\n * Normalize cosine similarity from [-1, 1] to confidence score [0, 1]\n *\n * Uses linear transformation: (similarity + 1) / 2\n *\n * @param similarity - Cosine similarity value in range [-1, 1]\n * @returns Confidence score in range [0, 1]\n */\n private normalizeToConfidence(similarity: number): number {\n return (similarity + 1) / 2;\n }\n\n /**\n * Classify a single text into one of the provided themes\n *\n * @param text - The text content to classify\n * @returns A promise that resolves to the theme classification result\n *\n * @example\n * ```typescript\n * const classifier = new EmbeddingThemeClassifier(['technology', 'sports'], embedder);\n * const result = await classifier.classify('Machine learning and AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.92\n * console.log(result.allScores); // { technology: 0.92, sports: 0.45 }\n * ```\n */\n async classify(text: string): Promise<ThemeClassification> {\n // Handle empty text with uniform scores\n if (!text || text.trim().length === 0) {\n const uniformScore = 1.0 / this.themes.length;\n const allScores: Record<string, number> = {};\n\n for (const theme of this.themes) {\n allScores[theme] = uniformScore;\n }\n\n return {\n theme: this.themes[0], // Return first theme\n confidence: uniformScore,\n allScores,\n };\n }\n\n // Ensure theme embeddings are computed\n const themeEmbeddings = await this.ensureThemeEmbeddings();\n\n // Compute text embedding\n const textEmbedding = await this.embedder.embed(text);\n\n // Compute cosine similarity for each theme\n const similarities: Record<string, number> = {};\n let maxSimilarity = -Infinity;\n let winningTheme = this.themes[0];\n\n for (const theme of this.themes) {\n const themeEmbedding = themeEmbeddings[theme];\n const similarity = this.cosineSimilarity(textEmbedding, themeEmbedding);\n similarities[theme] = similarity;\n\n if (similarity > maxSimilarity) {\n maxSimilarity = similarity;\n winningTheme = theme;\n }\n }\n\n // Normalize similarities to confidence scores [0, 1]\n const allScores: Record<string, number> = {};\n for (const theme of this.themes) {\n allScores[theme] = this.normalizeToConfidence(similarities[theme]);\n }\n\n return {\n theme: winningTheme,\n confidence: this.normalizeToConfidence(maxSimilarity),\n allScores,\n };\n }\n\n /**\n * Classify multiple texts efficiently\n *\n * Ensures theme embeddings are loaded once, then processes all texts.\n * Text embeddings are computed in batch for efficiency.\n *\n * @param texts - Array of text contents to classify\n * @returns A promise that resolves to an array of theme classifications\n *\n * @example\n * ```typescript\n * const classifier = new EmbeddingThemeClassifier(['technology', 'sports', 'finance'], embedder);\n * const results = await classifier.classifyBatch([\n * 'Machine learning is transforming AI',\n * 'The football team won the championship',\n * 'Stock market hits record high'\n * ]);\n * // results[0].theme === 'technology'\n * // results[1].theme === 'sports'\n * // results[2].theme === 'finance'\n * ```\n */\n async classifyBatch(texts: string[]): Promise<ThemeClassification[]> {\n // Ensure theme embeddings are loaded once\n 
await this.ensureThemeEmbeddings();\n\n // Process each text (classify already has embeddings cached)\n const results: ThemeClassification[] = [];\n for (const text of texts) {\n const result = await this.classify(text);\n results.push(result);\n }\n\n return results;\n }\n}\n","/**\n * LLM-based theme classifier using language models for high-quality classification\n * Provides the most flexible and accurate theme classification using LLMs\n */\n\nimport type { ThemeClassifier, ThemeClassification } from './theme-classifier';\nimport type { LLMClient } from '../../llm/llm-client';\n\n/**\n * Default prompt template for theme classification.\n * Uses {themes} and {text} placeholders that are replaced at runtime.\n */\nconst DEFAULT_PROMPT_TEMPLATE = `You are a theme classification system. Classify the following text into one of the provided themes.\n\nAvailable themes: {themes}\n\nText to classify:\n{text}\n\nReturn a JSON object with the following structure:\n- theme: the most appropriate theme from the list (string)\n- confidence: confidence score between 0 and 1 (number)\n- allScores: an object mapping each theme to its confidence score (object)\n\nReturn only valid JSON, no additional text.`;\n\n/**\n * LLM-based theme classification using language models.\n *\n * This classifier uses LLMs to provide the highest quality theme classification\n * with semantic understanding and nuanced reasoning. It supports custom prompt\n * templates for domain-specific classification needs.\n *\n * Features:\n * - Default prompt template with {themes} and {text} placeholders\n * - Custom prompt template support for specialized domains\n * - Structured JSON output using LLM.generateJSON<>\n * - Sequential batch processing to avoid rate limits\n * - Comprehensive error handling with cause chain\n * - Empty text handling with uniform scores\n *\n * @example\n * ```typescript\n * const llm = new OpenAIClient('gpt-4');\n * const classifier = new LLMThemeClassifier(\n * ['technology', 'sports', 'finance'],\n * llm\n * );\n * const result = await classifier.classify('Machine learning is transforming AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.95\n * ```\n *\n * @example Custom prompt template\n * ```typescript\n * const customTemplate = `Classify this medical text: {text}\n * Themes: {themes}\n * Return JSON with theme, confidence, allScores.`;\n *\n * const classifier = new LLMThemeClassifier(\n * ['cardiology', 'neurology', 'oncology'],\n * llm,\n * customTemplate\n * );\n * ```\n */\nexport class LLMThemeClassifier implements ThemeClassifier {\n private themes: string[];\n private llm: LLMClient;\n private promptTemplate: string;\n\n /**\n * Creates a new LLMThemeClassifier\n *\n * @param themes - Array of theme labels to classify into\n * @param llm - LLM client instance to use for classification\n * @param promptTemplate - Optional custom prompt template with {themes} and {text} placeholders\n *\n * @example\n * ```typescript\n * const classifier = new LLMThemeClassifier(\n * ['technology', 'sports', 'finance'],\n * llm\n * );\n * ```\n *\n * @example With custom prompt\n * ```typescript\n * const customTemplate = `Classify: {text}\\nThemes: {themes}\\nReturn JSON.`;\n * const classifier = new LLMThemeClassifier(\n * ['technology', 'sports'],\n * llm,\n * customTemplate\n * );\n * ```\n */\n constructor(\n themes: string[],\n llm: LLMClient,\n promptTemplate: string = DEFAULT_PROMPT_TEMPLATE\n ) {\n this.themes = themes;\n this.llm = llm;\n 
this.promptTemplate = promptTemplate;\n }\n\n /**\n * Build the classification prompt by replacing placeholders\n *\n * @param text - The text to classify\n * @returns The complete prompt with placeholders replaced\n */\n private buildPrompt(text: string): string {\n const themesStr = this.themes.join(', ');\n return this.promptTemplate\n .replace('{themes}', themesStr)\n .replace('{text}', text);\n }\n\n /**\n * Classify a single text into one of the provided themes\n *\n * @param text - The text content to classify\n * @returns A promise that resolves to the theme classification result\n *\n * @example\n * ```typescript\n * const classifier = new LLMThemeClassifier(['technology', 'sports'], llm);\n * const result = await classifier.classify('Machine learning and AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.95\n * console.log(result.allScores); // { technology: 0.95, sports: 0.05 }\n * ```\n */\n async classify(text: string): Promise<ThemeClassification> {\n // Handle empty text with uniform scores\n if (!text || text.trim().length === 0) {\n const uniformScore = 1.0 / this.themes.length;\n const allScores: Record<string, number> = {};\n\n for (const theme of this.themes) {\n allScores[theme] = uniformScore;\n }\n\n return {\n theme: this.themes[0], // Return first theme\n confidence: uniformScore,\n allScores,\n };\n }\n\n // Build prompt and call LLM\n const prompt = this.buildPrompt(text);\n\n try {\n const result = await this.llm.generateJSON<ThemeClassification>(prompt);\n return result;\n } catch (error) {\n // Chain the error with context\n const message = `Failed to classify text with LLM: ${error instanceof Error ? error.message : 'unknown error'}`;\n const classificationError = new Error(message);\n\n // Preserve the original error as the cause\n if (error instanceof Error) {\n (classificationError as any).cause = error;\n }\n\n throw classificationError;\n }\n }\n\n /**\n * Classify multiple texts sequentially\n *\n * Processes texts one at a time to avoid rate limits and ensure predictable behavior.\n * Sequential processing provides better error handling and rate limit compliance.\n *\n * @param texts - Array of text contents to classify\n * @returns A promise that resolves to an array of theme classifications\n *\n * @example\n * ```typescript\n * const classifier = new LLMThemeClassifier(['technology', 'sports', 'finance'], llm);\n * const results = await classifier.classifyBatch([\n * 'Machine learning is transforming AI',\n * 'The football team won the championship',\n * 'Stock market hits record high'\n * ]);\n * // results[0].theme === 'technology'\n * // results[1].theme === 'sports'\n * // results[2].theme === 'finance'\n * ```\n */\n async classifyBatch(texts: string[]): Promise<ThemeClassification[]> {\n // Sequential processing to avoid rate limits\n const results: ThemeClassification[] = [];\n\n for (const text of texts) {\n const result = await this.classify(text);\n results.push(result);\n }\n\n return results;\n }\n}\n","/**\n * Enrichment pipeline for adding metadata to vector records.\n *\n * This class provides the main enrichment functionality:\n * - Vertical enrichment: Classify documents into business verticals\n * - Theme enrichment: Add thematic tags to documents\n * - Section enrichment: Structure documents into logical sections\n * - Batch processing: Efficiently process large collections\n *\n * Design principles:\n * 1. Database-agnostic: Works with any VectorDBAdapter\n * 2. 
Strategy pattern: Multiple enrichment strategies per operation\n * 3. Batch processing: Efficient iteration and bulk updates\n * 4. Error resilience: Continue processing despite individual failures\n */\n\nimport type { VectorDBAdapter } from '../adapters';\nimport type { VectorRecord } from '../types';\nimport type { MetadataUpdate } from '../adapters/types';\nimport type { UniversalFilter } from '../filters/types';\nimport type {\n EnrichmentStats,\n VerticalEnrichmentConfig,\n ThemeEnrichmentConfig,\n SectionEnrichmentConfig,\n EnrichAllConfig,\n FieldMappingConfig,\n ExtractorConfig,\n AutomaticExtractionConfig,\n} from './types';\n\n/**\n * EnrichmentPipeline provides methods to enrich vector records with metadata.\n *\n * The pipeline supports three types of enrichment:\n * 1. Vertical enrichment: Classify into business verticals (technology, finance, etc.)\n * 2. Theme enrichment: Add thematic tags (innovation, research, etc.)\n * 3. Section enrichment: Structure into logical sections\n *\n * Each enrichment type supports multiple strategies for maximum flexibility.\n *\n * @example\n * ```typescript\n * const pipeline = new EnrichmentPipeline(adapter, embedder, llm);\n *\n * // Enrich using field mapping\n * await pipeline.enrichVertical('my-collection', {\n * mapping: { 'tech': 'technology', 'hc': 'healthcare' }\n * });\n *\n * // Enrich using custom extractor\n * await pipeline.enrichVertical('my-collection', {\n * extractor: async (doc) => extractVertical(doc)\n * });\n *\n * // Enrich using LLM\n * await pipeline.enrichVertical('my-collection', {\n * automatic: {\n * llm: myLLMClient,\n * fields: ['technology', 'finance', 'healthcare']\n * }\n * });\n * ```\n */\nexport class EnrichmentPipeline {\n /**\n * Create a new enrichment pipeline.\n *\n * @param adapter - Vector database adapter for reading/writing records\n * @param embedder - Optional embedder for embedding-based enrichment\n * @param llm - Optional LLM client for automatic enrichment\n */\n constructor(\n private adapter: VectorDBAdapter,\n private embedder?: any,\n private llm?: any\n ) {}\n\n /**\n * Enrich records with vertical classifications.\n *\n * Supports three strategies:\n * 1. Field mapping: Map existing field values to verticals\n * 2. Custom extractor: Use a custom function to extract verticals\n * 3. 
Automatic LLM: Use an LLM to classify documents\n *\n * @param collection - Name of the collection to enrich\n * @param config - Vertical enrichment configuration\n * @returns Statistics about the enrichment operation\n *\n * @example\n * ```typescript\n * // Field mapping\n * await pipeline.enrichVertical('docs', {\n * mapping: { 'tech': 'technology' }\n * });\n *\n * // Custom extractor\n * await pipeline.enrichVertical('docs', {\n * extractor: async (doc) => 'technology'\n * });\n *\n * // Automatic LLM\n * await pipeline.enrichVertical('docs', {\n * automatic: {\n * llm: myLLMClient,\n * fields: ['technology', 'finance']\n * }\n * });\n * ```\n */\n async enrichVertical(\n collection: string,\n config: VerticalEnrichmentConfig\n ): Promise<EnrichmentStats> {\n const startTime = Date.now();\n const stats: EnrichmentStats = {\n recordsProcessed: 0,\n recordsUpdated: 0,\n recordsSkipped: 0,\n timeMs: 0,\n errors: [],\n };\n\n try {\n // Determine which strategy to use\n if ('mapping' in config) {\n await this.enrichWithFieldMapping(collection, config, stats);\n } else if ('extractor' in config) {\n await this.enrichWithExtractor(collection, config, stats);\n } else if ('automatic' in config) {\n await this.enrichWithLLM(collection, config, stats);\n }\n } catch (error) {\n stats.errors?.push(\n `Pipeline error: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n\n stats.timeMs = Date.now() - startTime;\n return stats;\n }\n\n /**\n * Enrich records using field mapping strategy.\n *\n * Maps values from an existing field to vertical classifications.\n *\n * @param collection - Collection name\n * @param config - Field mapping configuration\n * @param stats - Statistics object to update\n */\n private async enrichWithFieldMapping(\n collection: string,\n config: FieldMappingConfig,\n stats: EnrichmentStats\n ): Promise<void> {\n const batchSize = config.batchSize || 100;\n\n for await (const batch of this.adapter.iterate(collection, {\n batchSize,\n filter: config.filter,\n })) {\n const updates: MetadataUpdate[] = [];\n\n for (const record of batch) {\n stats.recordsProcessed++;\n\n try {\n const vertical = this.applyFieldMapping(record, config.mapping);\n\n if (vertical) {\n updates.push({\n id: record.id,\n metadata: { vertical },\n });\n } else {\n stats.recordsSkipped++;\n }\n } catch (error) {\n stats.recordsSkipped++;\n stats.errors?.push(\n `Error mapping record ${record.id}: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n // Apply updates if any\n if (updates.length > 0) {\n try {\n await this.adapter.updateMetadata(collection, updates);\n stats.recordsUpdated += updates.length;\n } catch (error) {\n stats.errors?.push(\n `Error updating batch: ${error instanceof Error ? 
error.message : 'unknown error'}`\n );\n }\n }\n }\n }\n\n /**\n * Apply field mapping to extract vertical from a record.\n *\n * @param record - Vector record\n * @param mapping - Field mapping configuration\n * @returns Vertical label or null if no match\n */\n private applyFieldMapping(\n record: VectorRecord,\n mapping: Record<string, string>\n ): string | null {\n const category = record.metadata?.category;\n\n if (category && typeof category === 'string' && category in mapping) {\n return mapping[category];\n }\n\n return null;\n }\n\n /**\n * Enrich records using custom extractor strategy.\n *\n * Calls the provided extractor function for each record.\n *\n * @param collection - Collection name\n * @param config - Extractor configuration\n * @param stats - Statistics object to update\n */\n private async enrichWithExtractor(\n collection: string,\n config: ExtractorConfig,\n stats: EnrichmentStats\n ): Promise<void> {\n const batchSize = config.batchSize || 100;\n\n for await (const batch of this.adapter.iterate(collection, {\n batchSize,\n filter: config.filter,\n })) {\n const updates: MetadataUpdate[] = [];\n\n for (const record of batch) {\n stats.recordsProcessed++;\n\n try {\n const vertical = await config.extractor(record);\n\n if (vertical) {\n updates.push({\n id: record.id,\n metadata: { vertical },\n });\n } else {\n stats.recordsSkipped++;\n }\n } catch (error) {\n stats.recordsSkipped++;\n stats.errors?.push(\n `Extractor error for record ${record.id}: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n // Apply updates if any\n if (updates.length > 0) {\n try {\n await this.adapter.updateMetadata(collection, updates);\n stats.recordsUpdated += updates.length;\n } catch (error) {\n stats.errors?.push(\n `Error updating batch: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n }\n }\n\n /**\n * Enrich records using automatic LLM strategy.\n *\n * Uses a language model to classify documents into verticals.\n *\n * @param collection - Collection name\n * @param config - Automatic extraction configuration\n * @param stats - Statistics object to update\n */\n private async enrichWithLLM(\n collection: string,\n config: AutomaticExtractionConfig,\n stats: EnrichmentStats\n ): Promise<void> {\n const batchSize = config.batchSize || 10;\n const { llm, fields, promptTemplate, textField } = config.automatic;\n const fieldName = textField || 'content';\n\n for await (const batch of this.adapter.iterate(collection, {\n batchSize,\n filter: config.filter,\n })) {\n const updates: MetadataUpdate[] = [];\n\n for (const record of batch) {\n stats.recordsProcessed++;\n\n try {\n const vertical = await this.extractWithLLM(\n record,\n llm,\n fields,\n fieldName,\n promptTemplate\n );\n\n if (vertical) {\n updates.push({\n id: record.id,\n metadata: { vertical },\n });\n } else {\n stats.recordsSkipped++;\n }\n } catch (error) {\n stats.recordsSkipped++;\n stats.errors?.push(\n `LLM extraction error for record ${record.id}: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n // Apply updates if any\n if (updates.length > 0) {\n try {\n await this.adapter.updateMetadata(collection, updates);\n stats.recordsUpdated += updates.length;\n } catch (error) {\n stats.errors?.push(\n `Error updating batch: ${error instanceof Error ? 
error.message : 'unknown error'}`\n );\n }\n }\n }\n }\n\n /**\n * Extract vertical classification using LLM.\n *\n * @param record - Vector record\n * @param llm - LLM client\n * @param fields - Available vertical fields\n * @param textField - Field name containing text to classify\n * @param promptTemplate - Optional custom prompt template\n * @returns Vertical label\n */\n private async extractWithLLM(\n record: VectorRecord,\n llm: any,\n fields: string[],\n textField: string,\n promptTemplate?: string\n ): Promise<string> {\n const text = record.metadata?.[textField];\n\n if (!text || typeof text !== 'string') {\n throw new Error(`No text found in field '${textField}'`);\n }\n\n // Build prompt\n const prompt = promptTemplate\n ? promptTemplate\n .replace('{fields}', fields.join(', '))\n .replace('{text}', text)\n : `Classify the following text into one of these categories: ${fields.join(', ')}\\n\\nText: ${text}\\n\\nCategory:`;\n\n // Call LLM\n const result = await llm.generate(prompt);\n\n return result.trim();\n }\n\n /**\n * Enrich records with theme classifications.\n *\n * Uses a theme classifier to identify themes in text content and updates\n * record metadata with theme information. Supports single and multi-theme\n * classification with configurable confidence thresholds.\n *\n * @param collection - Name of the collection to enrich\n * @param config - Theme enrichment configuration\n * @returns Statistics about the enrichment operation\n *\n * @example\n * ```typescript\n * // Single theme classification\n * await pipeline.enrichThemes('docs', {\n * themes: ['technology', 'business', 'science'],\n * classifier: new KeywordThemeClassifier(),\n * confidenceThreshold: 0.7\n * });\n *\n * // Multi-theme classification\n * await pipeline.enrichThemes('docs', {\n * themes: ['technology', 'business', 'science'],\n * classifier: new LLMThemeClassifier(),\n * multiTheme: true,\n * confidenceThreshold: 0.5\n * });\n * ```\n */\n async enrichThemes(\n collection: string,\n config: ThemeEnrichmentConfig\n ): Promise<EnrichmentStats> {\n const startTime = Date.now();\n const stats: EnrichmentStats = {\n recordsProcessed: 0,\n recordsUpdated: 0,\n recordsSkipped: 0,\n timeMs: 0,\n errors: [],\n };\n\n try {\n await this.enrichWithThemeClassifier(collection, config, stats);\n } catch (error) {\n stats.errors?.push(\n `Pipeline error: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n\n stats.timeMs = Date.now() - startTime;\n return stats;\n }\n\n /**\n * Enrich records using theme classifier.\n *\n * @param collection - Collection name\n * @param config - Theme enrichment configuration\n * @param stats - Statistics object to update\n */\n private async enrichWithThemeClassifier(\n collection: string,\n config: ThemeEnrichmentConfig,\n stats: EnrichmentStats\n ): Promise<void> {\n const batchSize = config.batchSize || 100;\n const textField = config.textField || 'content';\n const confidenceThreshold = config.confidenceThreshold ?? 
0.5;\n const multiTheme = config.multiTheme || false;\n\n for await (const batch of this.adapter.iterate(collection, {\n batchSize,\n filter: config.filter,\n })) {\n // Extract texts from batch\n const textsToClassify: string[] = [];\n const recordsToProcess: VectorRecord[] = [];\n\n for (const record of batch) {\n stats.recordsProcessed++;\n\n // Extract text from record\n const text = record.text || record.metadata?.[textField];\n\n if (!text || typeof text !== 'string' || text.trim() === '') {\n stats.recordsSkipped++;\n continue;\n }\n\n textsToClassify.push(text);\n recordsToProcess.push(record);\n }\n\n // Skip if no valid texts to classify\n if (textsToClassify.length === 0) {\n continue;\n }\n\n // Classify batch\n let classifications: any[];\n try {\n classifications = await config.classifier.classifyBatch(textsToClassify);\n } catch (error) {\n // If batch classification fails, try individual classification for each item\n stats.errors?.push(\n `Batch classification error, falling back to individual classification: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n\n classifications = [];\n for (let i = 0; i < textsToClassify.length; i++) {\n try {\n const result = await config.classifier.classify(textsToClassify[i]);\n classifications.push(result);\n } catch (individualError) {\n // Push null to maintain index alignment\n classifications.push(null);\n stats.errors?.push(\n `Classification error for record ${recordsToProcess[i].id}: ${individualError instanceof Error ? individualError.message : 'unknown error'}`\n );\n }\n }\n }\n\n // Build updates\n const updates: MetadataUpdate[] = [];\n\n for (let i = 0; i < recordsToProcess.length; i++) {\n const record = recordsToProcess[i];\n const classification = classifications[i];\n\n try {\n // Check if classification is valid (might be error object or undefined)\n if (!classification || typeof classification !== 'object') {\n stats.recordsSkipped++;\n stats.errors?.push(\n `Invalid classification for record ${record.id}`\n );\n continue;\n }\n\n // Skip if below confidence threshold\n if (classification.confidence < confidenceThreshold) {\n stats.recordsSkipped++;\n continue;\n }\n\n // Build metadata update\n const metadata: Record<string, any> = {\n __h_theme: classification.theme,\n __h_theme_confidence: classification.confidence,\n };\n\n // Handle multi-theme mode\n if (multiTheme && classification.allScores) {\n const themes = Object.entries(classification.allScores)\n .filter(([_, score]) => (score as number) >= confidenceThreshold)\n .sort(([_, a], [__, b]) => (b as number) - (a as number))\n .map(([theme, _]) => theme);\n\n if (themes.length > 0) {\n metadata.__h_themes = themes;\n }\n }\n\n updates.push({\n id: record.id,\n metadata,\n });\n } catch (error) {\n stats.recordsSkipped++;\n stats.errors?.push(\n `Error processing record ${record.id}: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n // Apply updates if any\n if (updates.length > 0) {\n try {\n await this.adapter.updateMetadata(collection, updates);\n stats.recordsUpdated += updates.length;\n } catch (error) {\n stats.errors?.push(\n `Error updating batch: ${error instanceof Error ? 
error.message : 'unknown error'}`\n );\n }\n }\n\n // Call progress callback if provided\n if (config.onProgress) {\n config.onProgress(stats);\n }\n }\n }\n\n /**\n * Enrich records with section structure.\n *\n * Extracts section metadata from documents using either existing field mappings\n * or automatic detection strategies (markdown, HTML, or pattern-based).\n *\n * @param collection - Name of the collection to enrich\n * @param config - Section enrichment configuration\n * @returns Statistics about the enrichment operation\n *\n * @example\n * ```typescript\n * // Use existing section field\n * await pipeline.enrichSections('docs', {\n * existingField: 'section_path'\n * });\n *\n * // Auto-detect sections\n * await pipeline.enrichSections('docs', {\n * autoDetect: true\n * });\n * ```\n */\n async enrichSections(\n collection: string,\n config: SectionEnrichmentConfig\n ): Promise<EnrichmentStats> {\n const startTime = Date.now();\n const stats: EnrichmentStats = {\n recordsProcessed: 0,\n recordsUpdated: 0,\n recordsSkipped: 0,\n timeMs: 0,\n errors: [],\n };\n\n try {\n await this.enrichWithSectionDetection(collection, config, stats);\n } catch (error) {\n stats.errors?.push(\n `Pipeline error: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n\n stats.timeMs = Date.now() - startTime;\n return stats;\n }\n\n /**\n * Enrich records with all enrichment types.\n *\n * Runs vertical, theme, and section enrichment sequentially with shared\n * configuration. Global filters and batch sizes apply to all operations.\n *\n * @param collection - Name of the collection to enrich\n * @param config - Combined enrichment configuration\n * @returns Statistics about the enrichment operation\n *\n * @example\n * ```typescript\n * await pipeline.enrichAll('docs', {\n * vertical: { mapping: { tech: 'technology' } },\n * themes: { themes: ['innovation'], classifier },\n * sections: { autoDetect: true },\n * filter: { field: 'status', op: 'eq', value: 'pending' },\n * batchSize: 50\n * });\n * ```\n */\n async enrichAll(\n collection: string,\n config: EnrichAllConfig\n ): Promise<EnrichmentStats> {\n const startTime = Date.now();\n const aggregateStats: EnrichmentStats = {\n recordsProcessed: 0,\n recordsUpdated: 0,\n recordsSkipped: 0,\n timeMs: 0,\n errors: [],\n };\n\n try {\n // Run vertical enrichment if configured\n if (config.vertical) {\n const verticalConfig = this.applyGlobalConfig(config.vertical, config);\n const stats = await this.enrichVertical(collection, verticalConfig);\n this.mergeStats(aggregateStats, stats);\n\n // Call progress callback if provided\n if (config.onProgress) {\n config.onProgress(aggregateStats);\n }\n }\n\n // Run theme enrichment if configured\n if (config.themes) {\n const themesConfig = this.applyGlobalConfig(config.themes, config);\n const stats = await this.enrichThemes(collection, themesConfig);\n this.mergeStats(aggregateStats, stats);\n\n // Call progress callback if provided\n if (config.onProgress) {\n config.onProgress(aggregateStats);\n }\n }\n\n // Run section enrichment if configured\n if (config.sections) {\n const sectionsConfig = this.applyGlobalConfig(config.sections, config);\n const stats = await this.enrichSections(collection, sectionsConfig);\n this.mergeStats(aggregateStats, stats);\n\n // Call progress callback if provided\n if (config.onProgress) {\n config.onProgress(aggregateStats);\n }\n }\n } catch (error) {\n aggregateStats.errors?.push(\n `Pipeline error: ${error instanceof Error ? 
error.message : 'unknown error'}`\n );\n }\n\n aggregateStats.timeMs = Date.now() - startTime;\n return aggregateStats;\n }\n\n /**\n * Apply global configuration to individual enrichment configs.\n *\n * @param individualConfig - Configuration for a specific enrichment type\n * @param globalConfig - Global configuration\n * @returns Merged configuration\n */\n private applyGlobalConfig<T extends { filter?: UniversalFilter; batchSize?: number }>(\n individualConfig: T,\n globalConfig: EnrichAllConfig\n ): T {\n const merged = { ...individualConfig };\n\n // Apply global filter if not overridden\n if (globalConfig.filter && !merged.filter) {\n merged.filter = globalConfig.filter;\n }\n\n // Apply global batch size if not overridden\n if (globalConfig.batchSize && !merged.batchSize) {\n merged.batchSize = globalConfig.batchSize;\n }\n\n return merged;\n }\n\n /**\n * Merge stats from an enrichment operation into aggregate stats.\n *\n * @param aggregate - Aggregate stats to update\n * @param stats - Stats from a single operation\n */\n private mergeStats(aggregate: EnrichmentStats, stats: EnrichmentStats): void {\n aggregate.recordsProcessed += stats.recordsProcessed;\n aggregate.recordsUpdated += stats.recordsUpdated;\n aggregate.recordsSkipped += stats.recordsSkipped;\n\n // Merge errors\n if (stats.errors && stats.errors.length > 0) {\n if (!aggregate.errors) {\n aggregate.errors = [];\n }\n aggregate.errors.push(...stats.errors);\n }\n }\n\n /**\n * Enrich records using section detection.\n *\n * @param collection - Collection name\n * @param config - Section enrichment configuration\n * @param stats - Statistics object to update\n */\n private async enrichWithSectionDetection(\n collection: string,\n config: SectionEnrichmentConfig,\n stats: EnrichmentStats\n ): Promise<void> {\n const batchSize = config.batchSize || 100;\n\n for await (const batch of this.adapter.iterate(collection, {\n batchSize,\n filter: config.filter,\n })) {\n const updates: MetadataUpdate[] = [];\n\n for (const record of batch) {\n stats.recordsProcessed++;\n\n try {\n let sectionMetadata: {\n path?: string;\n level: number;\n title: string;\n } | null = null;\n\n // Use existing field if provided\n if (config.existingField) {\n sectionMetadata = this.extractSectionMetadata(\n record.metadata?.[config.existingField]\n );\n }\n // Otherwise, auto-detect sections\n else if (config.autoDetect) {\n const text = record.text || record.metadata?.content || '';\n if (typeof text === 'string') {\n sectionMetadata = this.detectSections(text);\n }\n }\n\n if (sectionMetadata) {\n const metadata: Record<string, any> = {\n __h_section_level: sectionMetadata.level,\n __h_section_title: sectionMetadata.title,\n };\n\n if (sectionMetadata.path) {\n metadata.__h_section_path = sectionMetadata.path;\n }\n\n updates.push({\n id: record.id,\n metadata,\n });\n } else {\n stats.recordsSkipped++;\n }\n } catch (error) {\n stats.recordsSkipped++;\n stats.errors?.push(\n `Error processing record ${record.id}: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n // Apply updates if any\n if (updates.length > 0) {\n try {\n await this.adapter.updateMetadata(collection, updates);\n stats.recordsUpdated += updates.length;\n } catch (error) {\n stats.errors?.push(\n `Error updating batch: ${error instanceof Error ? 
error.message : 'unknown error'}`\n );\n }\n }\n }\n }\n\n /**\n * Extract section metadata from an existing field value.\n *\n * @param sectionPath - Section path string (e.g., \"introduction/overview\")\n * @returns Section metadata or null\n */\n private extractSectionMetadata(\n sectionPath: any\n ): { path: string; level: number; title: string } | null {\n if (!sectionPath || typeof sectionPath !== 'string') {\n return null;\n }\n\n const parts = sectionPath.split('/').filter(p => p.trim() !== '');\n if (parts.length === 0) {\n return null;\n }\n\n return {\n path: sectionPath,\n level: parts.length,\n title: parts[parts.length - 1],\n };\n }\n\n /**\n * Detect sections in text using heuristics.\n *\n * @param text - Text content to analyze\n * @returns Section metadata or null\n */\n private detectSections(\n text: string\n ): { level: number; title: string } | null {\n // Try markdown detection first\n const markdown = this.detectMarkdownSections(text);\n if (markdown) return markdown;\n\n // Try HTML detection\n const html = this.detectHtmlSections(text);\n if (html) return html;\n\n // Try pattern detection\n const pattern = this.detectPatternSections(text);\n if (pattern) return pattern;\n\n // Fallback: mark as unsectioned\n return { level: 0, title: 'unsectioned' };\n }\n\n /**\n * Detect markdown headers (# Header).\n *\n * @param text - Text content\n * @returns Section metadata or null\n */\n private detectMarkdownSections(\n text: string\n ): { level: number; title: string } | null {\n const match = text.match(/^(#{1,6})\\s+(.+)$/m);\n if (match) {\n const level = match[1].length;\n const title = match[2].trim();\n return { level, title };\n }\n return null;\n }\n\n /**\n * Detect HTML headers (<h1>Header</h1>).\n *\n * @param text - Text content\n * @returns Section metadata or null\n */\n private detectHtmlSections(\n text: string\n ): { level: number; title: string } | null {\n const match = text.match(/<h([1-6])>(.+?)<\\/h[1-6]>/i);\n if (match) {\n const level = parseInt(match[1], 10);\n const title = match[2].trim();\n return { level, title };\n }\n return null;\n }\n\n /**\n * Detect sections using common patterns (SECTION: Title).\n *\n * @param text - Text content\n * @returns Section metadata or null\n */\n private detectPatternSections(\n text: string\n ): { level: number; title: string } | null {\n const match = text.match(/^SECTION:\\s+(.+)$/m);\n if (match) {\n const title = match[1].trim();\n return { level: 1, title };\n }\n return null;\n }\n}\n","// packages/core/src/ingestion/chunkers/text-chunker.ts\nimport type { TextChunk, ChunkConfig } from '../types';\n\n/**\n * Abstract interface for text chunking strategies.\n * Implementations split text into chunks with different algorithms.\n */\nexport interface TextChunker {\n /**\n * Chunk text into smaller pieces.\n * @param text - Text to chunk\n * @param config - Optional chunking configuration\n * @returns Array of text chunks with position metadata\n */\n chunk(text: string, config?: ChunkConfig): TextChunk[];\n}\n\n/**\n * Default chunk size in tokens (approximate).\n */\nexport const DEFAULT_CHUNK_SIZE = 500;\n\n/**\n * Default chunk overlap in tokens (approximate).\n */\nexport const DEFAULT_CHUNK_OVERLAP = 50;\n\n/**\n * Estimate token count from character count.\n * Simple heuristic: 1 token ≈ 4 characters for English text.\n */\nexport function estimateTokens(text: string): number {\n return Math.ceil(text.length / 4);\n}\n\n/**\n * Estimate character count from token count.\n */\nexport function 
estimateChars(tokens: number): number {\n return tokens * 4;\n}\n","// packages/core/src/ingestion/chunkers/recursive-chunker.ts\nimport type { TextChunker } from './text-chunker';\nimport type { TextChunk, ChunkConfig } from '../types';\nimport {\n DEFAULT_CHUNK_SIZE,\n DEFAULT_CHUNK_OVERLAP,\n estimateChars\n} from './text-chunker';\n\n/**\n * Recursive text chunker that tries different separators hierarchically.\n * Tries to split by paragraphs first, then sentences, then words, then characters.\n */\nexport class RecursiveChunker implements TextChunker {\n private readonly separators = [\n '\\n\\n', // Paragraphs (double newline)\n '\\n', // Lines (single newline)\n '. ', // Sentences (period + space)\n ' ', // Words (space)\n '' // Characters (last resort)\n ];\n\n chunk(text: string, config?: ChunkConfig): TextChunk[] {\n if (!text) return [];\n\n const chunkSize = config?.chunkSize ?? DEFAULT_CHUNK_SIZE;\n const chunkOverlap = config?.chunkOverlap ?? DEFAULT_CHUNK_OVERLAP;\n\n const maxChars = estimateChars(chunkSize);\n const overlapChars = estimateChars(chunkOverlap);\n\n if (text.length <= maxChars) {\n return [{\n text,\n index: 0,\n metadata: {\n source: '',\n chunkIndex: 0,\n totalChunks: 1,\n startChar: 0,\n endChar: text.length\n }\n }];\n }\n\n const splits = this.recursiveSplit(text, maxChars, 0);\n const chunks = this.addOverlap(splits, overlapChars);\n\n return chunks.map((chunk, index) => ({\n text: chunk.text,\n index,\n metadata: {\n source: '', // Will be set by pipeline\n chunkIndex: index,\n totalChunks: chunks.length,\n startChar: chunk.start,\n endChar: chunk.end\n }\n }));\n }\n\n private recursiveSplit(\n text: string,\n maxChars: number,\n separatorIndex: number\n ): Array<{ text: string; start: number; end: number }> {\n if (text.length <= maxChars) {\n return [{ text, start: 0, end: text.length }];\n }\n\n if (separatorIndex >= this.separators.length) {\n // Last resort: split by character\n const result: Array<{ text: string; start: number; end: number }> = [];\n for (let i = 0; i < text.length; i += maxChars) {\n result.push({\n text: text.slice(i, i + maxChars),\n start: i,\n end: Math.min(i + maxChars, text.length)\n });\n }\n return result;\n }\n\n const separator = this.separators[separatorIndex];\n const parts = separator ? text.split(separator) : [text];\n\n if (parts.length <= 1) {\n // Separator didn't split, try next separator\n return this.recursiveSplit(text, maxChars, separatorIndex + 1);\n }\n\n // Group parts into chunks that fit within maxChars\n const result: Array<{ text: string; start: number; end: number }> = [];\n let currentParts: string[] = [];\n let currentStart = 0;\n let runningOffset = 0;\n\n for (let i = 0; i < parts.length; i++) {\n const part = parts[i];\n const combined = currentParts.length > 0\n ? 
[...currentParts, part].join(separator)\n : part;\n\n if (combined.length <= maxChars) {\n if (currentParts.length === 0) {\n currentStart = runningOffset;\n }\n currentParts.push(part);\n } else {\n // Save current chunk if non-empty\n if (currentParts.length > 0) {\n const chunkText = currentParts.join(separator);\n result.push({\n text: chunkText,\n start: currentStart,\n end: currentStart + chunkText.length\n });\n }\n // Start new chunk with current part\n currentStart = runningOffset;\n // If single part is too large, recursively split it\n if (part.length > maxChars) {\n const subSplits = this.recursiveSplit(part, maxChars, separatorIndex + 1);\n for (const sub of subSplits) {\n result.push({\n text: sub.text,\n start: currentStart + sub.start,\n end: currentStart + sub.end\n });\n }\n currentParts = [];\n } else {\n currentParts = [part];\n }\n }\n runningOffset += part.length + (i < parts.length - 1 ? separator.length : 0);\n }\n\n // Save remaining chunk\n if (currentParts.length > 0) {\n const chunkText = currentParts.join(separator);\n result.push({\n text: chunkText,\n start: currentStart,\n end: currentStart + chunkText.length\n });\n }\n\n return result;\n }\n\n private addOverlap(\n chunks: Array<{ text: string; start: number; end: number }>,\n overlapChars: number\n ): Array<{ text: string; start: number; end: number }> {\n if (overlapChars === 0 || chunks.length <= 1) {\n return chunks;\n }\n\n const result = [chunks[0]];\n\n for (let i = 1; i < chunks.length; i++) {\n const prevChunk = chunks[i - 1];\n const currChunk = chunks[i];\n\n // Get last N chars from previous chunk\n const overlapText = prevChunk.text.slice(-overlapChars);\n\n result.push({\n text: overlapText + currChunk.text,\n start: Math.max(0, prevChunk.end - overlapChars),\n end: currChunk.end\n });\n }\n\n return result;\n }\n}\n","// packages/core/src/ingestion/ingestion-pipeline.ts\nimport type { VectorDBAdapter } from '../adapters/vector-db-adapter';\nimport type { Embedder } from '../embedders/embedder';\nimport type { VectorRecord } from '../types/vector-record';\nimport type { LoaderRegistry } from './loaders/loader-registry';\nimport type { TextChunker } from './chunkers/text-chunker';\nimport type { Document, IngestionConfig, IngestionStats, TextChunk } from './types';\nimport { RecursiveChunker } from './chunkers/recursive-chunker';\nimport { VerticalFields } from '../metadata/constants';\nimport * as path from 'path';\n\n/**\n * Main ingestion pipeline orchestrator.\n * Coordinates loading, chunking, embedding, and upserting documents.\n */\nexport class IngestionPipeline {\n private defaultChunker: TextChunker;\n\n constructor(\n private adapter: VectorDBAdapter,\n private embedder: Embedder,\n private loaderRegistry: LoaderRegistry,\n chunker?: TextChunker\n ) {\n this.defaultChunker = chunker || new RecursiveChunker();\n }\n\n /**\n * Ingest documents into a vector database collection.\n * @param sources - File paths\n * @param collection - Target collection name\n * @param config - Optional ingestion configuration\n * @returns Statistics about the ingestion operation\n */\n async ingest(\n sources: string | string[],\n collection: string,\n config?: IngestionConfig\n ): Promise<IngestionStats> {\n const startTime = Date.now();\n const sourceArray = Array.isArray(sources) ? 
sources : [sources];\n\n const stats: IngestionStats = {\n documentsProcessed: 0,\n documentsSucceeded: 0,\n documentsFailed: 0,\n chunksCreated: 0,\n chunksUpserted: 0,\n timeMs: 0,\n errors: []\n };\n\n const totalDocuments = sourceArray.length;\n\n for (const source of sourceArray) {\n // Report loading stage\n config?.onProgress?.({\n stage: 'loading',\n documentsProcessed: stats.documentsProcessed,\n totalDocuments,\n chunksProcessed: stats.chunksUpserted,\n currentDocument: source\n });\n\n try {\n await this.ingestFile(source, collection, config, stats, totalDocuments);\n stats.documentsSucceeded++;\n } catch (error) {\n stats.documentsFailed++;\n stats.errors!.push({\n source,\n stage: 'load',\n error: error as Error\n });\n }\n stats.documentsProcessed++;\n }\n\n stats.timeMs = Date.now() - startTime;\n return stats;\n }\n\n private async ingestFile(\n filePath: string,\n collection: string,\n config: IngestionConfig | undefined,\n stats: IngestionStats,\n totalDocuments: number\n ): Promise<void> {\n // Load document\n const doc = await this.loaderRegistry.load(filePath);\n config?.onDocumentLoaded?.(doc);\n\n // Report chunking stage\n config?.onProgress?.({\n stage: 'chunking',\n documentsProcessed: stats.documentsProcessed,\n totalDocuments,\n chunksProcessed: stats.chunksUpserted,\n currentDocument: filePath\n });\n\n // Chunk text\n const chunker = config?.chunker || this.defaultChunker;\n const chunks = chunker.chunk(doc.text, {\n chunkSize: config?.chunkSize,\n chunkOverlap: config?.chunkOverlap\n });\n\n // Update source in chunk metadata\n for (const chunk of chunks) {\n chunk.metadata.source = doc.source;\n }\n\n stats.chunksCreated += chunks.length;\n config?.onChunksCreated?.(chunks);\n\n // Report embedding stage\n config?.onProgress?.({\n stage: 'embedding',\n documentsProcessed: stats.documentsProcessed,\n totalDocuments,\n chunksProcessed: stats.chunksUpserted,\n totalChunks: stats.chunksCreated,\n currentDocument: filePath\n });\n\n // Embed chunks\n const texts = chunks.map((c: TextChunk) => c.text);\n const embeddings = await this.embedder.embedBatch(texts);\n\n // Build vector records with metadata\n const records: VectorRecord[] = chunks.map((chunk: TextChunk, i: number) => {\n const metadata = this.buildMetadata(doc, chunk, config);\n\n return {\n id: `${path.basename(doc.source)}:${chunk.index}`,\n embedding: embeddings[i],\n text: chunk.text,\n metadata\n };\n });\n\n // Report upserting stage\n config?.onProgress?.({\n stage: 'upserting',\n documentsProcessed: stats.documentsProcessed,\n totalDocuments,\n chunksProcessed: stats.chunksUpserted,\n totalChunks: stats.chunksCreated,\n currentDocument: filePath\n });\n\n // Upsert to database in batches\n const batchSize = config?.batchSize || 100;\n for (let i = 0; i < records.length; i += batchSize) {\n const batch = records.slice(i, i + batchSize);\n await this.adapter.upsert(collection, batch);\n stats.chunksUpserted += batch.length;\n }\n }\n\n private buildMetadata(\n doc: Document,\n chunk: TextChunk,\n config: IngestionConfig | undefined\n ): Record<string, any> {\n // Auto-extract vertical metadata\n const basename = path.basename(doc.source, path.extname(doc.source));\n const dirname = path.dirname(doc.source);\n\n const autoMetadata: Record<string, any> = {\n [VerticalFields.SOURCE]: doc.source,\n [VerticalFields.DOC_TYPE]: doc.type,\n [VerticalFields.DOC_ID]: basename,\n [VerticalFields.PARTITION]: dirname\n };\n\n // Apply custom extractor\n const extractedMetadata = 
config?.metadataExtractor?.(doc) || {};\n\n // Apply user metadata\n const userMetadata = config?.metadata || {};\n\n // Add chunk metadata\n const chunkMetadata = {\n chunkIndex: chunk.metadata.chunkIndex,\n totalChunks: chunk.metadata.totalChunks,\n startChar: chunk.metadata.startChar,\n endChar: chunk.metadata.endChar\n };\n\n // Merge all metadata (user overrides auto-extracted)\n return {\n ...autoMetadata,\n ...extractedMetadata,\n ...userMetadata,\n ...chunkMetadata\n };\n }\n}\n","// packages/core/src/ingestion/loaders/text-loader.ts\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\nimport type { DocumentLoader } from './document-loader';\nimport type { Document } from '../types';\n\n/**\n * Loader for plain text files (.txt, .md).\n * No external dependencies, uses Node.js built-in fs.\n */\nexport class TextLoader implements DocumentLoader {\n canHandle(filePath: string): boolean {\n return /\\.(txt|md)$/i.test(filePath);\n }\n\n async load(filePath: string): Promise<Document> {\n const text = await fs.readFile(filePath, 'utf-8');\n const type = path.extname(filePath).slice(1).toLowerCase();\n const stats = await fs.stat(filePath);\n const extension = path.extname(filePath);\n\n return {\n text,\n source: filePath,\n type,\n metadata: {\n size: stats.size,\n extension\n }\n };\n }\n}\n","// packages/core/src/ingestion/loaders/pdf-loader.ts\nimport * as fs from 'fs/promises';\nimport pdfParse from 'pdf-parse';\nimport type { DocumentLoader } from './document-loader';\nimport type { Document } from '../types';\n\n/**\n * Loader for PDF files using pdf-parse library.\n * Extracts text from all pages and includes PDF metadata.\n */\nexport class PDFLoader implements DocumentLoader {\n canHandle(filePath: string): boolean {\n return /\\.pdf$/i.test(filePath);\n }\n\n async load(filePath: string): Promise<Document> {\n const dataBuffer = await fs.readFile(filePath);\n const pdfData = await pdfParse(dataBuffer);\n\n return {\n text: pdfData.text,\n source: filePath,\n type: 'pdf',\n metadata: {\n pages: pdfData.numpages,\n info: pdfData.info\n }\n };\n }\n}\n","// packages/core/src/ingestion/loaders/docx-loader.ts\nimport mammoth from 'mammoth';\nimport type { DocumentLoader } from './document-loader';\nimport type { Document } from '../types';\n\n/**\n * Loader for DOCX files using mammoth library.\n * Converts DOCX to plain text, preserves paragraph structure.\n */\nexport class DOCXLoader implements DocumentLoader {\n canHandle(filePath: string): boolean {\n return /\\.docx$/i.test(filePath);\n }\n\n async load(filePath: string): Promise<Document> {\n const result = await mammoth.extractRawText({ path: filePath });\n\n return {\n text: result.value,\n source: filePath,\n type: 'docx',\n metadata: {\n warnings: result.messages // Conversion warnings from mammoth\n }\n };\n }\n}\n","// packages/core/src/ingestion/loaders/html-loader.ts\nimport * as fs from 'fs/promises';\nimport * as cheerio from 'cheerio';\nimport type { DocumentLoader } from './document-loader';\nimport type { Document } from '../types';\n\n/**\n * Loader for HTML files using cheerio library.\n * Strips tags, extracts visible text, removes scripts/styles.\n */\nexport class HTMLLoader implements DocumentLoader {\n canHandle(filePath: string): boolean {\n return /\\.html?$/i.test(filePath);\n }\n\n async load(filePath: string): Promise<Document> {\n const html = await fs.readFile(filePath, 'utf-8');\n const $ = cheerio.load(html);\n\n // Remove script, style, nav, footer\n $('script, style, nav, 
footer').remove();\n\n // Extract text from body\n const text = $('body').text()\n .replace(/\\s+/g, ' ') // Normalize whitespace\n .trim();\n\n return {\n text,\n source: filePath,\n type: 'html',\n metadata: {\n title: $('title').text() || undefined,\n description: $('meta[name=\"description\"]').attr('content') || undefined\n }\n };\n }\n}\n","// packages/core/src/ingestion/loaders/loader-registry.ts\nimport type { DocumentLoader } from './document-loader';\nimport type { Document } from '../types';\nimport { TextLoader } from './text-loader';\nimport { PDFLoader } from './pdf-loader';\nimport { DOCXLoader } from './docx-loader';\nimport { HTMLLoader } from './html-loader';\n\n/**\n * Registry for document loaders.\n * Manages loaders and routes files to correct loader based on extension.\n */\nexport class LoaderRegistry {\n private loaders: DocumentLoader[] = [];\n\n constructor() {\n // Register built-in loaders\n this.register(new TextLoader());\n this.register(new PDFLoader());\n this.register(new DOCXLoader());\n this.register(new HTMLLoader());\n }\n\n /**\n * Register a custom document loader.\n * @param loader - Loader to register\n */\n register(loader: DocumentLoader): void {\n this.loaders.push(loader);\n }\n\n /**\n * Check if any loader can handle this file.\n * @param filePath - Path to check\n * @returns true if a loader exists for this file type\n */\n canLoad(filePath: string): boolean {\n return this.loaders.some(l => l.canHandle(filePath));\n }\n\n /**\n * Load a document using the appropriate loader.\n * @param filePath - Path to the file to load\n * @returns Promise resolving to Document\n * @throws Error if no loader found for file type\n */\n async load(filePath: string): Promise<Document> {\n const loader = this.loaders.find(l => l.canHandle(filePath));\n if (!loader) {\n throw new Error(`No loader found for file: ${filePath}`);\n }\n return loader.load(filePath);\n }\n}\n","// packages/core/src/ingestion/chunkers/fixed-chunker.ts\nimport type { TextChunker } from './text-chunker';\nimport type { TextChunk, ChunkConfig } from '../types';\nimport {\n DEFAULT_CHUNK_SIZE,\n DEFAULT_CHUNK_OVERLAP,\n estimateChars\n} from './text-chunker';\n\n/**\n * Fixed-size text chunker that splits at exact character boundaries.\n * Fast and predictable, but may split mid-sentence or mid-word.\n */\nexport class FixedChunker implements TextChunker {\n chunk(text: string, config?: ChunkConfig): TextChunk[] {\n if (!text) return [];\n\n const chunkSize = config?.chunkSize ?? DEFAULT_CHUNK_SIZE;\n const chunkOverlap = config?.chunkOverlap ?? 
DEFAULT_CHUNK_OVERLAP;\n\n const maxChars = estimateChars(chunkSize);\n const overlapChars = estimateChars(chunkOverlap);\n const step = maxChars - overlapChars;\n\n if (text.length <= maxChars) {\n return [{\n text,\n index: 0,\n metadata: {\n source: '',\n chunkIndex: 0,\n totalChunks: 1,\n startChar: 0,\n endChar: text.length\n }\n }];\n }\n\n const chunks: TextChunk[] = [];\n let position = 0;\n\n while (position < text.length) {\n const end = Math.min(text.length, position + maxChars);\n const chunkText = text.slice(position, end);\n\n chunks.push({\n text: chunkText,\n index: chunks.length,\n metadata: {\n source: '',\n chunkIndex: chunks.length,\n totalChunks: 0, // Updated after loop\n startChar: position,\n endChar: end\n }\n });\n\n position += step;\n // Prevent infinite loop if step is 0\n if (step <= 0) break;\n }\n\n // Update totalChunks\n for (const chunk of chunks) {\n chunk.metadata.totalChunks = chunks.length;\n }\n\n return chunks;\n }\n}\n","// packages/core/src/ingestion/chunkers/sentence-chunker.ts\nimport type { TextChunker } from './text-chunker';\nimport type { TextChunk, ChunkConfig } from '../types';\nimport {\n DEFAULT_CHUNK_SIZE,\n DEFAULT_CHUNK_OVERLAP,\n estimateChars\n} from './text-chunker';\n\n/**\n * Sentence-aware chunker that splits on sentence boundaries.\n * Uses a simple regex-based sentence splitter for portability.\n */\nexport class SentenceChunker implements TextChunker {\n chunk(text: string, config?: ChunkConfig): TextChunk[] {\n if (!text) return [];\n\n const chunkSize = config?.chunkSize ?? DEFAULT_CHUNK_SIZE;\n const chunkOverlap = config?.chunkOverlap ?? DEFAULT_CHUNK_OVERLAP;\n\n const maxChars = estimateChars(chunkSize);\n const overlapChars = estimateChars(chunkOverlap);\n\n // Split into sentences using regex\n const sentences = this.splitSentences(text);\n\n if (sentences.length === 0) {\n return [{\n text,\n index: 0,\n metadata: {\n source: '',\n chunkIndex: 0,\n totalChunks: 1,\n startChar: 0,\n endChar: text.length\n }\n }];\n }\n\n // Group sentences into chunks\n const rawChunks: Array<{ text: string; start: number; end: number }> = [];\n let currentSentences: string[] = [];\n let currentStart = 0;\n\n for (const sentence of sentences) {\n const combined = currentSentences.length > 0\n ? 
[...currentSentences, sentence].join(' ')\n : sentence;\n\n if (currentSentences.length === 0) {\n currentSentences = [sentence];\n currentStart = text.indexOf(sentence);\n } else if (combined.length <= maxChars) {\n currentSentences.push(sentence);\n } else {\n // Save current chunk\n const chunkText = currentSentences.join(' ');\n rawChunks.push({\n text: chunkText,\n start: currentStart,\n end: currentStart + chunkText.length\n });\n\n // Start new chunk\n currentSentences = [sentence];\n currentStart = text.indexOf(sentence, currentStart + 1);\n if (currentStart === -1) currentStart = 0;\n }\n }\n\n // Save last chunk\n if (currentSentences.length > 0) {\n const chunkText = currentSentences.join(' ');\n rawChunks.push({\n text: chunkText,\n start: currentStart,\n end: currentStart + chunkText.length\n });\n }\n\n // Add overlap\n const withOverlap = this.addSentenceOverlap(rawChunks, overlapChars);\n\n return withOverlap.map((chunk, index) => ({\n text: chunk.text,\n index,\n metadata: {\n source: '',\n chunkIndex: index,\n totalChunks: withOverlap.length,\n startChar: chunk.start,\n endChar: chunk.end\n }\n }));\n }\n\n private splitSentences(text: string): string[] {\n // Split on sentence-ending punctuation followed by space or end of string\n const parts = text.match(/[^.!?]*[.!?]+(?:\\s|$)|[^.!?]+$/g);\n if (!parts) return [text];\n return parts.map(s => s.trim()).filter(s => s.length > 0);\n }\n\n private addSentenceOverlap(\n chunks: Array<{ text: string; start: number; end: number }>,\n overlapChars: number\n ): Array<{ text: string; start: number; end: number }> {\n if (overlapChars === 0 || chunks.length <= 1) {\n return chunks;\n }\n\n const result = [chunks[0]];\n\n for (let i = 1; i < chunks.length; i++) {\n const prevChunk = chunks[i - 1];\n const currChunk = chunks[i];\n\n // Find last sentence from previous chunk to use as overlap\n const prevSentences = this.splitSentences(prevChunk.text);\n const lastSentence = prevSentences[prevSentences.length - 1] || '';\n\n if (lastSentence && lastSentence.length <= overlapChars) {\n result.push({\n text: lastSentence + ' ' + currChunk.text,\n start: Math.max(0, prevChunk.end - lastSentence.length),\n end: currChunk.end\n });\n } else {\n result.push(currChunk);\n }\n }\n\n return result;\n }\n}\n","// packages/core/src/client/rag-client.ts\nimport type { VectorDBAdapter } from '../adapters/vector-db-adapter';\nimport type { Embedder } from '../embedders/embedder';\nimport type { LLMClient } from '../llm/llm-client';\nimport type { DistanceMetric } from '../adapters/types';\nimport type { UniversalFilter } from '../filters/types';\nimport type { RetrievalResult } from '../query/types';\nimport type { IngestionConfig, IngestionStats } from '../ingestion/types';\nimport type { EnrichAllConfig, EnrichmentStats } from '../enrichment/types';\nimport type { RAGClientConfig, RetrieveOptions, QueryOptions, RAGResponse } from './types';\nimport { RAGQueryComposer } from '../query/rag-query-composer';\nimport { IngestionPipeline } from '../ingestion/ingestion-pipeline';\nimport { EnrichmentPipeline } from '../enrichment/enrichment-pipeline';\nimport { LoaderRegistry } from '../ingestion/loaders/loader-registry';\nimport { VerticalFields } from '../metadata/constants';\nimport { HorizontalFields } from '../metadata/constants';\n\nconst DEFAULT_TOP_K = 10;\n\nconst DEFAULT_RAG_SYSTEM_PROMPT =\n 'You are a helpful assistant. Answer the question based on the provided context. 
' +\n 'If the context doesn\\'t contain enough information, say so.';\n\n/**\n * RAGClient - Unified facade for all Glyph VectorORM operations.\n *\n * Ties together adapter, embedder, LLM, ingestion, enrichment, and query\n * into a single developer-facing API.\n *\n * @example\n * ```typescript\n * const client = new RAGClient({\n * adapter: new ChromaAdapter(),\n * embedder: new OpenAIEmbedder(),\n * llm: new OpenAIClient(),\n * defaultCollection: 'my-docs'\n * });\n *\n * // Ingest documents\n * await client.ingest(['docs/*.pdf']);\n *\n * // Retrieve\n * const result = await client.retrieve('pricing info');\n *\n * // Full RAG query\n * const response = await client.query('What are the pricing terms?');\n * console.log(response.answer);\n * ```\n */\nexport class RAGClient {\n private readonly adapter: VectorDBAdapter;\n private readonly embedder: Embedder;\n private readonly llm?: LLMClient;\n private readonly defaultCollection?: string;\n private readonly defaultTopK: number;\n\n private readonly queryComposer: RAGQueryComposer;\n private readonly ingestionPipeline: IngestionPipeline;\n private readonly enrichmentPipeline: EnrichmentPipeline;\n\n constructor(config: RAGClientConfig) {\n this.adapter = config.adapter;\n this.embedder = config.embedder;\n this.llm = config.llm;\n this.defaultCollection = config.defaultCollection;\n this.defaultTopK = config.defaultTopK ?? DEFAULT_TOP_K;\n\n // Auto-construct internal pipelines\n this.queryComposer = new RAGQueryComposer(this.adapter, this.embedder);\n this.ingestionPipeline = new IngestionPipeline(\n this.adapter,\n this.embedder,\n new LoaderRegistry()\n );\n this.enrichmentPipeline = new EnrichmentPipeline(this.adapter);\n }\n\n // ==========================================================================\n // COLLECTION MANAGEMENT\n // ==========================================================================\n\n /**\n * Create a new vector collection.\n * Dimension defaults to embedder.dimensions if not specified.\n */\n async createCollection(\n name: string,\n dimension?: number,\n metric?: DistanceMetric\n ): Promise<void> {\n const dim = dimension ?? this.embedder.dimensions;\n await this.adapter.createCollection(name, dim, metric);\n }\n\n /**\n * Delete a collection.\n */\n async deleteCollection(name: string): Promise<void> {\n await this.adapter.deleteCollection(name);\n }\n\n /**\n * Check if a collection exists.\n */\n async collectionExists(name: string): Promise<boolean> {\n return this.adapter.collectionExists(name);\n }\n\n // ==========================================================================\n // INGESTION\n // ==========================================================================\n\n /**\n * Ingest documents into a collection.\n * Collection defaults to defaultCollection if not specified.\n */\n async ingest(\n sources: string | string[],\n collection?: string,\n config?: IngestionConfig\n ): Promise<IngestionStats> {\n const col = collection ?? this.defaultCollection;\n if (!col) {\n throw new Error(\n 'No collection specified. 
Pass a collection name or set defaultCollection in config.'\n );\n }\n return this.ingestionPipeline.ingest(sources, col, config);\n }\n\n // ==========================================================================\n // RETRIEVAL\n // ==========================================================================\n\n /**\n * Retrieve relevant chunks for a query.\n * Supports filter shorthands (partition, theme) and groupBy.\n */\n async retrieve(\n query: string,\n options?: RetrieveOptions\n ): Promise<RetrievalResult> {\n const collection = options?.collection ?? this.defaultCollection;\n if (!collection) {\n throw new Error(\n 'No collection specified. Pass a collection name or set defaultCollection in config.'\n );\n }\n\n const topK = options?.topK ?? this.defaultTopK;\n\n // Build filters from shorthands\n let verticalFilters: UniversalFilter | undefined;\n let horizontalFilters: UniversalFilter | undefined;\n const customFilters = options?.filter;\n\n if (options?.partition) {\n verticalFilters = {\n field: VerticalFields.PARTITION,\n op: 'eq' as const,\n value: options.partition\n };\n }\n\n if (options?.theme) {\n horizontalFilters = {\n field: HorizontalFields.THEME,\n op: 'eq' as const,\n value: options.theme\n };\n }\n\n const params = {\n query,\n collection,\n topK,\n verticalFilters,\n horizontalFilters,\n customFilters\n };\n\n // Handle groupBy\n if (options?.groupBy === 'document') {\n const grouped = await this.queryComposer.retrieveVertical(params);\n // Flatten grouped results back into RetrievalResult\n const records = Array.from(grouped.values()).flat();\n return { records, query, filtersApplied: { vertical: verticalFilters, horizontal: horizontalFilters, custom: customFilters } };\n }\n\n if (options?.groupBy === 'theme') {\n const grouped = await this.queryComposer.retrieveHorizontal(params);\n const records = Array.from(grouped.values()).flat();\n return { records, query, filtersApplied: { vertical: verticalFilters, horizontal: horizontalFilters, custom: customFilters } };\n }\n\n return this.queryComposer.retrieve(params);\n }\n\n // ==========================================================================\n // ENRICHMENT\n // ==========================================================================\n\n /**\n * Enrich a collection with vertical, theme, and/or section metadata.\n */\n async enrich(\n collection: string,\n config: EnrichAllConfig\n ): Promise<EnrichmentStats> {\n return this.enrichmentPipeline.enrichAll(collection, config);\n }\n\n // ==========================================================================\n // FULL RAG QUERY\n // ==========================================================================\n\n /**\n * Full RAG: retrieve relevant context and generate an answer using LLM.\n * Requires an LLM client to be provided in the constructor config.\n */\n async query(\n question: string,\n options?: QueryOptions\n ): Promise<RAGResponse> {\n if (!this.llm) {\n throw new Error(\n 'RAGClient.query() requires an LLM client. Pass one in the constructor config.'\n );\n }\n\n // 1. Retrieve relevant chunks\n const retrievalResult = await this.retrieve(question, options);\n\n // 2. Build context from chunk texts\n const context = retrievalResult.records\n .map((r) => r.text)\n .filter(Boolean)\n .join('\\n\\n');\n\n // 3. Build prompt\n const systemPrompt = options?.systemPrompt ?? DEFAULT_RAG_SYSTEM_PROMPT;\n const prompt = `${systemPrompt}\\n\\nContext:\\n${context}\\n\\nQuestion: ${question}`;\n\n // 4. 
Generate answer\n const answer = await this.llm.generate(prompt, {\n temperature: options?.temperature,\n maxTokens: options?.maxTokens\n });\n\n return {\n answer,\n sources: retrievalResult.records,\n query: question,\n retrievalResult\n };\n }\n}\n"],"mappings":";AAQO,IAAM,oBAAoB;AAAA,EAC/B,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,YAAY;AACd;AAKO,IAAM,iBAAiB;AAAA;AAAA,EAE5B,QAAQ;AAAA;AAAA,EAGR,QAAQ;AAAA;AAAA,EAGR,WAAW;AAAA;AAAA,EAGX,UAAU;AAAA;AAAA,EAGV,MAAM;AACR;AAKO,IAAM,mBAAmB;AAAA;AAAA,EAE9B,OAAO;AAAA;AAAA,EAGP,QAAQ;AAAA;AAAA,EAGR,kBAAkB;AAAA;AAAA,EAGlB,cAAc;AAAA;AAAA,EAGd,eAAe;AAAA;AAAA,EAGf,eAAe;AACjB;AAKO,IAAM,mBAAmB;AAAA;AAAA,EAE9B,aAAa;AAAA;AAAA,EAGb,WAAW;AAAA;AAAA,EAGX,cAAc;AAAA;AAAA,EAGd,cAAc;AAChB;;;AClDO,IAAM,kBAAN,MAAsB;AAAA,EACnB,WAAgC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASzC,SAAS,QAAmC;AAC1C,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,UAAI,UAAU,QAAW;AACvB,aAAK,SAAS,GAAG,kBAAkB,QAAQ,GAAG,GAAG,EAAE,IAAI;AAAA,MACzD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,WAAW,QAAmC;AAC5C,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,UAAI,UAAU,QAAW;AACvB,aAAK,SAAS,GAAG,kBAAkB,UAAU,GAAG,GAAG,EAAE,IAAI;AAAA,MAC3D;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,WAAW,QAAmC;AAC5C,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,UAAI,UAAU,QAAW;AACvB,aAAK,SAAS,GAAG,kBAAkB,UAAU,GAAG,GAAG,EAAE,IAAI;AAAA,MAC3D;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAO,QAAmC;AACxC,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,UAAI,UAAU,QAAW;AACvB,aAAK,SAAS,GAAG,IAAI;AAAA,MACvB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,QAA6B;AAC3B,WAAO,EAAE,GAAG,KAAK,SAAS;AAAA,EAC5B;AACF;;;ACtFA,IAAM,kBAAoC;AAAA,EACxC;AAAA,EAAM;AAAA,EAAO;AAAA,EAAM;AAAA,EAAO;AAAA,EAAM;AAAA,EAAO;AAAA,EAAM;AAAA,EAAO;AAAA,EAAY;AAClE;AAKO,IAAM,mBAAN,MAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAS5B,OAAO,UAAU,OAA2D;AAE1E,QAAI,KAAK,iBAAiB,KAAK,GAAG;AAChC,aAAO;AAAA,IACT;AAGA,WAAO,KAAK,cAAc,KAAwB;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,SAAS,QAA+B;AAC7C,QAAI,KAAK,WAAW,MAAM,GAAG;AAC3B,YAAM,WAAW;AACjB,YAAM,aAAa,SAAS,WAAW,SAAS,MAAM,SAAS;AAE/D,UAAI,CAAC,MAAM,QAAQ,UAAU,KAAK,WAAW,WAAW,GAAG;AACzD,cAAM,IAAI,MAAM,kDAAkD;AAAA,MACpE;AAEA,iBAAW,QAAQ,OAAK,KAAK,SAAS,CAAC,CAAC;AAAA,IAC1C,OAAO;AACL,YAAM,YAAY;AAElB,UAAI,CAAC,UAAU,SAAS,OAAO,UAAU,UAAU,UAAU;AAC3D,cAAM,IAAI,MAAM,yCAAyC;AAAA,MAC3D;AAEA,UAAI,CAAC,gBAAgB,SAAS,UAAU,EAAE,GAAG;AAC3C,cAAM,IAAI,MAAM,4BAA4B,UAAU,EAAE,EAAE;AAAA,MAC5D;AAEA,UAAI,UAAU,UAAU,QAAW;AACjC,cAAM,IAAI,MAAM,0BAA0B;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,WAAW,QAAkC;AAClD,WAAO,SAAS,UAAU,QAAQ;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,iBAAiB,OAAqB;AACnD,QAAI,CAAC,SAAS,OAAO,UAAU,UAAU;AACvC,aAAO;AAAA,IACT;AAGA,QAAI,SAAS,SAAS,QAAQ,OAAO;AACnC,aAAO;AAAA,IACT;AAGA,QAAI,WAAW,SAAS,QAAQ,SAAS,WAAW,OAAO;AACzD,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,cAAc,WAA6C;AACxE,UAAM,UAAU,OAAO,QAAQ,SAAS;AAExC,QAAI,QAAQ,WAAW,GAAG;AACxB,YAAM,IAAI,MAAM,8CAA8C;AAAA,IAChE;AAEA,UAAM,aAAgC,CAAC;AAEvC,eAAW,CAAC,KAAK,KAAK,KAAK,SAAS;AAElC,UAAI;AACJ,UAAI;AAEJ,UAAI,IAAI,SAAS,IAAI,KAAK,CAAC,IAAI,WAAW,IAAI,GAAG;AAE/C,cAAM,YAAY,IAAI,YAAY,IAAI;AACtC,gBAAQ,IAAI,UAAU,GAAG,SAAS;AAClC,cAAM,cAAc,IAAI,UAAU,YAAY,CAAC;AAE/C,YAAI,CAAC,gBAAgB,SAAS,WAA6B,GAAG;AAC5D,gBAAM,IAAI,MAAM,yCAAyC,WAAW,EAAE;AAAA,QACxE;AAEA,aAAK;AAAA,MACP,OAAO;AAEL,gBAAQ;AACR,aAAK;AAAA,MACP;AAEA,iBAAW,KAAK,EAAE,OAAO,IAAI,MAAM,CAAC;AAAA,IACtC;AAGA,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO,WAAW,CAAC;AAAA,IACrB;AAGA,WAAO,EAAE,KAAK,WAAW;AAAA,EAC3B;AA
CF;;;AC5HO,IAAe,kBAAf,MAA+B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8LpC,yBAAkC;AAChC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,oBAA6B;AAC3B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,0BAAmC;AACjC,WAAO;AAAA,EACT;AACF;;;ACvNO,IAAM,gBAAN,MAAoB;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQR,mBAAmB,QAAqD;AACtE,SAAK,iBAAiB,iBAAiB,UAAU,MAAM;AACvD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,qBAAqB,QAAqD;AACxE,SAAK,mBAAmB,iBAAiB,UAAU,MAAM;AACzD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,QAAqD;AACpE,SAAK,eAAe,iBAAiB,UAAU,MAAM;AACrD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,QAAqC;AACnC,UAAM,UAA6B,CAAC;AAEpC,QAAI,KAAK,gBAAgB;AACvB,cAAQ,KAAK,KAAK,cAAc;AAAA,IAClC;AAEA,QAAI,KAAK,kBAAkB;AACzB,cAAQ,KAAK,KAAK,gBAAgB;AAAA,IACpC;AAEA,QAAI,KAAK,cAAc;AACrB,cAAQ,KAAK,KAAK,YAAY;AAAA,IAChC;AAGA,QAAI,QAAQ,WAAW,GAAG;AACxB,aAAO;AAAA,IACT;AAGA,QAAI,QAAQ,WAAW,GAAG;AACxB,aAAO,QAAQ,CAAC;AAAA,IAClB;AAGA,WAAO,EAAE,KAAK,QAAQ;AAAA,EACxB;AACF;;;ACnDO,IAAM,mBAAN,MAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAO5B,YACmB,SACA,UACjB;AAFiB;AACA;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcH,MAAM,SAAS,QAAmD;AAEhE,UAAM,cAAc,MAAM,KAAK,SAAS,MAAM,OAAO,KAAK;AAG1D,UAAM,gBAAgB,IAAI,cAAc;AAExC,QAAI,OAAO,iBAAiB;AAC1B,oBAAc,mBAAmB,OAAO,eAAe;AAAA,IACzD;AAEA,QAAI,OAAO,mBAAmB;AAC5B,oBAAc,qBAAqB,OAAO,iBAAiB;AAAA,IAC7D;AAEA,QAAI,OAAO,eAAe;AACxB,oBAAc,iBAAiB,OAAO,aAAa;AAAA,IACrD;AAEA,UAAM,iBAAiB,cAAc,MAAM;AAG3C,UAAM,eAAe,MAAM,KAAK,QAAQ;AAAA,MACtC,OAAO;AAAA,MACP;AAAA,MACA;AAAA,QACE,MAAM,OAAO;AAAA,QACb,QAAQ;AAAA,QACR,iBAAiB;AAAA,QACjB,eAAe,OAAO;AAAA,MACxB;AAAA,IACF;AAGA,WAAO;AAAA,MACL,SAAS,aAAa;AAAA,MACtB,OAAO,OAAO;AAAA,MACd,gBAAgB;AAAA,QACd,GAAI,OAAO,mBAAmB,EAAE,UAAU,OAAO,gBAAgB;AAAA,QACjE,GAAI,OAAO,qBAAqB,EAAE,YAAY,OAAO,kBAAkB;AAAA,QACvE,GAAI,OAAO,iBAAiB,EAAE,QAAQ,OAAO,cAAc;AAAA,MAC7D;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,iBACJ,QACsC;AACtC,UAAM,SAAS,MAAM,KAAK,SAAS,MAAM;AAEzC,UAAM,UAAU,oBAAI,IAA4B;AAEhD,eAAW,UAAU,OAAO,SAAS;AACnC,YAAM,QAAQ,OAAO,SAAS,eAAe,MAAM;AAEnD,UAAI,OAAO,UAAU,UAAU;AAC7B,YAAI,CAAC,QAAQ,IAAI,KAAK,GAAG;AACvB,kBAAQ,IAAI,OAAO,CAAC,CAAC;AAAA,QACvB;AACA,gBAAQ,IAAI,KAAK,EAAG,KAAK,MAAM;AAAA,MACjC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,mBACJ,QACsC;AACtC,UAAM,SAAS,MAAM,KAAK,SAAS,MAAM;AAEzC,UAAM,UAAU,oBAAI,IAA4B;AAEhD,eAAW,UAAU,OAAO,SAAS;AACnC,YAAM,QAAQ,OAAO,SAAS,iBAAiB,KAAK;AAEpD,UAAI,OAAO,UAAU,UAAU;AAC7B,YAAI,CAAC,QAAQ,IAAI,KAAK,GAAG;AACvB,kBAAQ,IAAI,OAAO,CAAC,CAAC;AAAA,QACvB;AACA,gBAAQ,IAAI,KAAK,EAAG,KAAK,MAAM;AAAA,MACjC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;;;AC9IO,IAAe,WAAf,MAAe,UAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAkCnB,cAAc;AACtB,QAAI,eAAe,WAAU;AAC3B,YAAM,IAAI,MAAM,qDAAqD;AAAA,IACvE;AAAA,EACF;AACF;;;AChCO,IAAe,YAAf,MAAe,WAAU;AAAA;AAAA;AAAA;AAAA;AAAA,EAoDpB,cAAc;AACtB,QAAI,eAAe,YAAW;AAC5B,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAAA,EACF;AACF;;;AC9EO,IAAM,UAAN,cAAsB,UAAU;AAAA,EAC7B,YAAoB;AAAA,EAE5B,cAAc;AACZ,UAAM;AAAA,EACR;AAAA,EAEA,IAAI,YAAoB;AACtB,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,YAAY,UAAwB;AAClC,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,SACJ,QACA,SACiB;AAEjB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,aACJ,QACA,SACY;AAEZ,QAAI;AACF,aAAO,KAAK,MAAM,KAAK,SAAS;AAAA,IAClC,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,0CAA0C,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MACpG;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,cACJ,SACA,SACmB;AAEnB,WAAO,QAAQ,IAAI,MAAM,KAAK,SAAS;AAAA,EACzC;AACF;;;ACxDO,IAAM,yBAA
N,MAA6B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUlC,YACU,QACR,UACQ,gBAAyB,OACjC;AAHQ;AAEA;AAER,SAAK,WAAW,oBAAI,IAAI;AACxB,SAAK,gBAAgB,oBAAI,IAAI;AAG7B,eAAW,SAAS,QAAQ;AAC1B,YAAM,gBAAgB,SAAS,KAAK,KAAK,CAAC;AAC1C,WAAK,cAAc,IAAI,OAAO,cAAc,MAAM;AAElD,YAAM,WAAW,cAAc,IAAI,CAAC,YAAY;AAC9C,cAAM,iBAAiB,KAAK,YAAY,OAAO;AAC/C,cAAM,QAAQ,gBAAgB,MAAM;AACpC,eAAO,IAAI,OAAO,MAAM,cAAc,OAAO,KAAK;AAAA,MACpD,CAAC;AAED,WAAK,SAAS,IAAI,OAAO,QAAQ;AAAA,IACnC;AAAA,EACF;AAAA,EA9BQ;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoCR,SAAS,MAAyC;AAChD,QAAI,CAAC,QAAQ,KAAK,KAAK,EAAE,WAAW,GAAG;AACrC,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,WAAW,CAAC;AAAA,MACd;AAAA,IACF;AAEA,UAAM,SAAiC,CAAC;AACxC,QAAI,WAAW;AACf,QAAI,eAAe;AAGnB,eAAW,SAAS,KAAK,QAAQ;AAC/B,YAAM,WAAW,KAAK,SAAS,IAAI,KAAK,KAAK,CAAC;AAC9C,UAAI,aAAa;AAEjB,iBAAW,WAAW,UAAU;AAC9B,cAAM,UAAU,KAAK,MAAM,OAAO;AAClC,YAAI,SAAS;AACX,wBAAc,QAAQ;AAAA,QACxB;AAAA,MACF;AAEA,aAAO,KAAK,IAAI;AAGhB,UAAI,aAAa,UAAU;AACzB,mBAAW;AACX,uBAAe;AAAA,MACjB;AAAA,IACF;AAGA,QAAI,aAAa,GAAG;AAClB,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,WAAW;AAAA,MACb;AAAA,IACF;AAGA,UAAM,gBAAgB,KAAK,cAAc,IAAI,YAAY,KAAK;AAC9D,UAAM,aAAa,WAAW;AAE9B,WAAO;AAAA,MACL,OAAO;AAAA,MACP,YAAY,KAAK,IAAI,YAAY,CAAG;AAAA;AAAA,MACpC,WAAW;AAAA,IACb;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAc,OAA8C;AAC1D,WAAO,MAAM,IAAI,CAAC,SAAS,KAAK,SAAS,IAAI,CAAC;AAAA,EAChD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,YAAY,KAAqB;AACvC,WAAO,IAAI,QAAQ,uBAAuB,MAAM;AAAA,EAClD;AACF;;;AC3FO,IAAM,0BAAN,MAAyD;AAAA,EACtD,QAAa;AAAA,EACb;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBR,YACE,QACA,YAAoB,uCACpB;AACA,SAAK,SAAS;AACd,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,oBAAkC;AAC9C,QAAI,CAAC,KAAK,OAAO;AACf,YAAM,EAAE,SAAS,IAAI,MAAM,OAAO,sBAAsB;AACxD,WAAK,QAAQ,MAAM,SAAS,4BAA4B,KAAK,SAAS;AAAA,IACxE;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,MAAM,SAAS,MAA4C;AAEzD,QAAI,CAAC,QAAQ,KAAK,KAAK,EAAE,WAAW,GAAG;AACrC,YAAM,eAAe,IAAM,KAAK,OAAO;AACvC,YAAMA,aAAoC,CAAC;AAE3C,iBAAW,SAAS,KAAK,QAAQ;AAC/B,QAAAA,WAAU,KAAK,IAAI;AAAA,MACrB;AAEA,aAAO;AAAA,QACL,OAAO,KAAK,OAAO,CAAC;AAAA;AAAA,QACpB,YAAY;AAAA,QACZ,WAAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,QAAQ,MAAM,KAAK,kBAAkB;AAG3C,UAAM,SAAS,MAAM,MAAM,MAAM,KAAK,MAAM;AAM5C,UAAM,YAAoC,CAAC;AAC3C,aAAS,IAAI,GAAG,IAAI,OAAO,OAAO,QAAQ,KAAK;AAC7C,gBAAU,OAAO,OAAO,CAAC,CAAC,IAAI,OAAO,OAAO,CAAC;AAAA,IAC/C;AAGA,WAAO;AAAA,MACL,OAAO,OAAO,OAAO,CAAC;AAAA,MACtB,YAAY,OAAO,OAAO,CAAC;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBA,MAAM,cAAc,OAAiD;AAEnE,UAAM,KAAK,kBAAkB;AAG7B,UAAM,UAAiC,CAAC;AACxC,eAAW,QAAQ,OAAO;AACxB,YAAM,SAAS,MAAM,KAAK,SAAS,IAAI;AACvC,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,EACT;AACF;;;ACnIO,IAAM,2BAAN,MAA0D;AAAA,EACvD,kBAAmD;AAAA,EACnD;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBR,YACE,QACA,UACA,uBACA;AACA,SAAK,SAAS;AACd,SAAK,WAAW;AAChB,SAAK,kBAAkB,yBAAyB;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,wBAA2D;AACvE,QAAI,CAAC,KAAK,iBAAiB;AACzB,WAAK,kBAAkB,CAAC;AAGxB,YAAM,aAAa,MAAM,KAAK,SAAS,WAAW,KAAK,MAAM;AAE7D,eAAS,IAAI,GAAG,IAAI,KAAK,OAAO,QAAQ,KAAK;AAC3C,aAAK,gBAAgB,KAAK,OAAO,CAAC,CAAC,IAAI,WAAW,CAAC;AAAA,MACrD;AAAA,IACF;AAEA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeQ,iBAAiB,GAAa,GAAqB;AACzD,QAAI,EAAE,WAAW,EAAE,QAAQ;AACzB,YAAM,IAAI,MAAM,yDAAyD;AAAA,IAC3E;AAEA,QAAI,aAA
a;AACjB,QAAI,QAAQ;AACZ,QAAI,QAAQ;AAEZ,aAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,oBAAc,EAAE,CAAC,IAAI,EAAE,CAAC;AACxB,eAAS,EAAE,CAAC,IAAI,EAAE,CAAC;AACnB,eAAS,EAAE,CAAC,IAAI,EAAE,CAAC;AAAA,IACrB;AAEA,YAAQ,KAAK,KAAK,KAAK;AACvB,YAAQ,KAAK,KAAK,KAAK;AAGvB,QAAI,UAAU,KAAK,UAAU,GAAG;AAC9B,aAAO;AAAA,IACT;AAEA,WAAO,cAAc,QAAQ;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,sBAAsB,YAA4B;AACxD,YAAQ,aAAa,KAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,MAAM,SAAS,MAA4C;AAEzD,QAAI,CAAC,QAAQ,KAAK,KAAK,EAAE,WAAW,GAAG;AACrC,YAAM,eAAe,IAAM,KAAK,OAAO;AACvC,YAAMC,aAAoC,CAAC;AAE3C,iBAAW,SAAS,KAAK,QAAQ;AAC/B,QAAAA,WAAU,KAAK,IAAI;AAAA,MACrB;AAEA,aAAO;AAAA,QACL,OAAO,KAAK,OAAO,CAAC;AAAA;AAAA,QACpB,YAAY;AAAA,QACZ,WAAAA;AAAA,MACF;AAAA,IACF;AAGA,UAAM,kBAAkB,MAAM,KAAK,sBAAsB;AAGzD,UAAM,gBAAgB,MAAM,KAAK,SAAS,MAAM,IAAI;AAGpD,UAAM,eAAuC,CAAC;AAC9C,QAAI,gBAAgB;AACpB,QAAI,eAAe,KAAK,OAAO,CAAC;AAEhC,eAAW,SAAS,KAAK,QAAQ;AAC/B,YAAM,iBAAiB,gBAAgB,KAAK;AAC5C,YAAM,aAAa,KAAK,iBAAiB,eAAe,cAAc;AACtE,mBAAa,KAAK,IAAI;AAEtB,UAAI,aAAa,eAAe;AAC9B,wBAAgB;AAChB,uBAAe;AAAA,MACjB;AAAA,IACF;AAGA,UAAM,YAAoC,CAAC;AAC3C,eAAW,SAAS,KAAK,QAAQ;AAC/B,gBAAU,KAAK,IAAI,KAAK,sBAAsB,aAAa,KAAK,CAAC;AAAA,IACnE;AAEA,WAAO;AAAA,MACL,OAAO;AAAA,MACP,YAAY,KAAK,sBAAsB,aAAa;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBA,MAAM,cAAc,OAAiD;AAEnE,UAAM,KAAK,sBAAsB;AAGjC,UAAM,UAAiC,CAAC;AACxC,eAAW,QAAQ,OAAO;AACxB,YAAM,SAAS,MAAM,KAAK,SAAS,IAAI;AACvC,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,EACT;AACF;;;ACrOA,IAAM,0BAA0B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAsDzB,IAAM,qBAAN,MAAoD;AAAA,EACjD;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2BR,YACE,QACA,KACA,iBAAyB,yBACzB;AACA,SAAK,SAAS;AACd,SAAK,MAAM;AACX,SAAK,iBAAiB;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,YAAY,MAAsB;AACxC,UAAM,YAAY,KAAK,OAAO,KAAK,IAAI;AACvC,WAAO,KAAK,eACT,QAAQ,YAAY,SAAS,EAC7B,QAAQ,UAAU,IAAI;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,MAAM,SAAS,MAA4C;AAEzD,QAAI,CAAC,QAAQ,KAAK,KAAK,EAAE,WAAW,GAAG;AACrC,YAAM,eAAe,IAAM,KAAK,OAAO;AACvC,YAAM,YAAoC,CAAC;AAE3C,iBAAW,SAAS,KAAK,QAAQ;AAC/B,kBAAU,KAAK,IAAI;AAAA,MACrB;AAEA,aAAO;AAAA,QACL,OAAO,KAAK,OAAO,CAAC;AAAA;AAAA,QACpB,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AAGA,UAAM,SAAS,KAAK,YAAY,IAAI;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,IAAI,aAAkC,MAAM;AACtE,aAAO;AAAA,IACT,SAAS,OAAO;AAEd,YAAM,UAAU,qCAAqC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAC7G,YAAM,sBAAsB,IAAI,MAAM,OAAO;AAG7C,UAAI,iBAAiB,OAAO;AAC1B,QAAC,oBAA4B,QAAQ;AAAA,MACvC;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBA,MAAM,cAAc,OAAiD;AAEnE,UAAM,UAAiC,CAAC;AAExC,eAAW,QAAQ,OAAO;AACxB,YAAM,SAAS,MAAM,KAAK,SAAS,IAAI;AACvC,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,EACT;AACF;;;AC5IO,IAAM,qBAAN,MAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ9B,YACU,SACA,UACA,KACR;AAHQ;AACA;AACA;AAAA,EACP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmCH,MAAM,eACJ,YACA,QAC0B;AAC1B,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,QAAyB;AAAA,MAC7B,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,QAAQ;AAAA,MACR,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AAEF,UAAI,aAAa,QAAQ;AACvB,cAAM,KAAK,uBAAuB,YAAY,QAAQ,KAAK;AAA
A,MAC7D,WAAW,eAAe,QAAQ;AAChC,cAAM,KAAK,oBAAoB,YAAY,QAAQ,KAAK;AAAA,MAC1D,WAAW,eAAe,QAAQ;AAChC,cAAM,KAAK,cAAc,YAAY,QAAQ,KAAK;AAAA,MACpD;AAAA,IACF,SAAS,OAAO;AACd,YAAM,QAAQ;AAAA,QACZ,mBAAmB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAC7E;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,IAAI,IAAI;AAC5B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,uBACZ,YACA,QACA,OACe;AACf,UAAM,YAAY,OAAO,aAAa;AAEtC,qBAAiB,SAAS,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACzD;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB,CAAC,GAAG;AACF,YAAM,UAA4B,CAAC;AAEnC,iBAAW,UAAU,OAAO;AAC1B,cAAM;AAEN,YAAI;AACF,gBAAM,WAAW,KAAK,kBAAkB,QAAQ,OAAO,OAAO;AAE9D,cAAI,UAAU;AACZ,oBAAQ,KAAK;AAAA,cACX,IAAI,OAAO;AAAA,cACX,UAAU,EAAE,SAAS;AAAA,YACvB,CAAC;AAAA,UACH,OAAO;AACL,kBAAM;AAAA,UACR;AAAA,QACF,SAAS,OAAO;AACd,gBAAM;AACN,gBAAM,QAAQ;AAAA,YACZ,wBAAwB,OAAO,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UAChG;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAI;AACF,gBAAM,KAAK,QAAQ,eAAe,YAAY,OAAO;AACrD,gBAAM,kBAAkB,QAAQ;AAAA,QAClC,SAAS,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,kBACN,QACA,SACe;AACf,UAAM,WAAW,OAAO,UAAU;AAElC,QAAI,YAAY,OAAO,aAAa,YAAY,YAAY,SAAS;AACnE,aAAO,QAAQ,QAAQ;AAAA,IACzB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,oBACZ,YACA,QACA,OACe;AACf,UAAM,YAAY,OAAO,aAAa;AAEtC,qBAAiB,SAAS,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACzD;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB,CAAC,GAAG;AACF,YAAM,UAA4B,CAAC;AAEnC,iBAAW,UAAU,OAAO;AAC1B,cAAM;AAEN,YAAI;AACF,gBAAM,WAAW,MAAM,OAAO,UAAU,MAAM;AAE9C,cAAI,UAAU;AACZ,oBAAQ,KAAK;AAAA,cACX,IAAI,OAAO;AAAA,cACX,UAAU,EAAE,SAAS;AAAA,YACvB,CAAC;AAAA,UACH,OAAO;AACL,kBAAM;AAAA,UACR;AAAA,QACF,SAAS,OAAO;AACd,gBAAM;AACN,gBAAM,QAAQ;AAAA,YACZ,8BAA8B,OAAO,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACtG;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAI;AACF,gBAAM,KAAK,QAAQ,eAAe,YAAY,OAAO;AACrD,gBAAM,kBAAkB,QAAQ;AAAA,QAClC,SAAS,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,cACZ,YACA,QACA,OACe;AACf,UAAM,YAAY,OAAO,aAAa;AACtC,UAAM,EAAE,KAAK,QAAQ,gBAAgB,UAAU,IAAI,OAAO;AAC1D,UAAM,YAAY,aAAa;AAE/B,qBAAiB,SAAS,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACzD;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB,CAAC,GAAG;AACF,YAAM,UAA4B,CAAC;AAEnC,iBAAW,UAAU,OAAO;AAC1B,cAAM;AAEN,YAAI;AACF,gBAAM,WAAW,MAAM,KAAK;AAAA,YAC1B;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,cAAI,UAAU;AACZ,oBAAQ,KAAK;AAAA,cACX,IAAI,OAAO;AAAA,cACX,UAAU,EAAE,SAAS;AAAA,YACvB,CAAC;AAAA,UACH,OAAO;AACL,kBAAM;AAAA,UACR;AAAA,QACF,SAAS,OAAO;AACd,gBAAM;AACN,gBAAM,QAAQ;AAAA,YACZ,mCAAmC,OAAO,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UAC3G;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAI;AACF,gBAAM,KAAK,QAAQ,eAAe,YAAY,OAAO;AACrD,gBAAM,kBAAkB,QAAQ;AAAA,QAClC,SAAS,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAc,eACZ,QACA,KACA,QACA,WACA,gBACiB;AACjB,UAAM,OAAO,OAAO,WAAW,SAAS;AAExC,QAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,YAAM,IAAI,MAAM,2BAA2B,SAAS,GAAG;AAAA,IACzD;AAGA,UAAM,SAAS,iBACX,eACG,QAAQ,YAAY,OAAO,KAAK,IAAI,CAAC,EACrC,QAAQ,UAAU,IAAI,IACzB,6DAA6D,OAAO,KAAK,IAAI,CAAC;AAAA;AAAA,QAAa,IAAI;AAAA;AAAA;AAGnG,UAAM,SAAS,MAAM,IAAI,SAAS,MAAM;AAExC,WAAO,OAAO,KAAK;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA+BA,MAAM,aACJ,YACA,QAC0
B;AAC1B,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,QAAyB;AAAA,MAC7B,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,QAAQ;AAAA,MACR,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AACF,YAAM,KAAK,0BAA0B,YAAY,QAAQ,KAAK;AAAA,IAChE,SAAS,OAAO;AACd,YAAM,QAAQ;AAAA,QACZ,mBAAmB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAC7E;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,IAAI,IAAI;AAC5B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,0BACZ,YACA,QACA,OACe;AACf,UAAM,YAAY,OAAO,aAAa;AACtC,UAAM,YAAY,OAAO,aAAa;AACtC,UAAM,sBAAsB,OAAO,uBAAuB;AAC1D,UAAM,aAAa,OAAO,cAAc;AAExC,qBAAiB,SAAS,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACzD;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB,CAAC,GAAG;AAEF,YAAM,kBAA4B,CAAC;AACnC,YAAM,mBAAmC,CAAC;AAE1C,iBAAW,UAAU,OAAO;AAC1B,cAAM;AAGN,cAAM,OAAO,OAAO,QAAQ,OAAO,WAAW,SAAS;AAEvD,YAAI,CAAC,QAAQ,OAAO,SAAS,YAAY,KAAK,KAAK,MAAM,IAAI;AAC3D,gBAAM;AACN;AAAA,QACF;AAEA,wBAAgB,KAAK,IAAI;AACzB,yBAAiB,KAAK,MAAM;AAAA,MAC9B;AAGA,UAAI,gBAAgB,WAAW,GAAG;AAChC;AAAA,MACF;AAGA,UAAI;AACJ,UAAI;AACF,0BAAkB,MAAM,OAAO,WAAW,cAAc,eAAe;AAAA,MACzE,SAAS,OAAO;AAEd,cAAM,QAAQ;AAAA,UACZ,0EAA0E,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,QACpI;AAEA,0BAAkB,CAAC;AACnB,iBAAS,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK;AAC/C,cAAI;AACF,kBAAM,SAAS,MAAM,OAAO,WAAW,SAAS,gBAAgB,CAAC,CAAC;AAClE,4BAAgB,KAAK,MAAM;AAAA,UAC7B,SAAS,iBAAiB;AAExB,4BAAgB,KAAK,IAAI;AACzB,kBAAM,QAAQ;AAAA,cACZ,mCAAmC,iBAAiB,CAAC,EAAE,EAAE,KAAK,2BAA2B,QAAQ,gBAAgB,UAAU,eAAe;AAAA,YAC5I;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,YAAM,UAA4B,CAAC;AAEnC,eAAS,IAAI,GAAG,IAAI,iBAAiB,QAAQ,KAAK;AAChD,cAAM,SAAS,iBAAiB,CAAC;AACjC,cAAM,iBAAiB,gBAAgB,CAAC;AAExC,YAAI;AAEF,cAAI,CAAC,kBAAkB,OAAO,mBAAmB,UAAU;AACzD,kBAAM;AACN,kBAAM,QAAQ;AAAA,cACZ,qCAAqC,OAAO,EAAE;AAAA,YAChD;AACA;AAAA,UACF;AAGA,cAAI,eAAe,aAAa,qBAAqB;AACnD,kBAAM;AACN;AAAA,UACF;AAGA,gBAAM,WAAgC;AAAA,YACpC,WAAW,eAAe;AAAA,YAC1B,sBAAsB,eAAe;AAAA,UACvC;AAGA,cAAI,cAAc,eAAe,WAAW;AAC1C,kBAAM,SAAS,OAAO,QAAQ,eAAe,SAAS,EACnD,OAAO,CAAC,CAAC,GAAG,KAAK,MAAO,SAAoB,mBAAmB,EAC/D,KAAK,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,MAAO,IAAgB,CAAY,EACvD,IAAI,CAAC,CAAC,OAAO,CAAC,MAAM,KAAK;AAE5B,gBAAI,OAAO,SAAS,GAAG;AACrB,uBAAS,aAAa;AAAA,YACxB;AAAA,UACF;AAEA,kBAAQ,KAAK;AAAA,YACX,IAAI,OAAO;AAAA,YACX;AAAA,UACF,CAAC;AAAA,QACH,SAAS,OAAO;AACd,gBAAM;AACN,gBAAM,QAAQ;AAAA,YACZ,2BAA2B,OAAO,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnG;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAI;AACF,gBAAM,KAAK,QAAQ,eAAe,YAAY,OAAO;AACrD,gBAAM,kBAAkB,QAAQ;AAAA,QAClC,SAAS,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnF;AAAA,QACF;AAAA,MACF;AAGA,UAAI,OAAO,YAAY;AACrB,eAAO,WAAW,KAAK;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyBA,MAAM,eACJ,YACA,QAC0B;AAC1B,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,QAAyB;AAAA,MAC7B,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,QAAQ;AAAA,MACR,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AACF,YAAM,KAAK,2BAA2B,YAAY,QAAQ,KAAK;AAAA,IACjE,SAAS,OAAO;AACd,YAAM,QAAQ;AAAA,QACZ,mBAAmB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAC7E;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,IAAI,IAAI;AAC5B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,MAAM,UACJ,YACA,QAC0B;AAC1B,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,iBAAkC;AAAA,MACtC,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,QAAQ;AAAA,MACR,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AAEF,UAAI,OAAO,UAAU;AACnB,cAAM,iBAAiB,KAAK,kBAAkB,OAAO,UAAU,MAAM;AACrE,cAAM,QAAQ,MAAM,KAAK,eAAe,YAAY,cAAc;AAClE,aAAK,WAAW,gBAAgB,KAAK;AAGrC,YAAI,OAAO,YAAY;AACrB,iBAAO,WAAW,cAAc;AAAA,QAClC;AAAA,MACF;AAGA,UAAI,OAAO,QAAQ;AACjB,cAAM,eAAe,KAAK,kBAAkB,OAA
O,QAAQ,MAAM;AACjE,cAAM,QAAQ,MAAM,KAAK,aAAa,YAAY,YAAY;AAC9D,aAAK,WAAW,gBAAgB,KAAK;AAGrC,YAAI,OAAO,YAAY;AACrB,iBAAO,WAAW,cAAc;AAAA,QAClC;AAAA,MACF;AAGA,UAAI,OAAO,UAAU;AACnB,cAAM,iBAAiB,KAAK,kBAAkB,OAAO,UAAU,MAAM;AACrE,cAAM,QAAQ,MAAM,KAAK,eAAe,YAAY,cAAc;AAClE,aAAK,WAAW,gBAAgB,KAAK;AAGrC,YAAI,OAAO,YAAY;AACrB,iBAAO,WAAW,cAAc;AAAA,QAClC;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,qBAAe,QAAQ;AAAA,QACrB,mBAAmB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAC7E;AAAA,IACF;AAEA,mBAAe,SAAS,KAAK,IAAI,IAAI;AACrC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,kBACN,kBACA,cACG;AACH,UAAM,SAAS,EAAE,GAAG,iBAAiB;AAGrC,QAAI,aAAa,UAAU,CAAC,OAAO,QAAQ;AACzC,aAAO,SAAS,aAAa;AAAA,IAC/B;AAGA,QAAI,aAAa,aAAa,CAAC,OAAO,WAAW;AAC/C,aAAO,YAAY,aAAa;AAAA,IAClC;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,WAAW,WAA4B,OAA8B;AAC3E,cAAU,oBAAoB,MAAM;AACpC,cAAU,kBAAkB,MAAM;AAClC,cAAU,kBAAkB,MAAM;AAGlC,QAAI,MAAM,UAAU,MAAM,OAAO,SAAS,GAAG;AAC3C,UAAI,CAAC,UAAU,QAAQ;AACrB,kBAAU,SAAS,CAAC;AAAA,MACtB;AACA,gBAAU,OAAO,KAAK,GAAG,MAAM,MAAM;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,2BACZ,YACA,QACA,OACe;AACf,UAAM,YAAY,OAAO,aAAa;AAEtC,qBAAiB,SAAS,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACzD;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB,CAAC,GAAG;AACF,YAAM,UAA4B,CAAC;AAEnC,iBAAW,UAAU,OAAO;AAC1B,cAAM;AAEN,YAAI;AACF,cAAI,kBAIO;AAGX,cAAI,OAAO,eAAe;AACxB,8BAAkB,KAAK;AAAA,cACrB,OAAO,WAAW,OAAO,aAAa;AAAA,YACxC;AAAA,UACF,WAES,OAAO,YAAY;AAC1B,kBAAM,OAAO,OAAO,QAAQ,OAAO,UAAU,WAAW;AACxD,gBAAI,OAAO,SAAS,UAAU;AAC5B,gCAAkB,KAAK,eAAe,IAAI;AAAA,YAC5C;AAAA,UACF;AAEA,cAAI,iBAAiB;AACnB,kBAAM,WAAgC;AAAA,cACpC,mBAAmB,gBAAgB;AAAA,cACnC,mBAAmB,gBAAgB;AAAA,YACrC;AAEA,gBAAI,gBAAgB,MAAM;AACxB,uBAAS,mBAAmB,gBAAgB;AAAA,YAC9C;AAEA,oBAAQ,KAAK;AAAA,cACX,IAAI,OAAO;AAAA,cACX;AAAA,YACF,CAAC;AAAA,UACH,OAAO;AACL,kBAAM;AAAA,UACR;AAAA,QACF,SAAS,OAAO;AACd,gBAAM;AACN,gBAAM,QAAQ;AAAA,YACZ,2BAA2B,OAAO,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnG;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAI;AACF,gBAAM,KAAK,QAAQ,eAAe,YAAY,OAAO;AACrD,gBAAM,kBAAkB,QAAQ;AAAA,QAClC,SAAS,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,uBACN,aACuD;AACvD,QAAI,CAAC,eAAe,OAAO,gBAAgB,UAAU;AACnD,aAAO;AAAA,IACT;AAEA,UAAM,QAAQ,YAAY,MAAM,GAAG,EAAE,OAAO,OAAK,EAAE,KAAK,MAAM,EAAE;AAChE,QAAI,MAAM,WAAW,GAAG;AACtB,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO,MAAM;AAAA,MACb,OAAO,MAAM,MAAM,SAAS,CAAC;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,eACN,MACyC;AAEzC,UAAM,WAAW,KAAK,uBAAuB,IAAI;AACjD,QAAI,SAAU,QAAO;AAGrB,UAAM,OAAO,KAAK,mBAAmB,IAAI;AACzC,QAAI,KAAM,QAAO;AAGjB,UAAM,UAAU,KAAK,sBAAsB,IAAI;AAC/C,QAAI,QAAS,QAAO;AAGpB,WAAO,EAAE,OAAO,GAAG,OAAO,cAAc;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,uBACN,MACyC;AACzC,UAAM,QAAQ,KAAK,MAAM,oBAAoB;AAC7C,QAAI,OAAO;AACT,YAAM,QAAQ,MAAM,CAAC,EAAE;AACvB,YAAM,QAAQ,MAAM,CAAC,EAAE,KAAK;AAC5B,aAAO,EAAE,OAAO,MAAM;AAAA,IACxB;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,mBACN,MACyC;AACzC,UAAM,QAAQ,KAAK,MAAM,4BAA4B;AACrD,QAAI,OAAO;AACT,YAAM,QAAQ,SAAS,MAAM,CAAC,GAAG,EAAE;AACnC,YAAM,QAAQ,MAAM,CAAC,EAAE,KAAK;AAC5B,aAAO,EAAE,OAAO,MAAM;AAAA,IACxB;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,sBACN,MACyC;AACzC,UAAM,QAAQ,KAAK,MAAM,oBAAoB;AAC7C,QAAI,OAAO;AACT,YAAM,QAAQ,MAAM,CAAC,EAAE,KAAK;AAC5B,aAAO,EAAE,OAAO,GAAG,MAAM;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AACF;;;ACp5BO,IAAM,qBAAqB;AAK3B,IAAM,wBAAwB;AAM9B,SAAS,eAAe,MAAsB;AACnD,SAAO,KAAK,KAAK,KAAK,SAAS,CAAC;AAClC;AAKO,SAAS,cAAc,QAAwB;AACpD,SAAO,SAAS;AAClB;;;AC3BO,IAAM,mBAAN,MAA8C;AAA
A,EAClC,aAAa;AAAA,IAC5B;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,EACF;AAAA,EAEA,MAAM,MAAc,QAAmC;AACrD,QAAI,CAAC,KAAM,QAAO,CAAC;AAEnB,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,eAAe,QAAQ,gBAAgB;AAE7C,UAAM,WAAW,cAAc,SAAS;AACxC,UAAM,eAAe,cAAc,YAAY;AAE/C,QAAI,KAAK,UAAU,UAAU;AAC3B,aAAO,CAAC;AAAA,QACN;AAAA,QACA,OAAO;AAAA,QACP,UAAU;AAAA,UACR,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,aAAa;AAAA,UACb,WAAW;AAAA,UACX,SAAS,KAAK;AAAA,QAChB;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,SAAS,KAAK,eAAe,MAAM,UAAU,CAAC;AACpD,UAAM,SAAS,KAAK,WAAW,QAAQ,YAAY;AAEnD,WAAO,OAAO,IAAI,CAAC,OAAO,WAAW;AAAA,MACnC,MAAM,MAAM;AAAA,MACZ;AAAA,MACA,UAAU;AAAA,QACR,QAAQ;AAAA;AAAA,QACR,YAAY;AAAA,QACZ,aAAa,OAAO;AAAA,QACpB,WAAW,MAAM;AAAA,QACjB,SAAS,MAAM;AAAA,MACjB;AAAA,IACF,EAAE;AAAA,EACJ;AAAA,EAEQ,eACN,MACA,UACA,gBACqD;AACrD,QAAI,KAAK,UAAU,UAAU;AAC3B,aAAO,CAAC,EAAE,MAAM,OAAO,GAAG,KAAK,KAAK,OAAO,CAAC;AAAA,IAC9C;AAEA,QAAI,kBAAkB,KAAK,WAAW,QAAQ;AAE5C,YAAMC,UAA8D,CAAC;AACrE,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK,UAAU;AAC9C,QAAAA,QAAO,KAAK;AAAA,UACV,MAAM,KAAK,MAAM,GAAG,IAAI,QAAQ;AAAA,UAChC,OAAO;AAAA,UACP,KAAK,KAAK,IAAI,IAAI,UAAU,KAAK,MAAM;AAAA,QACzC,CAAC;AAAA,MACH;AACA,aAAOA;AAAA,IACT;AAEA,UAAM,YAAY,KAAK,WAAW,cAAc;AAChD,UAAM,QAAQ,YAAY,KAAK,MAAM,SAAS,IAAI,CAAC,IAAI;AAEvD,QAAI,MAAM,UAAU,GAAG;AAErB,aAAO,KAAK,eAAe,MAAM,UAAU,iBAAiB,CAAC;AAAA,IAC/D;AAGA,UAAM,SAA8D,CAAC;AACrE,QAAI,eAAyB,CAAC;AAC9B,QAAI,eAAe;AACnB,QAAI,gBAAgB;AAEpB,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,OAAO,MAAM,CAAC;AACpB,YAAM,WAAW,aAAa,SAAS,IACnC,CAAC,GAAG,cAAc,IAAI,EAAE,KAAK,SAAS,IACtC;AAEJ,UAAI,SAAS,UAAU,UAAU;AAC/B,YAAI,aAAa,WAAW,GAAG;AAC7B,yBAAe;AAAA,QACjB;AACA,qBAAa,KAAK,IAAI;AAAA,MACxB,OAAO;AAEL,YAAI,aAAa,SAAS,GAAG;AAC3B,gBAAM,YAAY,aAAa,KAAK,SAAS;AAC7C,iBAAO,KAAK;AAAA,YACV,MAAM;AAAA,YACN,OAAO;AAAA,YACP,KAAK,eAAe,UAAU;AAAA,UAChC,CAAC;AAAA,QACH;AAEA,uBAAe;AAEf,YAAI,KAAK,SAAS,UAAU;AAC1B,gBAAM,YAAY,KAAK,eAAe,MAAM,UAAU,iBAAiB,CAAC;AACxE,qBAAW,OAAO,WAAW;AAC3B,mBAAO,KAAK;AAAA,cACV,MAAM,IAAI;AAAA,cACV,OAAO,eAAe,IAAI;AAAA,cAC1B,KAAK,eAAe,IAAI;AAAA,YAC1B,CAAC;AAAA,UACH;AACA,yBAAe,CAAC;AAAA,QAClB,OAAO;AACL,yBAAe,CAAC,IAAI;AAAA,QACtB;AAAA,MACF;AACA,uBAAiB,KAAK,UAAU,IAAI,MAAM,SAAS,IAAI,UAAU,SAAS;AAAA,IAC5E;AAGA,QAAI,aAAa,SAAS,GAAG;AAC3B,YAAM,YAAY,aAAa,KAAK,SAAS;AAC7C,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO;AAAA,QACP,KAAK,eAAe,UAAU;AAAA,MAChC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,WACN,QACA,cACqD;AACrD,QAAI,iBAAiB,KAAK,OAAO,UAAU,GAAG;AAC5C,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,CAAC,OAAO,CAAC,CAAC;AAEzB,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,YAAM,YAAY,OAAO,IAAI,CAAC;AAC9B,YAAM,YAAY,OAAO,CAAC;AAG1B,YAAM,cAAc,UAAU,KAAK,MAAM,CAAC,YAAY;AAEtD,aAAO,KAAK;AAAA,QACV,MAAM,cAAc,UAAU;AAAA,QAC9B,OAAO,KAAK,IAAI,GAAG,UAAU,MAAM,YAAY;AAAA,QAC/C,KAAK,UAAU;AAAA,MACjB,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AACF;;;ACxKA,YAAY,UAAU;AAMf,IAAM,oBAAN,MAAwB;AAAA,EAG7B,YACU,SACA,UACA,gBACR,SACA;AAJQ;AACA;AACA;AAGR,SAAK,iBAAiB,WAAW,IAAI,iBAAiB;AAAA,EACxD;AAAA,EATQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBR,MAAM,OACJ,SACA,YACA,QACyB;AACzB,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,cAAc,MAAM,QAAQ,OAAO,IAAI,UAAU,CAAC,OAAO;AAE/D,UAAM,QAAwB;AAAA,MAC5B,oBAAoB;AAAA,MACpB,oBAAoB;AAAA,MACpB,iBAAiB;AAAA,MACjB,eAAe;AAAA,MACf,gBAAgB;AAAA,MAChB,QAAQ;AAAA,MACR,QAAQ,CAAC;AAAA,IACX;AAEA,UAAM,iBAAiB,YAAY;AAEnC,eAAW,UAAU,aAAa;AAEhC,cAAQ,aAAa;AAAA,QACnB,OAAO;AAAA,QACP,oBAAoB,MAAM;AAAA,QAC1B;AAAA,QACA,iBAAiB,MAAM;AAAA,QACvB,iBAAiB;AAAA,MACnB,CAAC;AAED,UAAI;AACF,cAAM,KAAK,WAAW,QAAQ,YAAY,QAAQ,OAAO,cAAc;AACvE,cAAM;AAAA,MACR,SAAS,OAAO;AACd,cAAM;AACN,cAAM,OAAQ,KAAK;AAAA,UACjB;AAAA,UACA,OAAO;AAAA,UACP;AAAA,QACF,CAAC;AAAA,MACH;AACA,YAAM;AAAA,IACR;AAEA,UAA
M,SAAS,KAAK,IAAI,IAAI;AAC5B,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,WACZ,UACA,YACA,QACA,OACA,gBACe;AAEf,UAAM,MAAM,MAAM,KAAK,eAAe,KAAK,QAAQ;AACnD,YAAQ,mBAAmB,GAAG;AAG9B,YAAQ,aAAa;AAAA,MACnB,OAAO;AAAA,MACP,oBAAoB,MAAM;AAAA,MAC1B;AAAA,MACA,iBAAiB,MAAM;AAAA,MACvB,iBAAiB;AAAA,IACnB,CAAC;AAGD,UAAM,UAAU,QAAQ,WAAW,KAAK;AACxC,UAAM,SAAS,QAAQ,MAAM,IAAI,MAAM;AAAA,MACrC,WAAW,QAAQ;AAAA,MACnB,cAAc,QAAQ;AAAA,IACxB,CAAC;AAGD,eAAW,SAAS,QAAQ;AAC1B,YAAM,SAAS,SAAS,IAAI;AAAA,IAC9B;AAEA,UAAM,iBAAiB,OAAO;AAC9B,YAAQ,kBAAkB,MAAM;AAGhC,YAAQ,aAAa;AAAA,MACnB,OAAO;AAAA,MACP,oBAAoB,MAAM;AAAA,MAC1B;AAAA,MACA,iBAAiB,MAAM;AAAA,MACvB,aAAa,MAAM;AAAA,MACnB,iBAAiB;AAAA,IACnB,CAAC;AAGD,UAAM,QAAQ,OAAO,IAAI,CAAC,MAAiB,EAAE,IAAI;AACjD,UAAM,aAAa,MAAM,KAAK,SAAS,WAAW,KAAK;AAGvD,UAAM,UAA0B,OAAO,IAAI,CAAC,OAAkB,MAAc;AAC1E,YAAM,WAAW,KAAK,cAAc,KAAK,OAAO,MAAM;AAEtD,aAAO;AAAA,QACL,IAAI,GAAQ,cAAS,IAAI,MAAM,CAAC,IAAI,MAAM,KAAK;AAAA,QAC/C,WAAW,WAAW,CAAC;AAAA,QACvB,MAAM,MAAM;AAAA,QACZ;AAAA,MACF;AAAA,IACF,CAAC;AAGD,YAAQ,aAAa;AAAA,MACnB,OAAO;AAAA,MACP,oBAAoB,MAAM;AAAA,MAC1B;AAAA,MACA,iBAAiB,MAAM;AAAA,MACvB,aAAa,MAAM;AAAA,MACnB,iBAAiB;AAAA,IACnB,CAAC;AAGD,UAAM,YAAY,QAAQ,aAAa;AACvC,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,WAAW;AAClD,YAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI,SAAS;AAC5C,YAAM,KAAK,QAAQ,OAAO,YAAY,KAAK;AAC3C,YAAM,kBAAkB,MAAM;AAAA,IAChC;AAAA,EACF;AAAA,EAEQ,cACN,KACA,OACA,QACqB;AAErB,UAAMC,YAAgB,cAAS,IAAI,QAAa,aAAQ,IAAI,MAAM,CAAC;AACnE,UAAMC,WAAe,aAAQ,IAAI,MAAM;AAEvC,UAAM,eAAoC;AAAA,MACxC,CAAC,eAAe,MAAM,GAAG,IAAI;AAAA,MAC7B,CAAC,eAAe,QAAQ,GAAG,IAAI;AAAA,MAC/B,CAAC,eAAe,MAAM,GAAGD;AAAA,MACzB,CAAC,eAAe,SAAS,GAAGC;AAAA,IAC9B;AAGA,UAAM,oBAAoB,QAAQ,oBAAoB,GAAG,KAAK,CAAC;AAG/D,UAAM,eAAe,QAAQ,YAAY,CAAC;AAG1C,UAAM,gBAAgB;AAAA,MACpB,YAAY,MAAM,SAAS;AAAA,MAC3B,aAAa,MAAM,SAAS;AAAA,MAC5B,WAAW,MAAM,SAAS;AAAA,MAC1B,SAAS,MAAM,SAAS;AAAA,IAC1B;AAGA,WAAO;AAAA,MACL,GAAG;AAAA,MACH,GAAG;AAAA,MACH,GAAG;AAAA,MACH,GAAG;AAAA,IACL;AAAA,EACF;AACF;;;ACvMA,YAAY,QAAQ;AACpB,YAAYC,WAAU;AAQf,IAAM,aAAN,MAA2C;AAAA,EAChD,UAAU,UAA2B;AACnC,WAAO,eAAe,KAAK,QAAQ;AAAA,EACrC;AAAA,EAEA,MAAM,KAAK,UAAqC;AAC9C,UAAM,OAAO,MAAS,YAAS,UAAU,OAAO;AAChD,UAAM,OAAY,cAAQ,QAAQ,EAAE,MAAM,CAAC,EAAE,YAAY;AACzD,UAAM,QAAQ,MAAS,QAAK,QAAQ;AACpC,UAAM,YAAiB,cAAQ,QAAQ;AAEvC,WAAO;AAAA,MACL;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,MACA,UAAU;AAAA,QACR,MAAM,MAAM;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;AC9BA,YAAYC,SAAQ;AACpB,OAAO,cAAc;AAQd,IAAM,YAAN,MAA0C;AAAA,EAC/C,UAAU,UAA2B;AACnC,WAAO,UAAU,KAAK,QAAQ;AAAA,EAChC;AAAA,EAEA,MAAM,KAAK,UAAqC;AAC9C,UAAM,aAAa,MAAS,aAAS,QAAQ;AAC7C,UAAM,UAAU,MAAM,SAAS,UAAU;AAEzC,WAAO;AAAA,MACL,MAAM,QAAQ;AAAA,MACd,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,UAAU;AAAA,QACR,OAAO,QAAQ;AAAA,QACf,MAAM,QAAQ;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AACF;;;AC5BA,OAAO,aAAa;AAQb,IAAM,aAAN,MAA2C;AAAA,EAChD,UAAU,UAA2B;AACnC,WAAO,WAAW,KAAK,QAAQ;AAAA,EACjC;AAAA,EAEA,MAAM,KAAK,UAAqC;AAC9C,UAAM,SAAS,MAAM,QAAQ,eAAe,EAAE,MAAM,SAAS,CAAC;AAE9D,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,UAAU;AAAA,QACR,UAAU,OAAO;AAAA;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AACF;;;ACzBA,YAAYC,SAAQ;AACpB,YAAY,aAAa;AAQlB,IAAM,aAAN,MAA2C;AAAA,EAChD,UAAU,UAA2B;AACnC,WAAO,YAAY,KAAK,QAAQ;AAAA,EAClC;AAAA,EAEA,MAAM,KAAK,UAAqC;AAC9C,UAAM,OAAO,MAAS,aAAS,UAAU,OAAO;AAChD,UAAM,IAAY,aAAK,IAAI;AAG3B,MAAE,4BAA4B,EAAE,OAAO;AAGvC,UAAM,OAAO,EAAE,MAAM,EAAE,KAAK,EACzB,QAAQ,QAAQ,GAAG,EACnB,KAAK;AAER,WAAO;AAAA,MACL;AAAA,MACA,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,UAAU;AAAA,QACR,OAAO,EAAE,OAAO,EAAE,KAAK,KAAK;AAAA,QAC5B,aAAa,EAAE,0BAA0B,EAAE,KAAK,SAAS,KAAK;AAAA,MAChE;AAAA,IACF;AAAA,EACF;AACF;;;ACzBO,IAAM,iBAAN,MAAqB;AAAA,EAClB,UAA4B,CAAC;AAAA,EAErC,cAAc;AAEZ,SAAK,SAAS,IAAI,WAAW,CAAC;AAC9B,SAAK,SAAS,IA
AI,UAAU,CAAC;AAC7B,SAAK,SAAS,IAAI,WAAW,CAAC;AAC9B,SAAK,SAAS,IAAI,WAAW,CAAC;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,SAAS,QAA8B;AACrC,SAAK,QAAQ,KAAK,MAAM;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAQ,UAA2B;AACjC,WAAO,KAAK,QAAQ,KAAK,OAAK,EAAE,UAAU,QAAQ,CAAC;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,KAAK,UAAqC;AAC9C,UAAM,SAAS,KAAK,QAAQ,KAAK,OAAK,EAAE,UAAU,QAAQ,CAAC;AAC3D,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,6BAA6B,QAAQ,EAAE;AAAA,IACzD;AACA,WAAO,OAAO,KAAK,QAAQ;AAAA,EAC7B;AACF;;;ACxCO,IAAM,eAAN,MAA0C;AAAA,EAC/C,MAAM,MAAc,QAAmC;AACrD,QAAI,CAAC,KAAM,QAAO,CAAC;AAEnB,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,eAAe,QAAQ,gBAAgB;AAE7C,UAAM,WAAW,cAAc,SAAS;AACxC,UAAM,eAAe,cAAc,YAAY;AAC/C,UAAM,OAAO,WAAW;AAExB,QAAI,KAAK,UAAU,UAAU;AAC3B,aAAO,CAAC;AAAA,QACN;AAAA,QACA,OAAO;AAAA,QACP,UAAU;AAAA,UACR,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,aAAa;AAAA,UACb,WAAW;AAAA,UACX,SAAS,KAAK;AAAA,QAChB;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,SAAsB,CAAC;AAC7B,QAAI,WAAW;AAEf,WAAO,WAAW,KAAK,QAAQ;AAC7B,YAAM,MAAM,KAAK,IAAI,KAAK,QAAQ,WAAW,QAAQ;AACrD,YAAM,YAAY,KAAK,MAAM,UAAU,GAAG;AAE1C,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,OAAO;AAAA,QACd,UAAU;AAAA,UACR,QAAQ;AAAA,UACR,YAAY,OAAO;AAAA,UACnB,aAAa;AAAA;AAAA,UACb,WAAW;AAAA,UACX,SAAS;AAAA,QACX;AAAA,MACF,CAAC;AAED,kBAAY;AAEZ,UAAI,QAAQ,EAAG;AAAA,IACjB;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,SAAS,cAAc,OAAO;AAAA,IACtC;AAEA,WAAO;AAAA,EACT;AACF;;;ACxDO,IAAM,kBAAN,MAA6C;AAAA,EAClD,MAAM,MAAc,QAAmC;AACrD,QAAI,CAAC,KAAM,QAAO,CAAC;AAEnB,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,eAAe,QAAQ,gBAAgB;AAE7C,UAAM,WAAW,cAAc,SAAS;AACxC,UAAM,eAAe,cAAc,YAAY;AAG/C,UAAM,YAAY,KAAK,eAAe,IAAI;AAE1C,QAAI,UAAU,WAAW,GAAG;AAC1B,aAAO,CAAC;AAAA,QACN;AAAA,QACA,OAAO;AAAA,QACP,UAAU;AAAA,UACR,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,aAAa;AAAA,UACb,WAAW;AAAA,UACX,SAAS,KAAK;AAAA,QAChB;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,YAAiE,CAAC;AACxE,QAAI,mBAA6B,CAAC;AAClC,QAAI,eAAe;AAEnB,eAAW,YAAY,WAAW;AAChC,YAAM,WAAW,iBAAiB,SAAS,IACvC,CAAC,GAAG,kBAAkB,QAAQ,EAAE,KAAK,GAAG,IACxC;AAEJ,UAAI,iBAAiB,WAAW,GAAG;AACjC,2BAAmB,CAAC,QAAQ;AAC5B,uBAAe,KAAK,QAAQ,QAAQ;AAAA,MACtC,WAAW,SAAS,UAAU,UAAU;AACtC,yBAAiB,KAAK,QAAQ;AAAA,MAChC,OAAO;AAEL,cAAM,YAAY,iBAAiB,KAAK,GAAG;AAC3C,kBAAU,KAAK;AAAA,UACb,MAAM;AAAA,UACN,OAAO;AAAA,UACP,KAAK,eAAe,UAAU;AAAA,QAChC,CAAC;AAGD,2BAAmB,CAAC,QAAQ;AAC5B,uBAAe,KAAK,QAAQ,UAAU,eAAe,CAAC;AACtD,YAAI,iBAAiB,GAAI,gBAAe;AAAA,MAC1C;AAAA,IACF;AAGA,QAAI,iBAAiB,SAAS,GAAG;AAC/B,YAAM,YAAY,iBAAiB,KAAK,GAAG;AAC3C,gBAAU,KAAK;AAAA,QACb,MAAM;AAAA,QACN,OAAO;AAAA,QACP,KAAK,eAAe,UAAU;AAAA,MAChC,CAAC;AAAA,IACH;AAGA,UAAM,cAAc,KAAK,mBAAmB,WAAW,YAAY;AAEnE,WAAO,YAAY,IAAI,CAAC,OAAO,WAAW;AAAA,MACxC,MAAM,MAAM;AAAA,MACZ;AAAA,MACA,UAAU;AAAA,QACR,QAAQ;AAAA,QACR,YAAY;AAAA,QACZ,aAAa,YAAY;AAAA,QACzB,WAAW,MAAM;AAAA,QACjB,SAAS,MAAM;AAAA,MACjB;AAAA,IACF,EAAE;AAAA,EACJ;AAAA,EAEQ,eAAe,MAAwB;AAE7C,UAAM,QAAQ,KAAK,MAAM,iCAAiC;AAC1D,QAAI,CAAC,MAAO,QAAO,CAAC,IAAI;AACxB,WAAO,MAAM,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC;AAAA,EAC1D;AAAA,EAEQ,mBACN,QACA,cACqD;AACrD,QAAI,iBAAiB,KAAK,OAAO,UAAU,GAAG;AAC5C,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,CAAC,OAAO,CAAC,CAAC;AAEzB,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,YAAM,YAAY,OAAO,IAAI,CAAC;AAC9B,YAAM,YAAY,OAAO,CAAC;AAG1B,YAAM,gBAAgB,KAAK,eAAe,UAAU,IAAI;AACxD,YAAM,eAAe,cAAc,cAAc,SAAS,CAAC,KAAK;AAEhE,UAAI,gBAAgB,aAAa,UAAU,cAAc;AACvD,eAAO,KAAK;AAAA,UACV,MAAM,eAAe,MAAM,UAAU;AAAA,UACrC,OAAO,KAAK,IAAI,GAAG,UAAU,MAAM,aAAa,MAAM;AAAA,UACtD,KAAK,UAAU;AAAA,QACjB,CAAC;AAAA,MACH,OAAO;AACL,eAAO,KAAK,SAAS;AAAA,MACvB;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;;;ACtHA,IAAM,gBAAgB;AAEtB,IAAM,4BACJ;AA6BK,IAAM,YAAN,MAAgB;AAAA,EACJ;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,
EACA;AAAA,EAEjB,YAAY,QAAyB;AACnC,SAAK,UAAU,OAAO;AACtB,SAAK,WAAW,OAAO;AACvB,SAAK,MAAM,OAAO;AAClB,SAAK,oBAAoB,OAAO;AAChC,SAAK,cAAc,OAAO,eAAe;AAGzC,SAAK,gBAAgB,IAAI,iBAAiB,KAAK,SAAS,KAAK,QAAQ;AACrE,SAAK,oBAAoB,IAAI;AAAA,MAC3B,KAAK;AAAA,MACL,KAAK;AAAA,MACL,IAAI,eAAe;AAAA,IACrB;AACA,SAAK,qBAAqB,IAAI,mBAAmB,KAAK,OAAO;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,iBACJ,MACA,WACA,QACe;AACf,UAAM,MAAM,aAAa,KAAK,SAAS;AACvC,UAAM,KAAK,QAAQ,iBAAiB,MAAM,KAAK,MAAM;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAiB,MAA6B;AAClD,UAAM,KAAK,QAAQ,iBAAiB,IAAI;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAiB,MAAgC;AACrD,WAAO,KAAK,QAAQ,iBAAiB,IAAI;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,OACJ,SACA,YACA,QACyB;AACzB,UAAM,MAAM,cAAc,KAAK;AAC/B,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,WAAO,KAAK,kBAAkB,OAAO,SAAS,KAAK,MAAM;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,SACJ,OACA,SAC0B;AAC1B,UAAM,aAAa,SAAS,cAAc,KAAK;AAC/C,QAAI,CAAC,YAAY;AACf,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,OAAO,SAAS,QAAQ,KAAK;AAGnC,QAAI;AACJ,QAAI;AACJ,UAAM,gBAAgB,SAAS;AAE/B,QAAI,SAAS,WAAW;AACtB,wBAAkB;AAAA,QAChB,OAAO,eAAe;AAAA,QACtB,IAAI;AAAA,QACJ,OAAO,QAAQ;AAAA,MACjB;AAAA,IACF;AAEA,QAAI,SAAS,OAAO;AAClB,0BAAoB;AAAA,QAClB,OAAO,iBAAiB;AAAA,QACxB,IAAI;AAAA,QACJ,OAAO,QAAQ;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,SAAS;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,QAAI,SAAS,YAAY,YAAY;AACnC,YAAM,UAAU,MAAM,KAAK,cAAc,iBAAiB,MAAM;AAEhE,YAAM,UAAU,MAAM,KAAK,QAAQ,OAAO,CAAC,EAAE,KAAK;AAClD,aAAO,EAAE,SAAS,OAAO,gBAAgB,EAAE,UAAU,iBAAiB,YAAY,mBAAmB,QAAQ,cAAc,EAAE;AAAA,IAC/H;AAEA,QAAI,SAAS,YAAY,SAAS;AAChC,YAAM,UAAU,MAAM,KAAK,cAAc,mBAAmB,MAAM;AAClE,YAAM,UAAU,MAAM,KAAK,QAAQ,OAAO,CAAC,EAAE,KAAK;AAClD,aAAO,EAAE,SAAS,OAAO,gBAAgB,EAAE,UAAU,iBAAiB,YAAY,mBAAmB,QAAQ,cAAc,EAAE;AAAA,IAC/H;AAEA,WAAO,KAAK,cAAc,SAAS,MAAM;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,OACJ,YACA,QAC0B;AAC1B,WAAO,KAAK,mBAAmB,UAAU,YAAY,MAAM;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,MACJ,UACA,SACsB;AACtB,QAAI,CAAC,KAAK,KAAK;AACb,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAGA,UAAM,kBAAkB,MAAM,KAAK,SAAS,UAAU,OAAO;AAG7D,UAAM,UAAU,gBAAgB,QAC7B,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,OAAO,OAAO,EACd,KAAK,MAAM;AAGd,UAAM,eAAe,SAAS,gBAAgB;AAC9C,UAAM,SAAS,GAAG,YAAY;AAAA;AAAA;AAAA,EAAiB,OAAO;AAAA;AAAA,YAAiB,QAAQ;AAG/E,UAAM,SAAS,MAAM,KAAK,IAAI,SAAS,QAAQ;AAAA,MAC7C,aAAa,SAAS;AAAA,MACtB,WAAW,SAAS;AAAA,IACtB,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA,SAAS,gBAAgB;AAAA,MACzB,OAAO;AAAA,MACP;AAAA,IACF;AAAA,EACF;AACF;","names":["allScores","allScores","result","basename","dirname","path","fs","fs"]}
|
|
1
|
+
{"version":3,"sources":["../src/metadata/constants.ts","../src/metadata/builder.ts","../src/filters/translator.ts","../src/adapters/vector-db-adapter.ts","../src/query/filter-builder.ts","../src/query/rag-query-composer.ts","../src/embedders/embedder.ts","../src/llm/llm-client.ts","../src/llm/mock-llm.ts","../src/enrichment/classifiers/keyword-classifier.ts","../src/enrichment/classifiers/zero-shot-classifier.ts","../src/enrichment/classifiers/embedding-classifier.ts","../src/enrichment/classifiers/llm-classifier.ts","../src/enrichment/enrichment-pipeline.ts","../src/ingestion/chunkers/text-chunker.ts","../src/ingestion/chunkers/recursive-chunker.ts","../src/ingestion/ingestion-pipeline.ts","../src/ingestion/loaders/text-loader.ts","../src/ingestion/loaders/pdf-loader.ts","../src/ingestion/loaders/docx-loader.ts","../src/ingestion/loaders/html-loader.ts","../src/ingestion/loaders/loader-registry.ts","../src/ingestion/chunkers/fixed-chunker.ts","../src/ingestion/chunkers/sentence-chunker.ts","../src/client/rag-client.ts"],"sourcesContent":["/**\n * Metadata field prefixes for the three axes of VectorORM's schema.\n *\n * These prefixes separate framework fields from user-defined metadata:\n * - __v_: Vertical axis (document identity)\n * - __h_: Horizontal axis (content/theme identity)\n * - __s_: Structural axis (position/hierarchy)\n */\nexport const METADATA_PREFIXES = {\n VERTICAL: '__v_',\n HORIZONTAL: '__h_',\n STRUCTURAL: '__s_',\n} as const;\n\n/**\n * Vertical axis fields - identify WHICH document a chunk belongs to.\n */\nexport const VerticalFields = {\n /** Unique document identifier */\n DOC_ID: '__v_doc_id',\n\n /** Original source path/URL */\n SOURCE: '__v_source',\n\n /** Logical partition key (for filtering by document subsets) */\n PARTITION: '__v_partition',\n\n /** Document type classification */\n DOC_TYPE: '__v_doc_type',\n\n /** Arbitrary vertical tags */\n TAGS: '__v_tags',\n} as const;\n\n/**\n * Horizontal axis fields - identify WHAT topic/theme a chunk covers.\n */\nexport const HorizontalFields = {\n /** Primary theme classification */\n THEME: '__h_theme',\n\n /** Multiple themes (if applicable) */\n THEMES: '__h_themes',\n\n /** Classification confidence score */\n THEME_CONFIDENCE: '__h_theme_confidence',\n\n /** Hierarchical section path (e.g., \"Chapter 3/Pricing/Rates\") */\n SECTION_PATH: '__h_section_path',\n\n /** Depth level in hierarchy (0 = root) */\n SECTION_LEVEL: '__h_section_level',\n\n /** Section header text */\n SECTION_TITLE: '__h_section_title',\n} as const;\n\n/**\n * Structural axis fields - track chunk position and relationships.\n */\nexport const StructuralFields = {\n /** Position in document (0-indexed) */\n CHUNK_INDEX: '__s_chunk_index',\n\n /** Parent chunk ID (for hierarchical chunking) */\n PARENT_ID: '__s_parent_id',\n\n /** Whether this chunk has children */\n HAS_CHILDREN: '__s_has_children',\n\n /** Total chunks in this document */\n TOTAL_CHUNKS: '__s_total_chunks',\n} as const;\n","import { METADATA_PREFIXES } from './constants';\n\n/**\n * MetadataBuilder provides a fluent API for constructing metadata objects\n * with proper V/H/S prefixes and type safety.\n *\n * Example:\n * ```typescript\n * const metadata = new MetadataBuilder()\n * .vertical({ doc_id: 'doc123', source: 'file.pdf' })\n * .horizontal({ theme: 'pricing' })\n * .structural({ chunk_index: 0, total_chunks: 10 })\n * .custom({ author: 'John Doe' })\n * .build();\n * ```\n *\n * Features:\n * - Fluent chaining API\n * - Automatic prefix application\n * - 
Skips undefined values\n * - Returns immutable copy on build()\n */\nexport class MetadataBuilder {\n private metadata: Record<string, any> = {};\n\n /**\n * Add vertical axis metadata (document identity).\n * Automatically prefixes fields with '__v_'.\n *\n * @param fields - Vertical metadata fields (doc_id, source, partition, etc.)\n * @returns This builder for chaining\n */\n vertical(fields: Record<string, any>): this {\n for (const [key, value] of Object.entries(fields)) {\n if (value !== undefined) {\n this.metadata[`${METADATA_PREFIXES.VERTICAL}${key}`] = value;\n }\n }\n return this;\n }\n\n /**\n * Add horizontal axis metadata (theme/section identity).\n * Automatically prefixes fields with '__h_'.\n *\n * @param fields - Horizontal metadata fields (theme, section_path, etc.)\n * @returns This builder for chaining\n */\n horizontal(fields: Record<string, any>): this {\n for (const [key, value] of Object.entries(fields)) {\n if (value !== undefined) {\n this.metadata[`${METADATA_PREFIXES.HORIZONTAL}${key}`] = value;\n }\n }\n return this;\n }\n\n /**\n * Add structural axis metadata (position/hierarchy).\n * Automatically prefixes fields with '__s_'.\n *\n * @param fields - Structural metadata fields (chunk_index, parent_id, etc.)\n * @returns This builder for chaining\n */\n structural(fields: Record<string, any>): this {\n for (const [key, value] of Object.entries(fields)) {\n if (value !== undefined) {\n this.metadata[`${METADATA_PREFIXES.STRUCTURAL}${key}`] = value;\n }\n }\n return this;\n }\n\n /**\n * Add custom user-defined metadata.\n * Fields are added as-is without any prefix.\n *\n * @param fields - Custom metadata fields\n * @returns This builder for chaining\n */\n custom(fields: Record<string, any>): this {\n for (const [key, value] of Object.entries(fields)) {\n if (value !== undefined) {\n this.metadata[key] = value;\n }\n }\n return this;\n }\n\n /**\n * Build and return the complete metadata object.\n * Returns a copy to prevent external modification.\n *\n * @returns Immutable copy of the metadata object\n */\n build(): Record<string, any> {\n return { ...this.metadata };\n }\n}\n","import type {\n UniversalFilter,\n FilterCondition,\n AndFilter,\n OrFilter,\n ShorthandFilter,\n FilterOperator\n} from './types';\n\n/**\n * Valid filter operators.\n */\nconst VALID_OPERATORS: FilterOperator[] = [\n 'eq', 'neq', 'in', 'nin', 'gt', 'gte', 'lt', 'lte', 'contains', 'exists'\n];\n\n/**\n * Translates filters between formats and validates structure.\n */\nexport class FilterTranslator {\n /**\n * Normalize any filter input to standard UniversalFilter format.\n *\n * Handles:\n * - Standard format (pass through)\n * - Shorthand format (convert to standard)\n * - Operator suffixes (field__op syntax)\n */\n static normalize(input: ShorthandFilter | UniversalFilter): UniversalFilter {\n // Already standard format\n if (this.isStandardFormat(input)) {\n return input as UniversalFilter;\n }\n\n // Convert shorthand\n return this.fromShorthand(input as ShorthandFilter);\n }\n\n /**\n * Validate filter structure and operators.\n *\n * Throws error if filter is invalid.\n */\n static validate(filter: UniversalFilter): void {\n if (this.isCompound(filter)) {\n const compound = filter as AndFilter | OrFilter;\n const conditions = 'and' in compound ? 
compound.and : compound.or;\n\n if (!Array.isArray(conditions) || conditions.length === 0) {\n throw new Error('Compound filter must have at least one condition');\n }\n\n conditions.forEach(c => this.validate(c));\n } else {\n const condition = filter as FilterCondition;\n\n if (!condition.field || typeof condition.field !== 'string') {\n throw new Error('Filter field must be a non-empty string');\n }\n\n if (!VALID_OPERATORS.includes(condition.op)) {\n throw new Error(`Invalid filter operator: ${condition.op}`);\n }\n\n if (condition.value === undefined) {\n throw new Error('Filter value is required');\n }\n }\n }\n\n /**\n * Check if filter is compound (AND/OR).\n */\n static isCompound(filter: UniversalFilter): boolean {\n return 'and' in filter || 'or' in filter;\n }\n\n /**\n * Check if input is already in standard format.\n */\n private static isStandardFormat(input: any): boolean {\n if (!input || typeof input !== 'object') {\n return false;\n }\n\n // Check for compound filter\n if ('and' in input || 'or' in input) {\n return true;\n }\n\n // Check for filter condition\n if ('field' in input && 'op' in input && 'value' in input) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Convert shorthand format to standard.\n */\n private static fromShorthand(shorthand: ShorthandFilter): UniversalFilter {\n const entries = Object.entries(shorthand);\n\n if (entries.length === 0) {\n throw new Error('Cannot convert empty shorthand filter object');\n }\n\n const conditions: FilterCondition[] = [];\n\n for (const [key, value] of entries) {\n // Parse field__op syntax\n let field: string;\n let op: FilterOperator;\n\n if (key.includes('__') && !key.startsWith('__')) {\n // Has operator suffix\n const lastIndex = key.lastIndexOf('__');\n field = key.substring(0, lastIndex);\n const extractedOp = key.substring(lastIndex + 2);\n\n if (!VALID_OPERATORS.includes(extractedOp as FilterOperator)) {\n throw new Error(`Invalid filter operator in shorthand: ${extractedOp}`);\n }\n\n op = extractedOp as FilterOperator;\n } else {\n // Implicit eq\n field = key;\n op = 'eq';\n }\n\n conditions.push({ field, op, value });\n }\n\n // Single condition - return as-is\n if (conditions.length === 1) {\n return conditions[0];\n }\n\n // Multiple conditions - wrap in AND\n return { and: conditions };\n }\n}\n","import type { VectorRecord, SearchResult } from '../types';\nimport type { UniversalFilter } from '../filters';\nimport type { CollectionStats, MetadataUpdate, DistanceMetric } from './types';\n\n/**\n * Abstract base class for all vector database adapters.\n *\n * This is the KEY abstraction that enables database-agnostic operations.\n * Each database (Pinecone, Chroma, Qdrant, etc.) implements this interface,\n * allowing the SDK to work with any vector database.\n *\n * Design principles:\n * 1. All methods are abstract (must be implemented by subclasses)\n * 2. Capability flags have default implementations (can be overridden)\n * 3. Universal filter translation is adapter-specific\n * 4. 
Async iteration enables efficient enrichment pipelines\n *\n * @abstract\n */\nexport abstract class VectorDBAdapter {\n // ============================================================================\n // CONNECTION MANAGEMENT\n // ============================================================================\n\n /**\n * Connect to the vector database.\n *\n * Initialize client, authenticate, verify connection.\n */\n abstract connect(): Promise<void>;\n\n /**\n * Disconnect from the vector database.\n *\n * Clean up resources, close connections.\n */\n abstract disconnect(): Promise<void>;\n\n /**\n * Check if currently connected to the database.\n */\n abstract isConnected(): Promise<boolean>;\n\n // ============================================================================\n // COLLECTION MANAGEMENT\n // ============================================================================\n\n /**\n * Create a new vector collection.\n *\n * @param name - Collection name\n * @param dimension - Vector dimension\n * @param metric - Distance metric (default: cosine)\n */\n abstract createCollection(\n name: string,\n dimension: number,\n metric?: DistanceMetric\n ): Promise<void>;\n\n /**\n * Delete a collection and all its vectors.\n *\n * @param name - Collection name\n */\n abstract deleteCollection(name: string): Promise<void>;\n\n /**\n * Check if a collection exists.\n *\n * @param name - Collection name\n */\n abstract collectionExists(name: string): Promise<boolean>;\n\n /**\n * Get statistics about a collection.\n *\n * @param name - Collection name\n */\n abstract getCollectionStats(name: string): Promise<CollectionStats>;\n\n // ============================================================================\n // VECTOR OPERATIONS\n // ============================================================================\n\n /**\n * Upsert (insert or update) vector records.\n *\n * This is the primary method for adding vectors to the database.\n * If a record with the same ID exists, it is updated.\n *\n * @param collection - Collection name\n * @param records - Vector records to upsert\n */\n abstract upsert(collection: string, records: VectorRecord[]): Promise<void>;\n\n /**\n * Fetch vector records by ID.\n *\n * @param collection - Collection name\n * @param ids - Record IDs to fetch\n * @returns Array of matching records (may be empty)\n */\n abstract fetch(collection: string, ids: string[]): Promise<VectorRecord[]>;\n\n /**\n * Delete vector records by ID.\n *\n * @param collection - Collection name\n * @param ids - Record IDs to delete\n */\n abstract delete(collection: string, ids: string[]): Promise<void>;\n\n // ============================================================================\n // METADATA OPERATIONS (CRITICAL FOR ENRICHMENT)\n // ============================================================================\n\n /**\n * Update metadata for existing records without re-uploading vectors.\n *\n * This is CRITICAL for enrichment pipelines where we need to:\n * 1. Insert initial vectors with basic metadata\n * 2. Later enrich with vertical/horizontal metadata\n * 3. 
Avoid re-uploading large embedding vectors\n *\n * @param collection - Collection name\n * @param updates - Metadata updates to apply\n */\n abstract updateMetadata(\n collection: string,\n updates: MetadataUpdate[]\n ): Promise<void>;\n\n // ============================================================================\n // SEARCH OPERATIONS\n // ============================================================================\n\n /**\n * Search for similar vectors.\n *\n * @param collection - Collection name\n * @param queryVector - Query vector to search with\n * @param options - Search options\n * @returns Search results\n */\n abstract search(\n collection: string,\n queryVector: number[],\n options?: {\n topK?: number;\n filter?: UniversalFilter;\n includeMetadata?: boolean;\n includeValues?: boolean;\n }\n ): Promise<SearchResult>;\n\n // ============================================================================\n // FILTER TRANSLATION (KEY FOR DB AGNOSTICISM)\n // ============================================================================\n\n /**\n * Translate universal filter to database-specific filter format.\n *\n * This is the KEY method that enables database-agnostic filtering.\n * Each adapter translates the universal filter to its native format:\n *\n * - Pinecone: {field: {$eq: value}}\n * - Qdrant: {must: [{key: field, match: {value}}]}\n * - Chroma: {field: value}\n *\n * @param filter - Universal filter\n * @returns Database-specific filter object\n */\n abstract translateFilter(filter: UniversalFilter): any;\n\n // ============================================================================\n // ITERATION (FOR ENRICHMENT PIPELINE)\n // ============================================================================\n\n /**\n * Iterate over all vectors in a collection in batches.\n *\n * This enables efficient enrichment pipelines:\n * 1. Fetch vectors in batches\n * 2. Enrich each batch with metadata\n * 3. 
Update metadata back to DB\n *\n * @param collection - Collection name\n * @param options - Iteration options\n * @yields Batches of vector records\n */\n abstract iterate(\n collection: string,\n options?: {\n batchSize?: number;\n filter?: UniversalFilter;\n }\n ): AsyncIterableIterator<VectorRecord[]>;\n\n // ============================================================================\n // CAPABILITY FLAGS (WITH DEFAULT IMPLEMENTATIONS)\n // ============================================================================\n\n /**\n * Whether this adapter supports metadata updates without re-uploading vectors.\n *\n * Default: false (must re-upload entire record)\n * Override to return true if your DB supports partial updates.\n */\n supportsMetadataUpdate(): boolean {\n return false;\n }\n\n /**\n * Whether this adapter supports filtering during search.\n *\n * Default: false (no filtering support)\n * Override to return true if your DB supports metadata filtering.\n */\n supportsFiltering(): boolean {\n return false;\n }\n\n /**\n * Whether this adapter supports batch operations efficiently.\n *\n * Default: false (single operations only)\n * Override to return true if your DB supports batch upsert/delete.\n */\n supportsBatchOperations(): boolean {\n return false;\n }\n}\n","import type { UniversalFilter, AndFilter } from '../filters/types';\nimport { FilterTranslator } from '../filters/translator';\n\n/**\n * FilterBuilder - Utility for combining multiple filters with fluent API.\n *\n * Provides a convenient way to combine vertical, horizontal, and custom filters\n * into a single UniversalFilter with AND logic.\n *\n * @example\n * ```typescript\n * const filter = new FilterBuilder()\n * .withVerticalFilter({ field: 'doc_id', op: 'eq', value: 'doc123' })\n * .withHorizontalFilter({ field: 'theme', op: 'eq', value: 'legal' })\n * .build();\n * ```\n */\nexport class FilterBuilder {\n private verticalFilter?: UniversalFilter;\n private horizontalFilter?: UniversalFilter;\n private customFilter?: UniversalFilter;\n\n /**\n * Add a vertical (document-level) filter.\n *\n * @param filter - The vertical filter to add (standard or shorthand format)\n * @returns This builder for method chaining\n */\n withVerticalFilter(filter: UniversalFilter | Record<string, any>): this {\n this.verticalFilter = FilterTranslator.normalize(filter);\n return this;\n }\n\n /**\n * Add a horizontal (theme-level) filter.\n *\n * @param filter - The horizontal filter to add (standard or shorthand format)\n * @returns This builder for method chaining\n */\n withHorizontalFilter(filter: UniversalFilter | Record<string, any>): this {\n this.horizontalFilter = FilterTranslator.normalize(filter);\n return this;\n }\n\n /**\n * Add a custom user-defined filter.\n *\n * @param filter - The custom filter to add (standard or shorthand format)\n * @returns This builder for method chaining\n */\n withCustomFilter(filter: UniversalFilter | Record<string, any>): this {\n this.customFilter = FilterTranslator.normalize(filter);\n return this;\n }\n\n /**\n * Build the combined filter.\n *\n * Combination logic:\n * - If no filters: returns undefined\n * - If single filter: returns it directly\n * - If multiple filters: combines with AND logic\n *\n * @returns The combined filter, or undefined if no filters were added\n */\n build(): UniversalFilter | undefined {\n const filters: UniversalFilter[] = [];\n\n if (this.verticalFilter) {\n filters.push(this.verticalFilter);\n }\n\n if (this.horizontalFilter) {\n 
filters.push(this.horizontalFilter);\n }\n\n if (this.customFilter) {\n filters.push(this.customFilter);\n }\n\n // No filters\n if (filters.length === 0) {\n return undefined;\n }\n\n // Single filter - return as-is\n if (filters.length === 1) {\n return filters[0];\n }\n\n // Multiple filters - combine with AND\n return { and: filters } as AndFilter;\n }\n}\n","import type { VectorDBAdapter } from '../adapters/vector-db-adapter';\nimport type { Embedder } from '../embedders/embedder';\nimport type { RetrievalParams, RetrievalResult } from './types';\nimport type { VectorRecord } from '../types/vector-record';\nimport { FilterBuilder } from './filter-builder';\nimport { VerticalFields, HorizontalFields } from '../metadata/constants';\n\n/**\n * RAGQueryComposer - Main orchestrator for retrieval operations.\n *\n * Coordinates between embedder and vector database adapter to perform\n * semantic search with filtering. Provides specialized methods for\n * grouping results by vertical (document) or horizontal (theme) dimensions.\n *\n * @example\n * ```typescript\n * const composer = new RAGQueryComposer(adapter, embedder);\n *\n * // Basic retrieval\n * const result = await composer.retrieve({\n * query: 'pricing information',\n * collection: 'documents',\n * topK: 10\n * });\n *\n * // Retrieval with filters\n * const filtered = await composer.retrieve({\n * query: 'pricing information',\n * collection: 'documents',\n * topK: 10,\n * verticalFilters: { doc_id: 'contract-123' },\n * horizontalFilters: { theme: 'legal' }\n * });\n *\n * // Grouped by document\n * const byDocument = await composer.retrieveVertical({\n * query: 'pricing information',\n * collection: 'documents',\n * topK: 10\n * });\n * ```\n */\nexport class RAGQueryComposer {\n /**\n * Create a new RAGQueryComposer.\n *\n * @param adapter - Vector database adapter for search operations\n * @param embedder - Embedder for converting text queries to vectors\n */\n constructor(\n private readonly adapter: VectorDBAdapter,\n private readonly embedder: Embedder\n ) {}\n\n /**\n * Main retrieval method.\n *\n * Performs semantic search with optional filtering:\n * 1. Embeds query text using embedder\n * 2. Builds combined filter using FilterBuilder\n * 3. Calls adapter.search() with query vector and filter\n * 4. Returns results with filter information\n *\n * @param params - Retrieval parameters\n * @returns Retrieval result with records and filter information\n */\n async retrieve(params: RetrievalParams): Promise<RetrievalResult> {\n // 1. Embed the query text\n const queryVector = await this.embedder.embed(params.query);\n\n // 2. Build combined filter using FilterBuilder\n const filterBuilder = new FilterBuilder();\n\n if (params.verticalFilters) {\n filterBuilder.withVerticalFilter(params.verticalFilters);\n }\n\n if (params.horizontalFilters) {\n filterBuilder.withHorizontalFilter(params.horizontalFilters);\n }\n\n if (params.customFilters) {\n filterBuilder.withCustomFilter(params.customFilters);\n }\n\n const combinedFilter = filterBuilder.build();\n\n // 3. Call adapter.search() with query vector and filter\n const searchResult = await this.adapter.search(\n params.collection,\n queryVector,\n {\n topK: params.topK,\n filter: combinedFilter,\n includeMetadata: true,\n includeValues: params.includeEmbeddings\n }\n );\n\n // 4. 
Return RetrievalResult with records and filters applied\n return {\n records: searchResult.records,\n query: params.query,\n filtersApplied: {\n ...(params.verticalFilters && { vertical: params.verticalFilters }),\n ...(params.horizontalFilters && { horizontal: params.horizontalFilters }),\n ...(params.customFilters && { custom: params.customFilters })\n }\n };\n }\n\n /**\n * Retrieve and group results by document ID.\n *\n * Calls retrieve() and organizes results into a Map keyed by __v_doc_id.\n * Records without a doc_id are excluded.\n *\n * @param params - Retrieval parameters\n * @returns Map of document ID to array of records\n */\n async retrieveVertical(\n params: RetrievalParams\n ): Promise<Map<string, VectorRecord[]>> {\n const result = await this.retrieve(params);\n\n const grouped = new Map<string, VectorRecord[]>();\n\n for (const record of result.records) {\n const docId = record.metadata[VerticalFields.DOC_ID];\n\n if (typeof docId === 'string') {\n if (!grouped.has(docId)) {\n grouped.set(docId, []);\n }\n grouped.get(docId)!.push(record);\n }\n }\n\n return grouped;\n }\n\n /**\n * Retrieve and group results by theme.\n *\n * Calls retrieve() and organizes results into a Map keyed by __h_theme.\n * Records without a theme are excluded.\n *\n * @param params - Retrieval parameters\n * @returns Map of theme to array of records\n */\n async retrieveHorizontal(\n params: RetrievalParams\n ): Promise<Map<string, VectorRecord[]>> {\n const result = await this.retrieve(params);\n\n const grouped = new Map<string, VectorRecord[]>();\n\n for (const record of result.records) {\n const theme = record.metadata[HorizontalFields.THEME];\n\n if (typeof theme === 'string') {\n if (!grouped.has(theme)) {\n grouped.set(theme, []);\n }\n grouped.get(theme)!.push(record);\n }\n }\n\n return grouped;\n }\n}\n","/**\n * Abstract base class for text embedding models.\n *\n * This abstraction allows the VectorORM to work with any embedding provider\n * (OpenAI, Cohere, HuggingFace, etc.) 
by implementing a consistent interface.\n *\n * Implementations must provide:\n * - `embed()`: Convert a single text string into a vector embedding\n * - `embedBatch()`: Convert multiple texts into embeddings efficiently\n * - `dimensions`: The size of the embedding vectors produced\n * - `modelName`: Identifier for the embedding model being used\n *\n * @example\n * ```typescript\n * class OpenAIEmbedder extends Embedder {\n * get dimensions(): number { return 1536; }\n * get modelName(): string { return 'text-embedding-ada-002'; }\n *\n * async embed(text: string): Promise<number[]> {\n * // Call OpenAI API\n * }\n *\n * async embedBatch(texts: string[]): Promise<number[][]> {\n * // Batch call to OpenAI API\n * }\n * }\n * ```\n */\nexport abstract class Embedder {\n /**\n * The dimensionality of embeddings produced by this model.\n * Must be consistent across all embeddings from the same model.\n */\n abstract get dimensions(): number;\n\n /**\n * Identifier for the embedding model.\n * Used for tracking which model generated embeddings.\n */\n abstract get modelName(): string;\n\n /**\n * Embed a single text string into a vector.\n *\n * @param text - The text to embed\n * @returns A promise that resolves to a number array representing the embedding\n */\n abstract embed(text: string): Promise<number[]>;\n\n /**\n * Embed multiple texts into vectors efficiently.\n * Implementations should maintain the order of input texts in the output.\n *\n * @param texts - Array of texts to embed\n * @returns A promise that resolves to an array of embeddings, one per input text\n */\n abstract embedBatch(texts: string[]): Promise<number[][]>;\n\n /**\n * Constructor is protected to prevent direct instantiation of abstract class.\n * Subclasses can call super() in their constructors.\n */\n protected constructor() {\n if (new.target === Embedder) {\n throw new Error('Cannot instantiate abstract class Embedder directly');\n }\n }\n}\n","import type { GenerateOptions } from './types';\n\n/**\n * Abstract base class for LLM (Large Language Model) clients.\n *\n * This abstraction allows the VectorORM to work with any LLM provider\n * (OpenAI, Anthropic, Google, etc.) 
by implementing a consistent interface.\n *\n * Implementations must provide:\n * - `generate()`: Generate text from a prompt\n * - `generateJSON<T>()`: Generate structured JSON output\n * - `generateBatch()`: Generate multiple responses efficiently\n * - `modelName`: Identifier for the LLM model being used\n * - `provider`: Name of the LLM provider\n *\n * @example\n * ```typescript\n * class OpenAIClient extends LLMClient {\n * get modelName(): string { return 'gpt-4'; }\n * get provider(): string { return 'openai'; }\n *\n * async generate(prompt: string, options?: GenerateOptions): Promise<string> {\n * // Call OpenAI API\n * }\n *\n * async generateJSON<T>(prompt: string, options?: GenerateOptions): Promise<T> {\n * // Call OpenAI API with JSON mode\n * }\n *\n * async generateBatch(prompts: string[], options?: GenerateOptions): Promise<string[]> {\n * // Batch call to OpenAI API\n * }\n * }\n * ```\n */\nexport abstract class LLMClient {\n /**\n * Identifier for the LLM model.\n * Used for tracking which model generated responses.\n */\n abstract get modelName(): string;\n\n /**\n * Name of the LLM provider.\n * Examples: 'openai', 'anthropic', 'google', 'mock'\n */\n abstract get provider(): string;\n\n /**\n * Generate text from a prompt.\n *\n * @param prompt - The text prompt to send to the LLM\n * @param options - Optional generation parameters\n * @returns A promise that resolves to the generated text\n */\n abstract generate(prompt: string, options?: GenerateOptions): Promise<string>;\n\n /**\n * Generate structured JSON output from a prompt.\n * The LLM will be instructed to return valid JSON that matches type T.\n *\n * @param prompt - The text prompt to send to the LLM\n * @param options - Optional generation parameters\n * @returns A promise that resolves to the parsed JSON object\n */\n abstract generateJSON<T>(\n prompt: string,\n options?: GenerateOptions\n ): Promise<T>;\n\n /**\n * Generate multiple responses efficiently.\n * Implementations should maintain the order of input prompts in the output.\n *\n * @param prompts - Array of prompts to process\n * @param options - Optional generation parameters\n * @returns A promise that resolves to an array of responses, one per input prompt\n */\n abstract generateBatch(\n prompts: string[],\n options?: GenerateOptions\n ): Promise<string[]>;\n\n /**\n * Constructor is protected to prevent direct instantiation of abstract class.\n * Subclasses can call super() in their constructors.\n */\n protected constructor() {\n if (new.target === LLMClient) {\n throw new Error('Cannot instantiate abstract class LLMClient directly');\n }\n }\n}\n","import { LLMClient } from './llm-client';\nimport type { GenerateOptions } from './types';\n\n/**\n * MockLLM for testing purposes only.\n * Returns canned responses that can be set programmatically.\n *\n * @example\n * ```typescript\n * const llm = new MockLLM();\n * llm.setResponse('Hello, world!');\n * const result = await llm.generate('Say hello'); // Returns 'Hello, world!'\n * ```\n */\nexport class MockLLM extends LLMClient {\n private _response: string = '';\n\n constructor() {\n super();\n }\n\n get modelName(): string {\n return 'mock-llm-v1';\n }\n\n get provider(): string {\n return 'mock';\n }\n\n /**\n * Set the canned response that will be returned by generate methods.\n *\n * @param response - The response text to return\n */\n setResponse(response: string): void {\n this._response = response;\n }\n\n async generate(\n prompt: string,\n options?: GenerateOptions\n ): 
Promise<string> {\n // Ignore prompt and options, return canned response\n return this._response;\n }\n\n async generateJSON<T>(\n prompt: string,\n options?: GenerateOptions\n ): Promise<T> {\n // Parse the canned response as JSON\n try {\n return JSON.parse(this._response) as T;\n } catch (error) {\n throw new Error(\n `Failed to parse mock response as JSON: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n async generateBatch(\n prompts: string[],\n options?: GenerateOptions\n ): Promise<string[]> {\n // Return the same canned response for all prompts\n return prompts.map(() => this._response);\n }\n}\n","/**\n * Fast, deterministic keyword-based theme classifier\n * Uses precompiled regex patterns with word boundaries for efficient matching\n */\n\nexport interface ThemeClassificationResult {\n theme: string;\n confidence: number;\n allScores?: Record<string, number>;\n}\n\nexport class KeywordThemeClassifier {\n private patterns: Map<string, RegExp[]>;\n private keywordCounts: Map<string, number>;\n\n /**\n * Creates a new KeywordThemeClassifier\n * @param themes - Array of theme names\n * @param keywords - Map of theme names to their keyword arrays\n * @param caseSensitive - Whether matching should be case sensitive (default: false)\n */\n constructor(\n private themes: string[],\n keywords: Record<string, string[]>,\n private caseSensitive: boolean = false\n ) {\n this.patterns = new Map();\n this.keywordCounts = new Map();\n\n // Precompile regex patterns for each theme\n for (const theme of themes) {\n const themeKeywords = keywords[theme] || [];\n this.keywordCounts.set(theme, themeKeywords.length);\n\n const patterns = themeKeywords.map((keyword) => {\n const escapedKeyword = this.escapeRegex(keyword);\n const flags = caseSensitive ? 
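A short usage sketch of the mock in a test, assuming `MockLLM` is exported from the package root: because `generateJSON` simply parses the canned string, typed JSON paths can be exercised without any real provider.

```typescript
import { MockLLM } from '@vectororm/core'; // assumed export path

interface Sentiment {
  label: string;
  score: number;
}

async function demo(): Promise<void> {
  const llm = new MockLLM();
  llm.setResponse('{"label":"positive","score":0.98}');

  await llm.generate('any prompt');                  // returns the canned string
  const parsed = await llm.generateJSON<Sentiment>('any prompt');
  console.log(parsed.label, parsed.score);           // positive 0.98

  const batch = await llm.generateBatch(['a', 'b']);
  console.log(batch.length);                         // 2 (same response twice)
}
```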
'g' : 'gi';\n return new RegExp(`\\\\b${escapedKeyword}\\\\b`, flags);\n });\n\n this.patterns.set(theme, patterns);\n }\n }\n\n /**\n * Classify a single text\n * @param text - Text to classify\n * @returns Classification result with theme, confidence, and all scores\n */\n classify(text: string): ThemeClassificationResult {\n if (!text || text.trim().length === 0) {\n return {\n theme: 'unknown',\n confidence: 0,\n allScores: {},\n };\n }\n\n const scores: Record<string, number> = {};\n let maxScore = 0;\n let winningTheme = 'unknown';\n\n // Count keyword matches for each theme\n for (const theme of this.themes) {\n const patterns = this.patterns.get(theme) || [];\n let matchCount = 0;\n\n for (const pattern of patterns) {\n const matches = text.match(pattern);\n if (matches) {\n matchCount += matches.length;\n }\n }\n\n scores[theme] = matchCount;\n\n // Track highest scoring theme (first theme wins ties)\n if (matchCount > maxScore) {\n maxScore = matchCount;\n winningTheme = theme;\n }\n }\n\n // No matches found\n if (maxScore === 0) {\n return {\n theme: 'unknown',\n confidence: 0,\n allScores: scores,\n };\n }\n\n // Normalize confidence: matches / total keywords for winning theme\n const totalKeywords = this.keywordCounts.get(winningTheme) || 1;\n const confidence = maxScore / totalKeywords;\n\n return {\n theme: winningTheme,\n confidence: Math.min(confidence, 1.0), // Cap at 1.0\n allScores: scores,\n };\n }\n\n /**\n * Classify multiple texts in batch\n * @param texts - Array of texts to classify\n * @returns Array of classification results\n */\n classifyBatch(texts: string[]): ThemeClassificationResult[] {\n return texts.map((text) => this.classify(text));\n }\n\n /**\n * Escape special regex characters in a string\n * @param str - String to escape\n * @returns Escaped string safe for use in regex\n */\n private escapeRegex(str: string): string {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n }\n}\n","/**\n * Zero-shot theme classifier using Transformers.js\n * Uses pre-trained models without requiring fine-tuning or training data\n */\n\nimport type { ThemeClassifier, ThemeClassification } from './theme-classifier';\n\n/**\n * Zero-shot classification using pre-trained transformer models.\n *\n * This classifier uses Hugging Face's zero-shot classification pipeline\n * to classify text into themes without requiring training data or fine-tuning.\n * The model is loaded lazily on the first classify() call to improve startup time.\n *\n * Features:\n * - No training data required\n * - Works with any set of theme labels\n * - Lazy model loading (loads on first classification)\n * - Sequential batch processing to avoid memory issues\n * - Handles empty text with uniform scores\n *\n * @example\n * ```typescript\n * const classifier = new ZeroShotThemeClassifier(['technology', 'sports', 'business']);\n * const result = await classifier.classify('Machine learning is transforming AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.95\n * ```\n */\nexport class ZeroShotThemeClassifier implements ThemeClassifier {\n private model: any = null;\n private modelName: string;\n private themes: string[];\n\n /**\n * Creates a new ZeroShotThemeClassifier\n *\n * @param themes - Array of theme labels to classify into\n * @param modelName - Name of the Hugging Face model to use (default: 'Xenova/distilbert-base-uncased-mnli')\n *\n * @example\n * ```typescript\n * // Use default model\n * const classifier = new 
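To make the scoring rule concrete: confidence is the match count divided by the winning theme's total keyword count, capped at 1.0. A small sketch (import path assumed):

```typescript
import { KeywordThemeClassifier } from '@vectororm/core'; // assumed export path

const classifier = new KeywordThemeClassifier(
  ['pricing', 'support'],
  {
    pricing: ['price', 'cost', 'rate', 'fee'], // 4 keywords
    support: ['help', 'ticket'],
  }
);

// 'price' and 'fee' hit: 2 matches / 4 pricing keywords -> confidence 0.5.
const result = classifier.classify('The price includes a flat fee.');
// result.theme === 'pricing'; result.confidence === 0.5
```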
ZeroShotThemeClassifier(['technology', 'sports', 'finance']);\n *\n * // Use custom model\n * const classifier = new ZeroShotThemeClassifier(\n * ['positive', 'negative'],\n * 'Xenova/distilbert-base-uncased-mnli'\n * );\n * ```\n */\n constructor(\n themes: string[],\n modelName: string = 'Xenova/distilbert-base-uncased-mnli'\n ) {\n this.themes = themes;\n this.modelName = modelName;\n }\n\n /**\n * Lazy loads the zero-shot classification model\n * Only loads once on first call, subsequent calls reuse the loaded model\n *\n * @returns Promise that resolves to the loaded pipeline\n */\n private async ensureModelLoaded(): Promise<any> {\n if (!this.model) {\n const { pipeline } = await import('@xenova/transformers');\n this.model = await pipeline('zero-shot-classification', this.modelName);\n }\n return this.model;\n }\n\n /**\n * Classify a single text into one of the provided themes\n *\n * @param text - The text content to classify\n * @returns A promise that resolves to the theme classification result\n *\n * @example\n * ```typescript\n * const classifier = new ZeroShotThemeClassifier(['technology', 'sports']);\n * const result = await classifier.classify('Machine learning and AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.92\n * console.log(result.allScores); // { technology: 0.92, sports: 0.08 }\n * ```\n */\n async classify(text: string): Promise<ThemeClassification> {\n // Handle empty text with uniform scores\n if (!text || text.trim().length === 0) {\n const uniformScore = 1.0 / this.themes.length;\n const allScores: Record<string, number> = {};\n\n for (const theme of this.themes) {\n allScores[theme] = uniformScore;\n }\n\n return {\n theme: this.themes[0], // Return first theme\n confidence: uniformScore,\n allScores,\n };\n }\n\n const model = await this.ensureModelLoaded();\n\n // Run zero-shot classification\n const result = await model(text, this.themes) as {\n labels: string[];\n scores: number[];\n };\n\n // Build scores map\n const allScores: Record<string, number> = {};\n for (let i = 0; i < result.labels.length; i++) {\n allScores[result.labels[i]] = result.scores[i];\n }\n\n // Return highest scoring theme (first in result)\n return {\n theme: result.labels[0],\n confidence: result.scores[0],\n allScores,\n };\n }\n\n /**\n * Classify multiple texts efficiently\n *\n * Processes texts sequentially to avoid memory issues with large batches.\n * The model is loaded once and reused for all texts.\n *\n * @param texts - Array of text contents to classify\n * @returns A promise that resolves to an array of theme classifications\n *\n * @example\n * ```typescript\n * const classifier = new ZeroShotThemeClassifier(['technology', 'sports', 'finance']);\n * const results = await classifier.classifyBatch([\n * 'Machine learning is transforming AI',\n * 'The football team won the championship',\n * 'Stock market hits record high'\n * ]);\n * // results[0].theme === 'technology'\n * // results[1].theme === 'sports'\n * // results[2].theme === 'finance'\n * ```\n */\n async classifyBatch(texts: string[]): Promise<ThemeClassification[]> {\n // Ensure model is loaded once\n await this.ensureModelLoaded();\n\n // Process sequentially to avoid memory issues\n const results: ThemeClassification[] = [];\n for (const text of texts) {\n const result = await this.classify(text);\n results.push(result);\n }\n\n return results;\n }\n}\n","/**\n * Embedding-based theme classifier using cosine similarity\n * Computes similarity between text 
embeddings and theme embeddings\n */\n\nimport type { ThemeClassifier, ThemeClassification } from './theme-classifier';\nimport type { Embedder } from '../../embedders/embedder';\n\n/**\n * Embedding-based classification using cosine similarity.\n *\n * This classifier computes embeddings for text and themes, then uses cosine\n * similarity to determine which theme is most similar to the text. Theme\n * embeddings are computed lazily on the first classify() call, or can be\n * provided precomputed in the constructor.\n *\n * Features:\n * - Lazy initialization: theme embeddings computed on first classify()\n * - Optional precomputed embeddings for faster startup\n * - Cosine similarity: dotProduct / (normA * normB)\n * - Normalize similarity [-1,1] to confidence [0,1]\n * - Handles empty text with uniform scores\n *\n * @example\n * ```typescript\n * const embedder = new OpenAIEmbedder();\n * const classifier = new EmbeddingThemeClassifier(['technology', 'sports', 'finance'], embedder);\n * const result = await classifier.classify('Machine learning is transforming AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.89\n * ```\n */\nexport class EmbeddingThemeClassifier implements ThemeClassifier {\n private themeEmbeddings: Record<string, number[]> | null = null;\n private embedder: Embedder;\n private themes: string[];\n\n /**\n * Creates a new EmbeddingThemeClassifier\n *\n * @param themes - Array of theme labels to classify into\n * @param embedder - Embedder instance to use for generating embeddings\n * @param precomputedEmbeddings - Optional precomputed theme embeddings for faster startup\n *\n * @example\n * ```typescript\n * // Lazy initialization\n * const classifier = new EmbeddingThemeClassifier(['technology', 'sports'], embedder);\n *\n * // With precomputed embeddings\n * const themeEmbeddings = {\n * technology: await embedder.embed('technology'),\n * sports: await embedder.embed('sports')\n * };\n * const classifier = new EmbeddingThemeClassifier(['technology', 'sports'], embedder, themeEmbeddings);\n * ```\n */\n constructor(\n themes: string[],\n embedder: Embedder,\n precomputedEmbeddings?: Record<string, number[]>\n ) {\n this.themes = themes;\n this.embedder = embedder;\n this.themeEmbeddings = precomputedEmbeddings || null;\n }\n\n /**\n * Lazy loads theme embeddings on first use\n * Computes embeddings for all theme labels if not already computed\n *\n * @returns Promise that resolves to the theme embeddings map\n */\n private async ensureThemeEmbeddings(): Promise<Record<string, number[]>> {\n if (!this.themeEmbeddings) {\n this.themeEmbeddings = {};\n\n // Compute embeddings for all themes\n const embeddings = await this.embedder.embedBatch(this.themes);\n\n for (let i = 0; i < this.themes.length; i++) {\n this.themeEmbeddings[this.themes[i]] = embeddings[i];\n }\n }\n\n return this.themeEmbeddings;\n }\n\n /**\n * Compute cosine similarity between two vectors\n *\n * Cosine similarity = dotProduct / (normA * normB)\n * Returns value in range [-1, 1] where:\n * - 1 means vectors point in the same direction\n * - 0 means vectors are orthogonal\n * - -1 means vectors point in opposite directions\n *\n * @param a - First vector\n * @param b - Second vector\n * @returns Cosine similarity between the vectors\n */\n private cosineSimilarity(a: number[], b: number[]): number {\n if (a.length !== b.length) {\n throw new Error('Vectors must have the same length for cosine similarity');\n }\n\n let dotProduct = 0;\n let normA = 0;\n 
let normB = 0;\n\n for (let i = 0; i < a.length; i++) {\n dotProduct += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n\n normA = Math.sqrt(normA);\n normB = Math.sqrt(normB);\n\n // Avoid division by zero\n if (normA === 0 || normB === 0) {\n return 0;\n }\n\n return dotProduct / (normA * normB);\n }\n\n /**\n * Normalize cosine similarity from [-1, 1] to confidence score [0, 1]\n *\n * Uses linear transformation: (similarity + 1) / 2\n *\n * @param similarity - Cosine similarity value in range [-1, 1]\n * @returns Confidence score in range [0, 1]\n */\n private normalizeToConfidence(similarity: number): number {\n return (similarity + 1) / 2;\n }\n\n /**\n * Classify a single text into one of the provided themes\n *\n * @param text - The text content to classify\n * @returns A promise that resolves to the theme classification result\n *\n * @example\n * ```typescript\n * const classifier = new EmbeddingThemeClassifier(['technology', 'sports'], embedder);\n * const result = await classifier.classify('Machine learning and AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.92\n * console.log(result.allScores); // { technology: 0.92, sports: 0.45 }\n * ```\n */\n async classify(text: string): Promise<ThemeClassification> {\n // Handle empty text with uniform scores\n if (!text || text.trim().length === 0) {\n const uniformScore = 1.0 / this.themes.length;\n const allScores: Record<string, number> = {};\n\n for (const theme of this.themes) {\n allScores[theme] = uniformScore;\n }\n\n return {\n theme: this.themes[0], // Return first theme\n confidence: uniformScore,\n allScores,\n };\n }\n\n // Ensure theme embeddings are computed\n const themeEmbeddings = await this.ensureThemeEmbeddings();\n\n // Compute text embedding\n const textEmbedding = await this.embedder.embed(text);\n\n // Compute cosine similarity for each theme\n const similarities: Record<string, number> = {};\n let maxSimilarity = -Infinity;\n let winningTheme = this.themes[0];\n\n for (const theme of this.themes) {\n const themeEmbedding = themeEmbeddings[theme];\n const similarity = this.cosineSimilarity(textEmbedding, themeEmbedding);\n similarities[theme] = similarity;\n\n if (similarity > maxSimilarity) {\n maxSimilarity = similarity;\n winningTheme = theme;\n }\n }\n\n // Normalize similarities to confidence scores [0, 1]\n const allScores: Record<string, number> = {};\n for (const theme of this.themes) {\n allScores[theme] = this.normalizeToConfidence(similarities[theme]);\n }\n\n return {\n theme: winningTheme,\n confidence: this.normalizeToConfidence(maxSimilarity),\n allScores,\n };\n }\n\n /**\n * Classify multiple texts efficiently\n *\n * Ensures theme embeddings are loaded once, then processes all texts.\n * Each text is then embedded and classified sequentially, reusing the cached theme embeddings.\n *\n * @param texts - Array of text contents to classify\n * @returns A promise that resolves to an array of theme classifications\n *\n * @example\n * ```typescript\n * const classifier = new EmbeddingThemeClassifier(['technology', 'sports', 'finance'], embedder);\n * const results = await classifier.classifyBatch([\n * 'Machine learning is transforming AI',\n * 'The football team won the championship',\n * 'Stock market hits record high'\n * ]);\n * // results[0].theme === 'technology'\n * // results[1].theme === 'sports'\n * // results[2].theme === 'finance'\n * ```\n */\n async classifyBatch(texts: string[]): Promise<ThemeClassification[]> {\n // Ensure theme embeddings are loaded once\n 
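A tiny worked example of the similarity-to-confidence mapping used here: orthogonal vectors score 0, which normalizes to 0.5; identical directions normalize to 1.0; opposite directions to 0.0.

```typescript
// Standalone sketch of the same math, runnable in isolation.
function cosine(a: number[], b: number[]): number {
  let dot = 0, na = 0, nb = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    na += a[i] * a[i];
    nb += b[i] * b[i];
  }
  return na && nb ? dot / (Math.sqrt(na) * Math.sqrt(nb)) : 0;
}

console.log((cosine([1, 0], [0, 1]) + 1) / 2);  // 0.5 (orthogonal)
console.log((cosine([1, 0], [1, 0]) + 1) / 2);  // 1   (same direction)
console.log((cosine([1, 0], [-1, 0]) + 1) / 2); // 0   (opposite)
```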
await this.ensureThemeEmbeddings();\n\n // Process each text (classify already has embeddings cached)\n const results: ThemeClassification[] = [];\n for (const text of texts) {\n const result = await this.classify(text);\n results.push(result);\n }\n\n return results;\n }\n}\n","/**\n * LLM-based theme classifier using language models for high-quality classification\n * Provides the most flexible and accurate theme classification using LLMs\n */\n\nimport type { ThemeClassifier, ThemeClassification } from './theme-classifier';\nimport type { LLMClient } from '../../llm/llm-client';\n\n/**\n * Default prompt template for theme classification.\n * Uses {themes} and {text} placeholders that are replaced at runtime.\n */\nconst DEFAULT_PROMPT_TEMPLATE = `You are a theme classification system. Classify the following text into one of the provided themes.\n\nAvailable themes: {themes}\n\nText to classify:\n{text}\n\nReturn a JSON object with the following structure:\n- theme: the most appropriate theme from the list (string)\n- confidence: confidence score between 0 and 1 (number)\n- allScores: an object mapping each theme to its confidence score (object)\n\nReturn only valid JSON, no additional text.`;\n\n/**\n * LLM-based theme classification using language models.\n *\n * This classifier uses LLMs to provide the highest quality theme classification\n * with semantic understanding and nuanced reasoning. It supports custom prompt\n * templates for domain-specific classification needs.\n *\n * Features:\n * - Default prompt template with {themes} and {text} placeholders\n * - Custom prompt template support for specialized domains\n * - Structured JSON output using LLM.generateJSON<>\n * - Sequential batch processing to avoid rate limits\n * - Comprehensive error handling with cause chain\n * - Empty text handling with uniform scores\n *\n * @example\n * ```typescript\n * const llm = new OpenAIClient('gpt-4');\n * const classifier = new LLMThemeClassifier(\n * ['technology', 'sports', 'finance'],\n * llm\n * );\n * const result = await classifier.classify('Machine learning is transforming AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.95\n * ```\n *\n * @example Custom prompt template\n * ```typescript\n * const customTemplate = `Classify this medical text: {text}\n * Themes: {themes}\n * Return JSON with theme, confidence, allScores.`;\n *\n * const classifier = new LLMThemeClassifier(\n * ['cardiology', 'neurology', 'oncology'],\n * llm,\n * customTemplate\n * );\n * ```\n */\nexport class LLMThemeClassifier implements ThemeClassifier {\n private themes: string[];\n private llm: LLMClient;\n private promptTemplate: string;\n\n /**\n * Creates a new LLMThemeClassifier\n *\n * @param themes - Array of theme labels to classify into\n * @param llm - LLM client instance to use for classification\n * @param promptTemplate - Optional custom prompt template with {themes} and {text} placeholders\n *\n * @example\n * ```typescript\n * const classifier = new LLMThemeClassifier(\n * ['technology', 'sports', 'finance'],\n * llm\n * );\n * ```\n *\n * @example With custom prompt\n * ```typescript\n * const customTemplate = `Classify: {text}\\nThemes: {themes}\\nReturn JSON.`;\n * const classifier = new LLMThemeClassifier(\n * ['technology', 'sports'],\n * llm,\n * customTemplate\n * );\n * ```\n */\n constructor(\n themes: string[],\n llm: LLMClient,\n promptTemplate: string = DEFAULT_PROMPT_TEMPLATE\n ) {\n this.themes = themes;\n this.llm = llm;\n 
this.promptTemplate = promptTemplate;\n }\n\n /**\n * Build the classification prompt by replacing placeholders\n *\n * @param text - The text to classify\n * @returns The complete prompt with placeholders replaced\n */\n private buildPrompt(text: string): string {\n const themesStr = this.themes.join(', ');\n return this.promptTemplate\n .replace('{themes}', themesStr)\n .replace('{text}', text);\n }\n\n /**\n * Classify a single text into one of the provided themes\n *\n * @param text - The text content to classify\n * @returns A promise that resolves to the theme classification result\n *\n * @example\n * ```typescript\n * const classifier = new LLMThemeClassifier(['technology', 'sports'], llm);\n * const result = await classifier.classify('Machine learning and AI');\n * console.log(result.theme); // 'technology'\n * console.log(result.confidence); // 0.95\n * console.log(result.allScores); // { technology: 0.95, sports: 0.05 }\n * ```\n */\n async classify(text: string): Promise<ThemeClassification> {\n // Handle empty text with uniform scores\n if (!text || text.trim().length === 0) {\n const uniformScore = 1.0 / this.themes.length;\n const allScores: Record<string, number> = {};\n\n for (const theme of this.themes) {\n allScores[theme] = uniformScore;\n }\n\n return {\n theme: this.themes[0], // Return first theme\n confidence: uniformScore,\n allScores,\n };\n }\n\n // Build prompt and call LLM\n const prompt = this.buildPrompt(text);\n\n try {\n const result = await this.llm.generateJSON<ThemeClassification>(prompt);\n return result;\n } catch (error) {\n // Chain the error with context\n const message = `Failed to classify text with LLM: ${error instanceof Error ? error.message : 'unknown error'}`;\n const classificationError = new Error(message);\n\n // Preserve the original error as the cause\n if (error instanceof Error) {\n (classificationError as any).cause = error;\n }\n\n throw classificationError;\n }\n }\n\n /**\n * Classify multiple texts sequentially\n *\n * Processes texts one at a time to avoid rate limits and ensure predictable behavior.\n * Sequential processing provides better error handling and rate limit compliance.\n *\n * @param texts - Array of text contents to classify\n * @returns A promise that resolves to an array of theme classifications\n *\n * @example\n * ```typescript\n * const classifier = new LLMThemeClassifier(['technology', 'sports', 'finance'], llm);\n * const results = await classifier.classifyBatch([\n * 'Machine learning is transforming AI',\n * 'The football team won the championship',\n * 'Stock market hits record high'\n * ]);\n * // results[0].theme === 'technology'\n * // results[1].theme === 'sports'\n * // results[2].theme === 'finance'\n * ```\n */\n async classifyBatch(texts: string[]): Promise<ThemeClassification[]> {\n // Sequential processing to avoid rate limits\n const results: ThemeClassification[] = [];\n\n for (const text of texts) {\n const result = await this.classify(text);\n results.push(result);\n }\n\n return results;\n }\n}\n","/**\n * Enrichment pipeline for adding metadata to vector records.\n *\n * This class provides the main enrichment functionality:\n * - Vertical enrichment: Classify documents into business verticals\n * - Theme enrichment: Add thematic tags to documents\n * - Section enrichment: Structure documents into logical sections\n * - Batch processing: Efficiently process large collections\n *\n * Design principles:\n * 1. Database-agnostic: Works with any VectorDBAdapter\n * 2. 
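To show what the LLM actually receives, here is the placeholder expansion `buildPrompt` performs, sketched standalone. Note that `String.replace` with a string pattern substitutes only the first occurrence, so custom templates should use `{themes}` and `{text}` once each.

```typescript
const template = 'Classify: {text}\nThemes: {themes}\nReturn JSON.';
const themes = ['technology', 'sports'];

const prompt = template
  .replace('{themes}', themes.join(', '))
  .replace('{text}', 'Machine learning and AI');

console.log(prompt);
// Classify: Machine learning and AI
// Themes: technology, sports
// Return JSON.
```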
Strategy pattern: Multiple enrichment strategies per operation\n * 3. Batch processing: Efficient iteration and bulk updates\n * 4. Error resilience: Continue processing despite individual failures\n */\n\nimport type { VectorDBAdapter } from '../adapters';\nimport type { VectorRecord } from '../types';\nimport type { MetadataUpdate } from '../adapters/types';\nimport type { UniversalFilter } from '../filters/types';\nimport type {\n EnrichmentStats,\n VerticalEnrichmentConfig,\n ThemeEnrichmentConfig,\n SectionEnrichmentConfig,\n EnrichAllConfig,\n FieldMappingConfig,\n ExtractorConfig,\n AutomaticExtractionConfig,\n} from './types';\n\n/**\n * EnrichmentPipeline provides methods to enrich vector records with metadata.\n *\n * The pipeline supports three types of enrichment:\n * 1. Vertical enrichment: Classify into business verticals (technology, finance, etc.)\n * 2. Theme enrichment: Add thematic tags (innovation, research, etc.)\n * 3. Section enrichment: Structure into logical sections\n *\n * Each enrichment type supports multiple strategies for maximum flexibility.\n *\n * @example\n * ```typescript\n * const pipeline = new EnrichmentPipeline(adapter, embedder, llm);\n *\n * // Enrich using field mapping\n * await pipeline.enrichVertical('my-collection', {\n * mapping: { 'tech': 'technology', 'hc': 'healthcare' }\n * });\n *\n * // Enrich using custom extractor\n * await pipeline.enrichVertical('my-collection', {\n * extractor: async (doc) => extractVertical(doc)\n * });\n *\n * // Enrich using LLM\n * await pipeline.enrichVertical('my-collection', {\n * automatic: {\n * llm: myLLMClient,\n * fields: ['technology', 'finance', 'healthcare']\n * }\n * });\n * ```\n */\nexport class EnrichmentPipeline {\n /**\n * Create a new enrichment pipeline.\n *\n * @param adapter - Vector database adapter for reading/writing records\n * @param embedder - Optional embedder for embedding-based enrichment\n * @param llm - Optional LLM client for automatic enrichment\n */\n constructor(\n private adapter: VectorDBAdapter,\n private embedder?: any,\n private llm?: any\n ) {}\n\n /**\n * Enrich records with vertical classifications.\n *\n * Supports three strategies:\n * 1. Field mapping: Map existing field values to verticals\n * 2. Custom extractor: Use a custom function to extract verticals\n * 3. 
Automatic LLM: Use an LLM to classify documents\n *\n * @param collection - Name of the collection to enrich\n * @param config - Vertical enrichment configuration\n * @returns Statistics about the enrichment operation\n *\n * @example\n * ```typescript\n * // Field mapping\n * await pipeline.enrichVertical('docs', {\n * mapping: { 'tech': 'technology' }\n * });\n *\n * // Custom extractor\n * await pipeline.enrichVertical('docs', {\n * extractor: async (doc) => 'technology'\n * });\n *\n * // Automatic LLM\n * await pipeline.enrichVertical('docs', {\n * automatic: {\n * llm: myLLMClient,\n * fields: ['technology', 'finance']\n * }\n * });\n * ```\n */\n async enrichVertical(\n collection: string,\n config: VerticalEnrichmentConfig\n ): Promise<EnrichmentStats> {\n const startTime = Date.now();\n const stats: EnrichmentStats = {\n recordsProcessed: 0,\n recordsUpdated: 0,\n recordsSkipped: 0,\n timeMs: 0,\n errors: [],\n };\n\n try {\n // Determine which strategy to use\n if ('mapping' in config) {\n await this.enrichWithFieldMapping(collection, config, stats);\n } else if ('extractor' in config) {\n await this.enrichWithExtractor(collection, config, stats);\n } else if ('automatic' in config) {\n await this.enrichWithLLM(collection, config, stats);\n }\n } catch (error) {\n stats.errors?.push(\n `Pipeline error: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n\n stats.timeMs = Date.now() - startTime;\n return stats;\n }\n\n /**\n * Enrich records using field mapping strategy.\n *\n * Maps values from an existing field to vertical classifications.\n *\n * @param collection - Collection name\n * @param config - Field mapping configuration\n * @param stats - Statistics object to update\n */\n private async enrichWithFieldMapping(\n collection: string,\n config: FieldMappingConfig,\n stats: EnrichmentStats\n ): Promise<void> {\n const batchSize = config.batchSize || 100;\n\n for await (const batch of this.adapter.iterate(collection, {\n batchSize,\n filter: config.filter,\n })) {\n const updates: MetadataUpdate[] = [];\n\n for (const record of batch) {\n stats.recordsProcessed++;\n\n try {\n const vertical = this.applyFieldMapping(record, config.mapping);\n\n if (vertical) {\n updates.push({\n id: record.id,\n metadata: { vertical },\n });\n } else {\n stats.recordsSkipped++;\n }\n } catch (error) {\n stats.recordsSkipped++;\n stats.errors?.push(\n `Error mapping record ${record.id}: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n // Apply updates if any\n if (updates.length > 0) {\n try {\n await this.adapter.updateMetadata(collection, updates);\n stats.recordsUpdated += updates.length;\n } catch (error) {\n stats.errors?.push(\n `Error updating batch: ${error instanceof Error ? 
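A usage sketch of the field-mapping strategy (`declare const` stands in for a real adapter; any `VectorDBAdapter` implementation works, and the export paths are assumptions). Note that this strategy currently reads the `category` metadata field specifically, as `applyFieldMapping` shows just below.

```typescript
import { EnrichmentPipeline } from '@vectororm/core'; // assumed export path
import type { VectorDBAdapter } from '@vectororm/core';

declare const adapter: VectorDBAdapter; // any concrete adapter

async function run(): Promise<void> {
  const pipeline = new EnrichmentPipeline(adapter);

  const stats = await pipeline.enrichVertical('docs', {
    mapping: {
      tech: 'technology', // metadata.category === 'tech' -> vertical 'technology'
      hc: 'healthcare',
    },
    batchSize: 200, // optional; defaults to 100
  });

  console.log(stats.recordsUpdated, stats.recordsSkipped, stats.errors);
}
```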
error.message : 'unknown error'}`\n );\n }\n }\n }\n }\n\n /**\n * Apply field mapping to extract vertical from a record.\n *\n * @param record - Vector record\n * @param mapping - Field mapping configuration\n * @returns Vertical label or null if no match\n */\n private applyFieldMapping(\n record: VectorRecord,\n mapping: Record<string, string>\n ): string | null {\n const category = record.metadata?.category;\n\n if (category && typeof category === 'string' && category in mapping) {\n return mapping[category];\n }\n\n return null;\n }\n\n /**\n * Enrich records using custom extractor strategy.\n *\n * Calls the provided extractor function for each record.\n *\n * @param collection - Collection name\n * @param config - Extractor configuration\n * @param stats - Statistics object to update\n */\n private async enrichWithExtractor(\n collection: string,\n config: ExtractorConfig,\n stats: EnrichmentStats\n ): Promise<void> {\n const batchSize = config.batchSize || 100;\n\n for await (const batch of this.adapter.iterate(collection, {\n batchSize,\n filter: config.filter,\n })) {\n const updates: MetadataUpdate[] = [];\n\n for (const record of batch) {\n stats.recordsProcessed++;\n\n try {\n const vertical = await config.extractor(record);\n\n if (vertical) {\n updates.push({\n id: record.id,\n metadata: { vertical },\n });\n } else {\n stats.recordsSkipped++;\n }\n } catch (error) {\n stats.recordsSkipped++;\n stats.errors?.push(\n `Extractor error for record ${record.id}: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n // Apply updates if any\n if (updates.length > 0) {\n try {\n await this.adapter.updateMetadata(collection, updates);\n stats.recordsUpdated += updates.length;\n } catch (error) {\n stats.errors?.push(\n `Error updating batch: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n }\n }\n\n /**\n * Enrich records using automatic LLM strategy.\n *\n * Uses a language model to classify documents into verticals.\n *\n * @param collection - Collection name\n * @param config - Automatic extraction configuration\n * @param stats - Statistics object to update\n */\n private async enrichWithLLM(\n collection: string,\n config: AutomaticExtractionConfig,\n stats: EnrichmentStats\n ): Promise<void> {\n const batchSize = config.batchSize || 10;\n const { llm, fields, promptTemplate, textField } = config.automatic;\n const fieldName = textField || 'content';\n\n for await (const batch of this.adapter.iterate(collection, {\n batchSize,\n filter: config.filter,\n })) {\n const updates: MetadataUpdate[] = [];\n\n for (const record of batch) {\n stats.recordsProcessed++;\n\n try {\n const vertical = await this.extractWithLLM(\n record,\n llm,\n fields,\n fieldName,\n promptTemplate\n );\n\n if (vertical) {\n updates.push({\n id: record.id,\n metadata: { vertical },\n });\n } else {\n stats.recordsSkipped++;\n }\n } catch (error) {\n stats.recordsSkipped++;\n stats.errors?.push(\n `LLM extraction error for record ${record.id}: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n // Apply updates if any\n if (updates.length > 0) {\n try {\n await this.adapter.updateMetadata(collection, updates);\n stats.recordsUpdated += updates.length;\n } catch (error) {\n stats.errors?.push(\n `Error updating batch: ${error instanceof Error ? 
error.message : 'unknown error'}`\n );\n }\n }\n }\n }\n\n /**\n * Extract vertical classification using LLM.\n *\n * @param record - Vector record\n * @param llm - LLM client\n * @param fields - Available vertical fields\n * @param textField - Field name containing text to classify\n * @param promptTemplate - Optional custom prompt template\n * @returns Vertical label\n */\n private async extractWithLLM(\n record: VectorRecord,\n llm: any,\n fields: string[],\n textField: string,\n promptTemplate?: string\n ): Promise<string> {\n const text = record.metadata?.[textField];\n\n if (!text || typeof text !== 'string') {\n throw new Error(`No text found in field '${textField}'`);\n }\n\n // Build prompt\n const prompt = promptTemplate\n ? promptTemplate\n .replace('{fields}', fields.join(', '))\n .replace('{text}', text)\n : `Classify the following text into one of these categories: ${fields.join(', ')}\\n\\nText: ${text}\\n\\nCategory:`;\n\n // Call LLM\n const result = await llm.generate(prompt);\n\n return result.trim();\n }\n\n /**\n * Enrich records with theme classifications.\n *\n * Uses a theme classifier to identify themes in text content and updates\n * record metadata with theme information. Supports single and multi-theme\n * classification with configurable confidence thresholds.\n *\n * @param collection - Name of the collection to enrich\n * @param config - Theme enrichment configuration\n * @returns Statistics about the enrichment operation\n *\n * @example\n * ```typescript\n * // Single theme classification\n * await pipeline.enrichThemes('docs', {\n * themes: ['technology', 'business', 'science'],\n * classifier: new KeywordThemeClassifier(),\n * confidenceThreshold: 0.7\n * });\n *\n * // Multi-theme classification\n * await pipeline.enrichThemes('docs', {\n * themes: ['technology', 'business', 'science'],\n * classifier: new LLMThemeClassifier(),\n * multiTheme: true,\n * confidenceThreshold: 0.5\n * });\n * ```\n */\n async enrichThemes(\n collection: string,\n config: ThemeEnrichmentConfig\n ): Promise<EnrichmentStats> {\n const startTime = Date.now();\n const stats: EnrichmentStats = {\n recordsProcessed: 0,\n recordsUpdated: 0,\n recordsSkipped: 0,\n timeMs: 0,\n errors: [],\n };\n\n try {\n await this.enrichWithThemeClassifier(collection, config, stats);\n } catch (error) {\n stats.errors?.push(\n `Pipeline error: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n\n stats.timeMs = Date.now() - startTime;\n return stats;\n }\n\n /**\n * Enrich records using theme classifier.\n *\n * @param collection - Collection name\n * @param config - Theme enrichment configuration\n * @param stats - Statistics object to update\n */\n private async enrichWithThemeClassifier(\n collection: string,\n config: ThemeEnrichmentConfig,\n stats: EnrichmentStats\n ): Promise<void> {\n const batchSize = config.batchSize || 100;\n const textField = config.textField || 'content';\n const confidenceThreshold = config.confidenceThreshold ?? 
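When no `promptTemplate` is supplied, `extractWithLLM` falls back to the default prompt built above; this standalone sketch reproduces the exact string it sends:

```typescript
const fields = ['technology', 'finance', 'healthcare'];
const text = 'Quarterly revenue grew 12% on cloud sales.';

const prompt = `Classify the following text into one of these categories: ${fields.join(', ')}\n\nText: ${text}\n\nCategory:`;

console.log(prompt);
// Classify the following text into one of these categories: technology, finance, healthcare
//
// Text: Quarterly revenue grew 12% on cloud sales.
//
// Category:
```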
0.5;\n const multiTheme = config.multiTheme || false;\n\n for await (const batch of this.adapter.iterate(collection, {\n batchSize,\n filter: config.filter,\n })) {\n // Extract texts from batch\n const textsToClassify: string[] = [];\n const recordsToProcess: VectorRecord[] = [];\n\n for (const record of batch) {\n stats.recordsProcessed++;\n\n // Extract text from record\n const text = record.text || record.metadata?.[textField];\n\n if (!text || typeof text !== 'string' || text.trim() === '') {\n stats.recordsSkipped++;\n continue;\n }\n\n textsToClassify.push(text);\n recordsToProcess.push(record);\n }\n\n // Skip if no valid texts to classify\n if (textsToClassify.length === 0) {\n continue;\n }\n\n // Classify batch\n let classifications: any[];\n try {\n classifications = await config.classifier.classifyBatch(textsToClassify);\n } catch (error) {\n // If batch classification fails, try individual classification for each item\n stats.errors?.push(\n `Batch classification error, falling back to individual classification: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n\n classifications = [];\n for (let i = 0; i < textsToClassify.length; i++) {\n try {\n const result = await config.classifier.classify(textsToClassify[i]);\n classifications.push(result);\n } catch (individualError) {\n // Push null to maintain index alignment\n classifications.push(null);\n stats.errors?.push(\n `Classification error for record ${recordsToProcess[i].id}: ${individualError instanceof Error ? individualError.message : 'unknown error'}`\n );\n }\n }\n }\n\n // Build updates\n const updates: MetadataUpdate[] = [];\n\n for (let i = 0; i < recordsToProcess.length; i++) {\n const record = recordsToProcess[i];\n const classification = classifications[i];\n\n try {\n // Check if classification is valid (might be error object or undefined)\n if (!classification || typeof classification !== 'object') {\n stats.recordsSkipped++;\n stats.errors?.push(\n `Invalid classification for record ${record.id}`\n );\n continue;\n }\n\n // Skip if below confidence threshold\n if (classification.confidence < confidenceThreshold) {\n stats.recordsSkipped++;\n continue;\n }\n\n // Build metadata update\n const metadata: Record<string, any> = {\n __h_theme: classification.theme,\n __h_theme_confidence: classification.confidence,\n };\n\n // Handle multi-theme mode\n if (multiTheme && classification.allScores) {\n const themes = Object.entries(classification.allScores)\n .filter(([_, score]) => (score as number) >= confidenceThreshold)\n .sort(([_, a], [__, b]) => (b as number) - (a as number))\n .map(([theme, _]) => theme);\n\n if (themes.length > 0) {\n metadata.__h_themes = themes;\n }\n }\n\n updates.push({\n id: record.id,\n metadata,\n });\n } catch (error) {\n stats.recordsSkipped++;\n stats.errors?.push(\n `Error processing record ${record.id}: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n // Apply updates if any\n if (updates.length > 0) {\n try {\n await this.adapter.updateMetadata(collection, updates);\n stats.recordsUpdated += updates.length;\n } catch (error) {\n stats.errors?.push(\n `Error updating batch: ${error instanceof Error ? 
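The multi-theme branch keeps every theme whose score clears the threshold, sorted by descending score; a standalone sketch of that filter with sample scores:

```typescript
const allScores: Record<string, number> = {
  technology: 0.82,
  business: 0.61,
  science: 0.34,
};
const confidenceThreshold = 0.5;

const themes = Object.entries(allScores)
  .filter(([, score]) => score >= confidenceThreshold)
  .sort(([, a], [, b]) => b - a)
  .map(([theme]) => theme);

console.log(themes); // ['technology', 'business'] -> written to __h_themes
```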
error.message : 'unknown error'}`\n );\n }\n }\n\n // Call progress callback if provided\n if (config.onProgress) {\n config.onProgress(stats);\n }\n }\n }\n\n /**\n * Enrich records with section structure.\n *\n * Extracts section metadata from documents using either existing field mappings\n * or automatic detection strategies (markdown, HTML, or pattern-based).\n *\n * @param collection - Name of the collection to enrich\n * @param config - Section enrichment configuration\n * @returns Statistics about the enrichment operation\n *\n * @example\n * ```typescript\n * // Use existing section field\n * await pipeline.enrichSections('docs', {\n * existingField: 'section_path'\n * });\n *\n * // Auto-detect sections\n * await pipeline.enrichSections('docs', {\n * autoDetect: true\n * });\n * ```\n */\n async enrichSections(\n collection: string,\n config: SectionEnrichmentConfig\n ): Promise<EnrichmentStats> {\n const startTime = Date.now();\n const stats: EnrichmentStats = {\n recordsProcessed: 0,\n recordsUpdated: 0,\n recordsSkipped: 0,\n timeMs: 0,\n errors: [],\n };\n\n try {\n await this.enrichWithSectionDetection(collection, config, stats);\n } catch (error) {\n stats.errors?.push(\n `Pipeline error: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n\n stats.timeMs = Date.now() - startTime;\n return stats;\n }\n\n /**\n * Enrich records with all enrichment types.\n *\n * Runs vertical, theme, and section enrichment sequentially with shared\n * configuration. Global filters and batch sizes apply to all operations.\n *\n * @param collection - Name of the collection to enrich\n * @param config - Combined enrichment configuration\n * @returns Statistics about the enrichment operation\n *\n * @example\n * ```typescript\n * await pipeline.enrichAll('docs', {\n * vertical: { mapping: { tech: 'technology' } },\n * themes: { themes: ['innovation'], classifier },\n * sections: { autoDetect: true },\n * filter: { field: 'status', op: 'eq', value: 'pending' },\n * batchSize: 50\n * });\n * ```\n */\n async enrichAll(\n collection: string,\n config: EnrichAllConfig\n ): Promise<EnrichmentStats> {\n const startTime = Date.now();\n const aggregateStats: EnrichmentStats = {\n recordsProcessed: 0,\n recordsUpdated: 0,\n recordsSkipped: 0,\n timeMs: 0,\n errors: [],\n };\n\n try {\n // Run vertical enrichment if configured\n if (config.vertical) {\n const verticalConfig = this.applyGlobalConfig(config.vertical, config);\n const stats = await this.enrichVertical(collection, verticalConfig);\n this.mergeStats(aggregateStats, stats);\n\n // Call progress callback if provided\n if (config.onProgress) {\n config.onProgress(aggregateStats);\n }\n }\n\n // Run theme enrichment if configured\n if (config.themes) {\n const themesConfig = this.applyGlobalConfig(config.themes, config);\n const stats = await this.enrichThemes(collection, themesConfig);\n this.mergeStats(aggregateStats, stats);\n\n // Call progress callback if provided\n if (config.onProgress) {\n config.onProgress(aggregateStats);\n }\n }\n\n // Run section enrichment if configured\n if (config.sections) {\n const sectionsConfig = this.applyGlobalConfig(config.sections, config);\n const stats = await this.enrichSections(collection, sectionsConfig);\n this.mergeStats(aggregateStats, stats);\n\n // Call progress callback if provided\n if (config.onProgress) {\n config.onProgress(aggregateStats);\n }\n }\n } catch (error) {\n aggregateStats.errors?.push(\n `Pipeline error: ${error instanceof Error ? 
error.message : 'unknown error'}`\n );\n }\n\n aggregateStats.timeMs = Date.now() - startTime;\n return aggregateStats;\n }\n\n /**\n * Apply global configuration to individual enrichment configs.\n *\n * @param individualConfig - Configuration for a specific enrichment type\n * @param globalConfig - Global configuration\n * @returns Merged configuration\n */\n private applyGlobalConfig<T extends { filter?: UniversalFilter; batchSize?: number }>(\n individualConfig: T,\n globalConfig: EnrichAllConfig\n ): T {\n const merged = { ...individualConfig };\n\n // Apply global filter if not overridden\n if (globalConfig.filter && !merged.filter) {\n merged.filter = globalConfig.filter;\n }\n\n // Apply global batch size if not overridden\n if (globalConfig.batchSize && !merged.batchSize) {\n merged.batchSize = globalConfig.batchSize;\n }\n\n return merged;\n }\n\n /**\n * Merge stats from an enrichment operation into aggregate stats.\n *\n * @param aggregate - Aggregate stats to update\n * @param stats - Stats from a single operation\n */\n private mergeStats(aggregate: EnrichmentStats, stats: EnrichmentStats): void {\n aggregate.recordsProcessed += stats.recordsProcessed;\n aggregate.recordsUpdated += stats.recordsUpdated;\n aggregate.recordsSkipped += stats.recordsSkipped;\n\n // Merge errors\n if (stats.errors && stats.errors.length > 0) {\n if (!aggregate.errors) {\n aggregate.errors = [];\n }\n aggregate.errors.push(...stats.errors);\n }\n }\n\n /**\n * Enrich records using section detection.\n *\n * @param collection - Collection name\n * @param config - Section enrichment configuration\n * @param stats - Statistics object to update\n */\n private async enrichWithSectionDetection(\n collection: string,\n config: SectionEnrichmentConfig,\n stats: EnrichmentStats\n ): Promise<void> {\n const batchSize = config.batchSize || 100;\n\n for await (const batch of this.adapter.iterate(collection, {\n batchSize,\n filter: config.filter,\n })) {\n const updates: MetadataUpdate[] = [];\n\n for (const record of batch) {\n stats.recordsProcessed++;\n\n try {\n let sectionMetadata: {\n path?: string;\n level: number;\n title: string;\n } | null = null;\n\n // Use existing field if provided\n if (config.existingField) {\n sectionMetadata = this.extractSectionMetadata(\n record.metadata?.[config.existingField]\n );\n }\n // Otherwise, auto-detect sections\n else if (config.autoDetect) {\n const text = record.text || record.metadata?.content || '';\n if (typeof text === 'string') {\n sectionMetadata = this.detectSections(text);\n }\n }\n\n if (sectionMetadata) {\n const metadata: Record<string, any> = {\n __h_section_level: sectionMetadata.level,\n __h_section_title: sectionMetadata.title,\n };\n\n if (sectionMetadata.path) {\n metadata.__h_section_path = sectionMetadata.path;\n }\n\n updates.push({\n id: record.id,\n metadata,\n });\n } else {\n stats.recordsSkipped++;\n }\n } catch (error) {\n stats.recordsSkipped++;\n stats.errors?.push(\n `Error processing record ${record.id}: ${error instanceof Error ? error.message : 'unknown error'}`\n );\n }\n }\n\n // Apply updates if any\n if (updates.length > 0) {\n try {\n await this.adapter.updateMetadata(collection, updates);\n stats.recordsUpdated += updates.length;\n } catch (error) {\n stats.errors?.push(\n `Error updating batch: ${error instanceof Error ? 
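Merge precedence in `enrichAll` is per-field: an individual config's `filter` or `batchSize` wins, and global values only fill gaps. A sketch of the equivalent logic:

```typescript
interface Batchable {
  filter?: unknown;
  batchSize?: number;
}

const globalConfig: Batchable = { batchSize: 50 };
const verticalConfig: Batchable = { batchSize: 10 };

// Equivalent to applyGlobalConfig(verticalConfig, globalConfig):
const merged: Batchable = {
  ...verticalConfig,
  filter: verticalConfig.filter ?? globalConfig.filter,          // global fills the gap
  batchSize: verticalConfig.batchSize ?? globalConfig.batchSize, // individual 10 wins over 50
};

console.log(merged.batchSize); // 10
```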
error.message : 'unknown error'}`\n );\n }\n }\n }\n }\n\n /**\n * Extract section metadata from an existing field value.\n *\n * @param sectionPath - Section path string (e.g., \"introduction/overview\")\n * @returns Section metadata or null\n */\n private extractSectionMetadata(\n sectionPath: any\n ): { path: string; level: number; title: string } | null {\n if (!sectionPath || typeof sectionPath !== 'string') {\n return null;\n }\n\n const parts = sectionPath.split('/').filter(p => p.trim() !== '');\n if (parts.length === 0) {\n return null;\n }\n\n return {\n path: sectionPath,\n level: parts.length,\n title: parts[parts.length - 1],\n };\n }\n\n /**\n * Detect sections in text using heuristics.\n *\n * @param text - Text content to analyze\n * @returns Section metadata or null\n */\n private detectSections(\n text: string\n ): { level: number; title: string } | null {\n // Try markdown detection first\n const markdown = this.detectMarkdownSections(text);\n if (markdown) return markdown;\n\n // Try HTML detection\n const html = this.detectHtmlSections(text);\n if (html) return html;\n\n // Try pattern detection\n const pattern = this.detectPatternSections(text);\n if (pattern) return pattern;\n\n // Fallback: mark as unsectioned\n return { level: 0, title: 'unsectioned' };\n }\n\n /**\n * Detect markdown headers (# Header).\n *\n * @param text - Text content\n * @returns Section metadata or null\n */\n private detectMarkdownSections(\n text: string\n ): { level: number; title: string } | null {\n const match = text.match(/^(#{1,6})\\s+(.+)$/m);\n if (match) {\n const level = match[1].length;\n const title = match[2].trim();\n return { level, title };\n }\n return null;\n }\n\n /**\n * Detect HTML headers (<h1>Header</h1>).\n *\n * @param text - Text content\n * @returns Section metadata or null\n */\n private detectHtmlSections(\n text: string\n ): { level: number; title: string } | null {\n const match = text.match(/<h([1-6])>(.+?)<\\/h[1-6]>/i);\n if (match) {\n const level = parseInt(match[1], 10);\n const title = match[2].trim();\n return { level, title };\n }\n return null;\n }\n\n /**\n * Detect sections using common patterns (SECTION: Title).\n *\n * @param text - Text content\n * @returns Section metadata or null\n */\n private detectPatternSections(\n text: string\n ): { level: number; title: string } | null {\n const match = text.match(/^SECTION:\\s+(.+)$/m);\n if (match) {\n const title = match[1].trim();\n return { level: 1, title };\n }\n return null;\n }\n}\n","// packages/core/src/ingestion/chunkers/text-chunker.ts\nimport type { TextChunk, ChunkConfig } from '../types';\n\n/**\n * Abstract interface for text chunking strategies.\n * Implementations split text into chunks with different algorithms.\n */\nexport interface TextChunker {\n /**\n * Chunk text into smaller pieces.\n * @param text - Text to chunk\n * @param config - Optional chunking configuration\n * @returns Array of text chunks with position metadata\n */\n chunk(text: string, config?: ChunkConfig): TextChunk[];\n}\n\n/**\n * Default chunk size in tokens (approximate).\n */\nexport const DEFAULT_CHUNK_SIZE = 500;\n\n/**\n * Default chunk overlap in tokens (approximate).\n */\nexport const DEFAULT_CHUNK_OVERLAP = 50;\n\n/**\n * Estimate token count from character count.\n * Simple heuristic: 1 token ≈ 4 characters for English text.\n */\nexport function estimateTokens(text: string): number {\n return Math.ceil(text.length / 4);\n}\n\n/**\n * Estimate character count from token count.\n */\nexport function 
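The 4-characters-per-token heuristic makes the two estimators exact inverses, which is what ties token-denominated config to character-based splitting: the default 500-token chunk maps to a 2000-character window, and the 50-token overlap to 200 characters.

```typescript
// Standalone restatement of the heuristic for quick arithmetic checks.
const tokensFromText = (text: string): number => Math.ceil(text.length / 4);
const charsFromTokens = (tokens: number): number => tokens * 4;

console.log(charsFromTokens(500));             // 2000 -> max chunk window in chars
console.log(charsFromTokens(50));              // 200  -> overlap window in chars
console.log(tokensFromText('a'.repeat(2000))); // 500
```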
estimateChars(tokens: number): number {\n return tokens * 4;\n}\n","// packages/core/src/ingestion/chunkers/recursive-chunker.ts\nimport type { TextChunker } from './text-chunker';\nimport type { TextChunk, ChunkConfig } from '../types';\nimport {\n DEFAULT_CHUNK_SIZE,\n DEFAULT_CHUNK_OVERLAP,\n estimateChars\n} from './text-chunker';\n\n/**\n * Recursive text chunker that tries different separators hierarchically.\n * Tries to split by paragraphs first, then sentences, then words, then characters.\n */\nexport class RecursiveChunker implements TextChunker {\n private readonly separators = [\n '\\n\\n', // Paragraphs (double newline)\n '\\n', // Lines (single newline)\n '. ', // Sentences (period + space)\n ' ', // Words (space)\n '' // Characters (last resort)\n ];\n\n chunk(text: string, config?: ChunkConfig): TextChunk[] {\n if (!text) return [];\n\n const chunkSize = config?.chunkSize ?? DEFAULT_CHUNK_SIZE;\n const chunkOverlap = config?.chunkOverlap ?? DEFAULT_CHUNK_OVERLAP;\n\n const maxChars = estimateChars(chunkSize);\n const overlapChars = estimateChars(chunkOverlap);\n\n if (text.length <= maxChars) {\n return [{\n text,\n index: 0,\n metadata: {\n source: '',\n chunkIndex: 0,\n totalChunks: 1,\n startChar: 0,\n endChar: text.length\n }\n }];\n }\n\n const splits = this.recursiveSplit(text, maxChars, 0);\n const chunks = this.addOverlap(splits, overlapChars);\n\n return chunks.map((chunk, index) => ({\n text: chunk.text,\n index,\n metadata: {\n source: '', // Will be set by pipeline\n chunkIndex: index,\n totalChunks: chunks.length,\n startChar: chunk.start,\n endChar: chunk.end\n }\n }));\n }\n\n private recursiveSplit(\n text: string,\n maxChars: number,\n separatorIndex: number\n ): Array<{ text: string; start: number; end: number }> {\n if (text.length <= maxChars) {\n return [{ text, start: 0, end: text.length }];\n }\n\n if (separatorIndex >= this.separators.length) {\n // Last resort: split by character\n const result: Array<{ text: string; start: number; end: number }> = [];\n for (let i = 0; i < text.length; i += maxChars) {\n result.push({\n text: text.slice(i, i + maxChars),\n start: i,\n end: Math.min(i + maxChars, text.length)\n });\n }\n return result;\n }\n\n const separator = this.separators[separatorIndex];\n const parts = separator ? text.split(separator) : [text];\n\n if (parts.length <= 1) {\n // Separator didn't split, try next separator\n return this.recursiveSplit(text, maxChars, separatorIndex + 1);\n }\n\n // Group parts into chunks that fit within maxChars\n const result: Array<{ text: string; start: number; end: number }> = [];\n let currentParts: string[] = [];\n let currentStart = 0;\n let runningOffset = 0;\n\n for (let i = 0; i < parts.length; i++) {\n const part = parts[i];\n const combined = currentParts.length > 0\n ? 
[...currentParts, part].join(separator)\n : part;\n\n if (combined.length <= maxChars) {\n if (currentParts.length === 0) {\n currentStart = runningOffset;\n }\n currentParts.push(part);\n } else {\n // Save current chunk if non-empty\n if (currentParts.length > 0) {\n const chunkText = currentParts.join(separator);\n result.push({\n text: chunkText,\n start: currentStart,\n end: currentStart + chunkText.length\n });\n }\n // Start new chunk with current part\n currentStart = runningOffset;\n // If single part is too large, recursively split it\n if (part.length > maxChars) {\n const subSplits = this.recursiveSplit(part, maxChars, separatorIndex + 1);\n for (const sub of subSplits) {\n result.push({\n text: sub.text,\n start: currentStart + sub.start,\n end: currentStart + sub.end\n });\n }\n currentParts = [];\n } else {\n currentParts = [part];\n }\n }\n runningOffset += part.length + (i < parts.length - 1 ? separator.length : 0);\n }\n\n // Save remaining chunk\n if (currentParts.length > 0) {\n const chunkText = currentParts.join(separator);\n result.push({\n text: chunkText,\n start: currentStart,\n end: currentStart + chunkText.length\n });\n }\n\n return result;\n }\n\n private addOverlap(\n chunks: Array<{ text: string; start: number; end: number }>,\n overlapChars: number\n ): Array<{ text: string; start: number; end: number }> {\n if (overlapChars === 0 || chunks.length <= 1) {\n return chunks;\n }\n\n const result = [chunks[0]];\n\n for (let i = 1; i < chunks.length; i++) {\n const prevChunk = chunks[i - 1];\n const currChunk = chunks[i];\n\n // Get last N chars from previous chunk\n const overlapText = prevChunk.text.slice(-overlapChars);\n\n result.push({\n text: overlapText + currChunk.text,\n start: Math.max(0, prevChunk.end - overlapChars),\n end: currChunk.end\n });\n }\n\n return result;\n }\n}\n","// packages/core/src/ingestion/ingestion-pipeline.ts\nimport type { VectorDBAdapter } from '../adapters/vector-db-adapter';\nimport type { Embedder } from '../embedders/embedder';\nimport type { VectorRecord } from '../types/vector-record';\nimport type { LoaderRegistry } from './loaders/loader-registry';\nimport type { TextChunker } from './chunkers/text-chunker';\nimport type { Document, IngestionConfig, IngestionStats, TextChunk } from './types';\nimport { RecursiveChunker } from './chunkers/recursive-chunker';\nimport { VerticalFields } from '../metadata/constants';\nimport * as path from 'path';\n\n/**\n * Main ingestion pipeline orchestrator.\n * Coordinates loading, chunking, embedding, and upserting documents.\n */\nexport class IngestionPipeline {\n private defaultChunker: TextChunker;\n\n constructor(\n private adapter: VectorDBAdapter,\n private embedder: Embedder,\n private loaderRegistry: LoaderRegistry,\n chunker?: TextChunker\n ) {\n this.defaultChunker = chunker || new RecursiveChunker();\n }\n\n /**\n * Ingest documents into a vector database collection.\n * @param sources - File paths\n * @param collection - Target collection name\n * @param config - Optional ingestion configuration\n * @returns Statistics about the ingestion operation\n */\n async ingest(\n sources: string | string[],\n collection: string,\n config?: IngestionConfig\n ): Promise<IngestionStats> {\n const startTime = Date.now();\n const sourceArray = Array.isArray(sources) ? 
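How `addOverlap` stitches context between neighbors: each chunk after the first is prefixed with the tail of its predecessor, and `start` is pulled back accordingly. A sketch with a 4-character overlap:

```typescript
const prev = { text: 'ABCDEFGH', start: 0, end: 8 };
const curr = { text: 'IJKLMNOP', start: 8, end: 16 };
const overlapChars = 4;

const overlapped = {
  text: prev.text.slice(-overlapChars) + curr.text, // 'EFGHIJKLMNOP'
  start: Math.max(0, prev.end - overlapChars),      // 4
  end: curr.end,                                    // 16
};

console.log(overlapped);
```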
sources : [sources];\n\n const stats: IngestionStats = {\n documentsProcessed: 0,\n documentsSucceeded: 0,\n documentsFailed: 0,\n chunksCreated: 0,\n chunksUpserted: 0,\n timeMs: 0,\n errors: []\n };\n\n const totalDocuments = sourceArray.length;\n\n for (const source of sourceArray) {\n // Report loading stage\n config?.onProgress?.({\n stage: 'loading',\n documentsProcessed: stats.documentsProcessed,\n totalDocuments,\n chunksProcessed: stats.chunksUpserted,\n currentDocument: source\n });\n\n try {\n await this.ingestFile(source, collection, config, stats, totalDocuments);\n stats.documentsSucceeded++;\n } catch (error) {\n stats.documentsFailed++;\n stats.errors!.push({\n source,\n stage: 'load',\n error: error as Error\n });\n }\n stats.documentsProcessed++;\n }\n\n stats.timeMs = Date.now() - startTime;\n return stats;\n }\n\n private async ingestFile(\n filePath: string,\n collection: string,\n config: IngestionConfig | undefined,\n stats: IngestionStats,\n totalDocuments: number\n ): Promise<void> {\n // Load document\n const doc = await this.loaderRegistry.load(filePath);\n config?.onDocumentLoaded?.(doc);\n\n // Report chunking stage\n config?.onProgress?.({\n stage: 'chunking',\n documentsProcessed: stats.documentsProcessed,\n totalDocuments,\n chunksProcessed: stats.chunksUpserted,\n currentDocument: filePath\n });\n\n // Chunk text\n const chunker = config?.chunker || this.defaultChunker;\n const chunks = chunker.chunk(doc.text, {\n chunkSize: config?.chunkSize,\n chunkOverlap: config?.chunkOverlap\n });\n\n // Update source in chunk metadata\n for (const chunk of chunks) {\n chunk.metadata.source = doc.source;\n }\n\n stats.chunksCreated += chunks.length;\n config?.onChunksCreated?.(chunks);\n\n // Report embedding stage\n config?.onProgress?.({\n stage: 'embedding',\n documentsProcessed: stats.documentsProcessed,\n totalDocuments,\n chunksProcessed: stats.chunksUpserted,\n totalChunks: stats.chunksCreated,\n currentDocument: filePath\n });\n\n // Embed chunks\n const texts = chunks.map((c: TextChunk) => c.text);\n const embeddings = await this.embedder.embedBatch(texts);\n\n // Build vector records with metadata\n const records: VectorRecord[] = chunks.map((chunk: TextChunk, i: number) => {\n const metadata = this.buildMetadata(doc, chunk, config);\n\n return {\n id: `${path.basename(doc.source)}:${chunk.index}`,\n embedding: embeddings[i],\n text: chunk.text,\n metadata\n };\n });\n\n // Report upserting stage\n config?.onProgress?.({\n stage: 'upserting',\n documentsProcessed: stats.documentsProcessed,\n totalDocuments,\n chunksProcessed: stats.chunksUpserted,\n totalChunks: stats.chunksCreated,\n currentDocument: filePath\n });\n\n // Upsert to database in batches\n const batchSize = config?.batchSize || 100;\n for (let i = 0; i < records.length; i += batchSize) {\n const batch = records.slice(i, i + batchSize);\n await this.adapter.upsert(collection, batch);\n stats.chunksUpserted += batch.length;\n }\n }\n\n private buildMetadata(\n doc: Document,\n chunk: TextChunk,\n config: IngestionConfig | undefined\n ): Record<string, any> {\n // Auto-extract vertical metadata\n const basename = path.basename(doc.source, path.extname(doc.source));\n const dirname = path.dirname(doc.source);\n\n const autoMetadata: Record<string, any> = {\n [VerticalFields.SOURCE]: doc.source,\n [VerticalFields.DOC_TYPE]: doc.type,\n [VerticalFields.DOC_ID]: basename,\n [VerticalFields.PARTITION]: dirname\n };\n\n // Apply custom extractor\n const extractedMetadata = 
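An end-to-end usage sketch of the pipeline (adapter and embedder are stubbed with `declare const`; export paths are assumptions). Record IDs take the form `<basename>:<chunkIndex>`, so re-ingesting the same file overwrites its chunks rather than duplicating them.

```typescript
import { IngestionPipeline, LoaderRegistry } from '@vectororm/core'; // assumed export paths
import type { VectorDBAdapter, Embedder } from '@vectororm/core';

declare const adapter: VectorDBAdapter;
declare const embedder: Embedder;

async function run(): Promise<void> {
  const pipeline = new IngestionPipeline(adapter, embedder, new LoaderRegistry());

  const stats = await pipeline.ingest(['docs/guide.md', 'docs/spec.pdf'], 'manuals', {
    chunkSize: 400,
    chunkOverlap: 40,
    onProgress: (p) => console.log(p.stage, p.currentDocument),
  });

  console.log(`${stats.chunksUpserted} chunks from ${stats.documentsSucceeded} documents`);
}
```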
config?.metadataExtractor?.(doc) || {};\n\n // Apply user metadata\n const userMetadata = config?.metadata || {};\n\n // Add chunk metadata\n const chunkMetadata = {\n chunkIndex: chunk.metadata.chunkIndex,\n totalChunks: chunk.metadata.totalChunks,\n startChar: chunk.metadata.startChar,\n endChar: chunk.metadata.endChar\n };\n\n // Merge all metadata (user overrides auto-extracted)\n return {\n ...autoMetadata,\n ...extractedMetadata,\n ...userMetadata,\n ...chunkMetadata\n };\n }\n}\n","// packages/core/src/ingestion/loaders/text-loader.ts\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\nimport type { DocumentLoader } from './document-loader';\nimport type { Document } from '../types';\n\n/**\n * Loader for plain text files (.txt, .md).\n * No external dependencies, uses Node.js built-in fs.\n */\nexport class TextLoader implements DocumentLoader {\n canHandle(filePath: string): boolean {\n return /\\.(txt|md)$/i.test(filePath);\n }\n\n async load(filePath: string): Promise<Document> {\n const text = await fs.readFile(filePath, 'utf-8');\n const type = path.extname(filePath).slice(1).toLowerCase();\n const stats = await fs.stat(filePath);\n const extension = path.extname(filePath);\n\n return {\n text,\n source: filePath,\n type,\n metadata: {\n size: stats.size,\n extension\n }\n };\n }\n}\n","// packages/core/src/ingestion/loaders/pdf-loader.ts\nimport * as fs from 'fs/promises';\nimport pdfParse from 'pdf-parse';\nimport type { DocumentLoader } from './document-loader';\nimport type { Document } from '../types';\n\n/**\n * Loader for PDF files using pdf-parse library.\n * Extracts text from all pages and includes PDF metadata.\n */\nexport class PDFLoader implements DocumentLoader {\n canHandle(filePath: string): boolean {\n return /\\.pdf$/i.test(filePath);\n }\n\n async load(filePath: string): Promise<Document> {\n const dataBuffer = await fs.readFile(filePath);\n const pdfData = await pdfParse(dataBuffer);\n\n return {\n text: pdfData.text,\n source: filePath,\n type: 'pdf',\n metadata: {\n pages: pdfData.numpages,\n info: pdfData.info\n }\n };\n }\n}\n","// packages/core/src/ingestion/loaders/docx-loader.ts\nimport mammoth from 'mammoth';\nimport type { DocumentLoader } from './document-loader';\nimport type { Document } from '../types';\n\n/**\n * Loader for DOCX files using mammoth library.\n * Converts DOCX to plain text, preserves paragraph structure.\n */\nexport class DOCXLoader implements DocumentLoader {\n canHandle(filePath: string): boolean {\n return /\\.docx$/i.test(filePath);\n }\n\n async load(filePath: string): Promise<Document> {\n const result = await mammoth.extractRawText({ path: filePath });\n\n return {\n text: result.value,\n source: filePath,\n type: 'docx',\n metadata: {\n warnings: result.messages // Conversion warnings from mammoth\n }\n };\n }\n}\n","// packages/core/src/ingestion/loaders/html-loader.ts\nimport * as fs from 'fs/promises';\nimport * as cheerio from 'cheerio';\nimport type { DocumentLoader } from './document-loader';\nimport type { Document } from '../types';\n\n/**\n * Loader for HTML files using cheerio library.\n * Strips tags, extracts visible text, removes scripts/styles.\n */\nexport class HTMLLoader implements DocumentLoader {\n canHandle(filePath: string): boolean {\n return /\\.html?$/i.test(filePath);\n }\n\n async load(filePath: string): Promise<Document> {\n const html = await fs.readFile(filePath, 'utf-8');\n const $ = cheerio.load(html);\n\n // Remove script, style, nav, footer\n $('script, style, nav, 
footer').remove();\n\n // Extract text from body\n const text = $('body').text()\n .replace(/\\s+/g, ' ') // Normalize whitespace\n .trim();\n\n return {\n text,\n source: filePath,\n type: 'html',\n metadata: {\n title: $('title').text() || undefined,\n description: $('meta[name=\"description\"]').attr('content') || undefined\n }\n };\n }\n}\n","// packages/core/src/ingestion/loaders/loader-registry.ts\nimport type { DocumentLoader } from './document-loader';\nimport type { Document } from '../types';\nimport { TextLoader } from './text-loader';\nimport { PDFLoader } from './pdf-loader';\nimport { DOCXLoader } from './docx-loader';\nimport { HTMLLoader } from './html-loader';\n\n/**\n * Registry for document loaders.\n * Manages loaders and routes files to correct loader based on extension.\n */\nexport class LoaderRegistry {\n private loaders: DocumentLoader[] = [];\n\n constructor() {\n // Register built-in loaders\n this.register(new TextLoader());\n this.register(new PDFLoader());\n this.register(new DOCXLoader());\n this.register(new HTMLLoader());\n }\n\n /**\n * Register a custom document loader.\n * @param loader - Loader to register\n */\n register(loader: DocumentLoader): void {\n this.loaders.push(loader);\n }\n\n /**\n * Check if any loader can handle this file.\n * @param filePath - Path to check\n * @returns true if a loader exists for this file type\n */\n canLoad(filePath: string): boolean {\n return this.loaders.some(l => l.canHandle(filePath));\n }\n\n /**\n * Load a document using the appropriate loader.\n * @param filePath - Path to the file to load\n * @returns Promise resolving to Document\n * @throws Error if no loader found for file type\n */\n async load(filePath: string): Promise<Document> {\n const loader = this.loaders.find(l => l.canHandle(filePath));\n if (!loader) {\n throw new Error(`No loader found for file: ${filePath}`);\n }\n return loader.load(filePath);\n }\n}\n","// packages/core/src/ingestion/chunkers/fixed-chunker.ts\nimport type { TextChunker } from './text-chunker';\nimport type { TextChunk, ChunkConfig } from '../types';\nimport {\n DEFAULT_CHUNK_SIZE,\n DEFAULT_CHUNK_OVERLAP,\n estimateChars\n} from './text-chunker';\n\n/**\n * Fixed-size text chunker that splits at exact character boundaries.\n * Fast and predictable, but may split mid-sentence or mid-word.\n */\nexport class FixedChunker implements TextChunker {\n chunk(text: string, config?: ChunkConfig): TextChunk[] {\n if (!text) return [];\n\n const chunkSize = config?.chunkSize ?? DEFAULT_CHUNK_SIZE;\n const chunkOverlap = config?.chunkOverlap ?? 
DEFAULT_CHUNK_OVERLAP;\n\n const maxChars = estimateChars(chunkSize);\n const overlapChars = estimateChars(chunkOverlap);\n const step = maxChars - overlapChars;\n\n if (text.length <= maxChars) {\n return [{\n text,\n index: 0,\n metadata: {\n source: '',\n chunkIndex: 0,\n totalChunks: 1,\n startChar: 0,\n endChar: text.length\n }\n }];\n }\n\n const chunks: TextChunk[] = [];\n let position = 0;\n\n while (position < text.length) {\n const end = Math.min(text.length, position + maxChars);\n const chunkText = text.slice(position, end);\n\n chunks.push({\n text: chunkText,\n index: chunks.length,\n metadata: {\n source: '',\n chunkIndex: chunks.length,\n totalChunks: 0, // Updated after loop\n startChar: position,\n endChar: end\n }\n });\n\n position += step;\n // Prevent infinite loop if step is 0\n if (step <= 0) break;\n }\n\n // Update totalChunks\n for (const chunk of chunks) {\n chunk.metadata.totalChunks = chunks.length;\n }\n\n return chunks;\n }\n}\n","// packages/core/src/ingestion/chunkers/sentence-chunker.ts\nimport type { TextChunker } from './text-chunker';\nimport type { TextChunk, ChunkConfig } from '../types';\nimport {\n DEFAULT_CHUNK_SIZE,\n DEFAULT_CHUNK_OVERLAP,\n estimateChars\n} from './text-chunker';\n\n/**\n * Sentence-aware chunker that splits on sentence boundaries.\n * Uses a simple regex-based sentence splitter for portability.\n */\nexport class SentenceChunker implements TextChunker {\n chunk(text: string, config?: ChunkConfig): TextChunk[] {\n if (!text) return [];\n\n const chunkSize = config?.chunkSize ?? DEFAULT_CHUNK_SIZE;\n const chunkOverlap = config?.chunkOverlap ?? DEFAULT_CHUNK_OVERLAP;\n\n const maxChars = estimateChars(chunkSize);\n const overlapChars = estimateChars(chunkOverlap);\n\n // Split into sentences using regex\n const sentences = this.splitSentences(text);\n\n if (sentences.length === 0) {\n return [{\n text,\n index: 0,\n metadata: {\n source: '',\n chunkIndex: 0,\n totalChunks: 1,\n startChar: 0,\n endChar: text.length\n }\n }];\n }\n\n // Group sentences into chunks\n const rawChunks: Array<{ text: string; start: number; end: number }> = [];\n let currentSentences: string[] = [];\n let currentStart = 0;\n\n for (const sentence of sentences) {\n const combined = currentSentences.length > 0\n ? 
[...currentSentences, sentence].join(' ')\n : sentence;\n\n if (currentSentences.length === 0) {\n currentSentences = [sentence];\n currentStart = text.indexOf(sentence);\n } else if (combined.length <= maxChars) {\n currentSentences.push(sentence);\n } else {\n // Save current chunk\n const chunkText = currentSentences.join(' ');\n rawChunks.push({\n text: chunkText,\n start: currentStart,\n end: currentStart + chunkText.length\n });\n\n // Start new chunk\n currentSentences = [sentence];\n currentStart = text.indexOf(sentence, currentStart + 1);\n if (currentStart === -1) currentStart = 0;\n }\n }\n\n // Save last chunk\n if (currentSentences.length > 0) {\n const chunkText = currentSentences.join(' ');\n rawChunks.push({\n text: chunkText,\n start: currentStart,\n end: currentStart + chunkText.length\n });\n }\n\n // Add overlap\n const withOverlap = this.addSentenceOverlap(rawChunks, overlapChars);\n\n return withOverlap.map((chunk, index) => ({\n text: chunk.text,\n index,\n metadata: {\n source: '',\n chunkIndex: index,\n totalChunks: withOverlap.length,\n startChar: chunk.start,\n endChar: chunk.end\n }\n }));\n }\n\n private splitSentences(text: string): string[] {\n // Split on sentence-ending punctuation followed by space or end of string\n const parts = text.match(/[^.!?]*[.!?]+(?:\\s|$)|[^.!?]+$/g);\n if (!parts) return [text];\n return parts.map(s => s.trim()).filter(s => s.length > 0);\n }\n\n private addSentenceOverlap(\n chunks: Array<{ text: string; start: number; end: number }>,\n overlapChars: number\n ): Array<{ text: string; start: number; end: number }> {\n if (overlapChars === 0 || chunks.length <= 1) {\n return chunks;\n }\n\n const result = [chunks[0]];\n\n for (let i = 1; i < chunks.length; i++) {\n const prevChunk = chunks[i - 1];\n const currChunk = chunks[i];\n\n // Find last sentence from previous chunk to use as overlap\n const prevSentences = this.splitSentences(prevChunk.text);\n const lastSentence = prevSentences[prevSentences.length - 1] || '';\n\n if (lastSentence && lastSentence.length <= overlapChars) {\n result.push({\n text: lastSentence + ' ' + currChunk.text,\n start: Math.max(0, prevChunk.end - lastSentence.length),\n end: currChunk.end\n });\n } else {\n result.push(currChunk);\n }\n }\n\n return result;\n }\n}\n","// packages/core/src/client/rag-client.ts\nimport type { VectorDBAdapter } from '../adapters/vector-db-adapter';\nimport type { Embedder } from '../embedders/embedder';\nimport type { LLMClient } from '../llm/llm-client';\nimport type { DistanceMetric } from '../adapters/types';\nimport type { UniversalFilter } from '../filters/types';\nimport type { RetrievalResult } from '../query/types';\nimport type { IngestionConfig, IngestionStats } from '../ingestion/types';\nimport type { EnrichAllConfig, EnrichmentStats } from '../enrichment/types';\nimport type { RAGClientConfig, RetrieveOptions, QueryOptions, RAGResponse } from './types';\nimport { RAGQueryComposer } from '../query/rag-query-composer';\nimport { IngestionPipeline } from '../ingestion/ingestion-pipeline';\nimport { EnrichmentPipeline } from '../enrichment/enrichment-pipeline';\nimport { LoaderRegistry } from '../ingestion/loaders/loader-registry';\nimport { VerticalFields } from '../metadata/constants';\nimport { HorizontalFields } from '../metadata/constants';\n\nconst DEFAULT_TOP_K = 10;\n\nconst DEFAULT_RAG_SYSTEM_PROMPT =\n 'You are a helpful assistant. Answer the question based on the provided context. 
' +\n 'If the context doesn\\'t contain enough information, say so.';\n\n/**\n * RAGClient - Unified facade for all VectorORM operations.\n *\n * Ties together adapter, embedder, LLM, ingestion, enrichment, and query\n * into a single developer-facing API.\n *\n * @example\n * ```typescript\n * const client = new RAGClient({\n * adapter: new ChromaAdapter(),\n * embedder: new OpenAIEmbedder(),\n * llm: new OpenAIClient(),\n * defaultCollection: 'my-docs'\n * });\n *\n * // Ingest documents\n * await client.ingest(['docs/*.pdf']);\n *\n * // Retrieve\n * const result = await client.retrieve('pricing info');\n *\n * // Full RAG query\n * const response = await client.query('What are the pricing terms?');\n * console.log(response.answer);\n * ```\n */\nexport class RAGClient {\n private readonly adapter: VectorDBAdapter;\n private readonly embedder: Embedder;\n private readonly llm?: LLMClient;\n private readonly defaultCollection?: string;\n private readonly defaultTopK: number;\n\n private readonly queryComposer: RAGQueryComposer;\n private readonly ingestionPipeline: IngestionPipeline;\n private readonly enrichmentPipeline: EnrichmentPipeline;\n\n constructor(config: RAGClientConfig) {\n this.adapter = config.adapter;\n this.embedder = config.embedder;\n this.llm = config.llm;\n this.defaultCollection = config.defaultCollection;\n this.defaultTopK = config.defaultTopK ?? DEFAULT_TOP_K;\n\n // Auto-construct internal pipelines\n this.queryComposer = new RAGQueryComposer(this.adapter, this.embedder);\n this.ingestionPipeline = new IngestionPipeline(\n this.adapter,\n this.embedder,\n new LoaderRegistry()\n );\n this.enrichmentPipeline = new EnrichmentPipeline(this.adapter);\n }\n\n // ==========================================================================\n // COLLECTION MANAGEMENT\n // ==========================================================================\n\n /**\n * Create a new vector collection.\n * Dimension defaults to embedder.dimensions if not specified.\n */\n async createCollection(\n name: string,\n dimension?: number,\n metric?: DistanceMetric\n ): Promise<void> {\n const dim = dimension ?? this.embedder.dimensions;\n await this.adapter.createCollection(name, dim, metric);\n }\n\n /**\n * Delete a collection.\n */\n async deleteCollection(name: string): Promise<void> {\n await this.adapter.deleteCollection(name);\n }\n\n /**\n * Check if a collection exists.\n */\n async collectionExists(name: string): Promise<boolean> {\n return this.adapter.collectionExists(name);\n }\n\n // ==========================================================================\n // INGESTION\n // ==========================================================================\n\n /**\n * Ingest documents into a collection.\n * Collection defaults to defaultCollection if not specified.\n */\n async ingest(\n sources: string | string[],\n collection?: string,\n config?: IngestionConfig\n ): Promise<IngestionStats> {\n const col = collection ?? this.defaultCollection;\n if (!col) {\n throw new Error(\n 'No collection specified. 
Pass a collection name or set defaultCollection in config.'\n );\n }\n return this.ingestionPipeline.ingest(sources, col, config);\n }\n\n // ==========================================================================\n // RETRIEVAL\n // ==========================================================================\n\n /**\n * Retrieve relevant chunks for a query.\n * Supports filter shorthands (partition, theme) and groupBy.\n */\n async retrieve(\n query: string,\n options?: RetrieveOptions\n ): Promise<RetrievalResult> {\n const collection = options?.collection ?? this.defaultCollection;\n if (!collection) {\n throw new Error(\n 'No collection specified. Pass a collection name or set defaultCollection in config.'\n );\n }\n\n const topK = options?.topK ?? this.defaultTopK;\n\n // Build filters from shorthands\n let verticalFilters: UniversalFilter | undefined;\n let horizontalFilters: UniversalFilter | undefined;\n const customFilters = options?.filter;\n\n if (options?.partition) {\n verticalFilters = {\n field: VerticalFields.PARTITION,\n op: 'eq' as const,\n value: options.partition\n };\n }\n\n if (options?.theme) {\n horizontalFilters = {\n field: HorizontalFields.THEME,\n op: 'eq' as const,\n value: options.theme\n };\n }\n\n const params = {\n query,\n collection,\n topK,\n verticalFilters,\n horizontalFilters,\n customFilters\n };\n\n // Handle groupBy\n if (options?.groupBy === 'document') {\n const grouped = await this.queryComposer.retrieveVertical(params);\n // Flatten grouped results back into RetrievalResult\n const records = Array.from(grouped.values()).flat();\n return { records, query, filtersApplied: { vertical: verticalFilters, horizontal: horizontalFilters, custom: customFilters } };\n }\n\n if (options?.groupBy === 'theme') {\n const grouped = await this.queryComposer.retrieveHorizontal(params);\n const records = Array.from(grouped.values()).flat();\n return { records, query, filtersApplied: { vertical: verticalFilters, horizontal: horizontalFilters, custom: customFilters } };\n }\n\n return this.queryComposer.retrieve(params);\n }\n\n // ==========================================================================\n // ENRICHMENT\n // ==========================================================================\n\n /**\n * Enrich a collection with vertical, theme, and/or section metadata.\n */\n async enrich(\n collection: string,\n config: EnrichAllConfig\n ): Promise<EnrichmentStats> {\n return this.enrichmentPipeline.enrichAll(collection, config);\n }\n\n // ==========================================================================\n // FULL RAG QUERY\n // ==========================================================================\n\n /**\n * Full RAG: retrieve relevant context and generate an answer using LLM.\n * Requires an LLM client to be provided in the constructor config.\n */\n async query(\n question: string,\n options?: QueryOptions\n ): Promise<RAGResponse> {\n if (!this.llm) {\n throw new Error(\n 'RAGClient.query() requires an LLM client. Pass one in the constructor config.'\n );\n }\n\n // 1. Retrieve relevant chunks\n const retrievalResult = await this.retrieve(question, options);\n\n // 2. Build context from chunk texts\n const context = retrievalResult.records\n .map((r) => r.text)\n .filter(Boolean)\n .join('\\n\\n');\n\n // 3. Build prompt\n const systemPrompt = options?.systemPrompt ?? DEFAULT_RAG_SYSTEM_PROMPT;\n const prompt = `${systemPrompt}\\n\\nContext:\\n${context}\\n\\nQuestion: ${question}`;\n\n // 4. 
Generate answer\n const answer = await this.llm.generate(prompt, {\n temperature: options?.temperature,\n maxTokens: options?.maxTokens\n });\n\n return {\n answer,\n sources: retrievalResult.records,\n query: question,\n retrievalResult\n };\n }\n}\n"],"mappings":";AAQO,IAAM,oBAAoB;AAAA,EAC/B,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,YAAY;AACd;AAKO,IAAM,iBAAiB;AAAA;AAAA,EAE5B,QAAQ;AAAA;AAAA,EAGR,QAAQ;AAAA;AAAA,EAGR,WAAW;AAAA;AAAA,EAGX,UAAU;AAAA;AAAA,EAGV,MAAM;AACR;AAKO,IAAM,mBAAmB;AAAA;AAAA,EAE9B,OAAO;AAAA;AAAA,EAGP,QAAQ;AAAA;AAAA,EAGR,kBAAkB;AAAA;AAAA,EAGlB,cAAc;AAAA;AAAA,EAGd,eAAe;AAAA;AAAA,EAGf,eAAe;AACjB;AAKO,IAAM,mBAAmB;AAAA;AAAA,EAE9B,aAAa;AAAA;AAAA,EAGb,WAAW;AAAA;AAAA,EAGX,cAAc;AAAA;AAAA,EAGd,cAAc;AAChB;;;AClDO,IAAM,kBAAN,MAAsB;AAAA,EACnB,WAAgC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASzC,SAAS,QAAmC;AAC1C,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,UAAI,UAAU,QAAW;AACvB,aAAK,SAAS,GAAG,kBAAkB,QAAQ,GAAG,GAAG,EAAE,IAAI;AAAA,MACzD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,WAAW,QAAmC;AAC5C,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,UAAI,UAAU,QAAW;AACvB,aAAK,SAAS,GAAG,kBAAkB,UAAU,GAAG,GAAG,EAAE,IAAI;AAAA,MAC3D;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,WAAW,QAAmC;AAC5C,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,UAAI,UAAU,QAAW;AACvB,aAAK,SAAS,GAAG,kBAAkB,UAAU,GAAG,GAAG,EAAE,IAAI;AAAA,MAC3D;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,OAAO,QAAmC;AACxC,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,UAAI,UAAU,QAAW;AACvB,aAAK,SAAS,GAAG,IAAI;AAAA,MACvB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,QAA6B;AAC3B,WAAO,EAAE,GAAG,KAAK,SAAS;AAAA,EAC5B;AACF;;;ACtFA,IAAM,kBAAoC;AAAA,EACxC;AAAA,EAAM;AAAA,EAAO;AAAA,EAAM;AAAA,EAAO;AAAA,EAAM;AAAA,EAAO;AAAA,EAAM;AAAA,EAAO;AAAA,EAAY;AAClE;AAKO,IAAM,mBAAN,MAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAS5B,OAAO,UAAU,OAA2D;AAE1E,QAAI,KAAK,iBAAiB,KAAK,GAAG;AAChC,aAAO;AAAA,IACT;AAGA,WAAO,KAAK,cAAc,KAAwB;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,SAAS,QAA+B;AAC7C,QAAI,KAAK,WAAW,MAAM,GAAG;AAC3B,YAAM,WAAW;AACjB,YAAM,aAAa,SAAS,WAAW,SAAS,MAAM,SAAS;AAE/D,UAAI,CAAC,MAAM,QAAQ,UAAU,KAAK,WAAW,WAAW,GAAG;AACzD,cAAM,IAAI,MAAM,kDAAkD;AAAA,MACpE;AAEA,iBAAW,QAAQ,OAAK,KAAK,SAAS,CAAC,CAAC;AAAA,IAC1C,OAAO;AACL,YAAM,YAAY;AAElB,UAAI,CAAC,UAAU,SAAS,OAAO,UAAU,UAAU,UAAU;AAC3D,cAAM,IAAI,MAAM,yCAAyC;AAAA,MAC3D;AAEA,UAAI,CAAC,gBAAgB,SAAS,UAAU,EAAE,GAAG;AAC3C,cAAM,IAAI,MAAM,4BAA4B,UAAU,EAAE,EAAE;AAAA,MAC5D;AAEA,UAAI,UAAU,UAAU,QAAW;AACjC,cAAM,IAAI,MAAM,0BAA0B;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,WAAW,QAAkC;AAClD,WAAO,SAAS,UAAU,QAAQ;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,iBAAiB,OAAqB;AACnD,QAAI,CAAC,SAAS,OAAO,UAAU,UAAU;AACvC,aAAO;AAAA,IACT;AAGA,QAAI,SAAS,SAAS,QAAQ,OAAO;AACnC,aAAO;AAAA,IACT;AAGA,QAAI,WAAW,SAAS,QAAQ,SAAS,WAAW,OAAO;AACzD,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,cAAc,WAA6C;AACxE,UAAM,UAAU,OAAO,QAAQ,SAAS;AAExC,QAAI,QAAQ,WAAW,GAAG;AACxB,YAAM,IAAI,MAAM,8CAA8C;AAAA,IAChE;AAEA,UAAM,aAAgC,CAAC;AAEvC,eAAW,CAAC,KAAK,KAAK,KAAK,SAAS;AAElC,UAAI;AACJ,UAAI;AAEJ,UAAI,IAAI,SAAS,IAAI,KAAK,CAAC,IAAI,WAAW,IAAI,GAAG;AAE/C,cAAM,YAAY,IAAI,YAAY,IAAI;AACtC,gBAAQ,IAAI,UAAU,GAAG,SAAS;AAClC,cAAM,cAAc,IAAI,UAAU,YAAY,CAAC;AAE/C,YAAI,CAAC,gBAAgB,SAAS,WAA6B,GAAG;AAC5D,gBAAM,IAAI,MAAM,yCAAyC,WAAW,EAAE;AAAA,QACxE;AAEA,aAAK;AAAA,MACP,OAAO;AAEL,gBAAQ;AACR,aAAK;AAAA,MACP;AAEA,iBAAW,KAAK,EAAE,OAAO,IAAI,MAAM,CAAC;AAAA,IACtC;AAGA,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO,WAAW,CAAC;AAAA,IACrB;AAGA,WAAO,EAAE,KAAK,WAAW;AAAA,EAC3B;AA
CF;;;AC5HO,IAAe,kBAAf,MAA+B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8LpC,yBAAkC;AAChC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,oBAA6B;AAC3B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,0BAAmC;AACjC,WAAO;AAAA,EACT;AACF;;;ACvNO,IAAM,gBAAN,MAAoB;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQR,mBAAmB,QAAqD;AACtE,SAAK,iBAAiB,iBAAiB,UAAU,MAAM;AACvD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,qBAAqB,QAAqD;AACxE,SAAK,mBAAmB,iBAAiB,UAAU,MAAM;AACzD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,QAAqD;AACpE,SAAK,eAAe,iBAAiB,UAAU,MAAM;AACrD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,QAAqC;AACnC,UAAM,UAA6B,CAAC;AAEpC,QAAI,KAAK,gBAAgB;AACvB,cAAQ,KAAK,KAAK,cAAc;AAAA,IAClC;AAEA,QAAI,KAAK,kBAAkB;AACzB,cAAQ,KAAK,KAAK,gBAAgB;AAAA,IACpC;AAEA,QAAI,KAAK,cAAc;AACrB,cAAQ,KAAK,KAAK,YAAY;AAAA,IAChC;AAGA,QAAI,QAAQ,WAAW,GAAG;AACxB,aAAO;AAAA,IACT;AAGA,QAAI,QAAQ,WAAW,GAAG;AACxB,aAAO,QAAQ,CAAC;AAAA,IAClB;AAGA,WAAO,EAAE,KAAK,QAAQ;AAAA,EACxB;AACF;;;ACnDO,IAAM,mBAAN,MAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAO5B,YACmB,SACA,UACjB;AAFiB;AACA;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcH,MAAM,SAAS,QAAmD;AAEhE,UAAM,cAAc,MAAM,KAAK,SAAS,MAAM,OAAO,KAAK;AAG1D,UAAM,gBAAgB,IAAI,cAAc;AAExC,QAAI,OAAO,iBAAiB;AAC1B,oBAAc,mBAAmB,OAAO,eAAe;AAAA,IACzD;AAEA,QAAI,OAAO,mBAAmB;AAC5B,oBAAc,qBAAqB,OAAO,iBAAiB;AAAA,IAC7D;AAEA,QAAI,OAAO,eAAe;AACxB,oBAAc,iBAAiB,OAAO,aAAa;AAAA,IACrD;AAEA,UAAM,iBAAiB,cAAc,MAAM;AAG3C,UAAM,eAAe,MAAM,KAAK,QAAQ;AAAA,MACtC,OAAO;AAAA,MACP;AAAA,MACA;AAAA,QACE,MAAM,OAAO;AAAA,QACb,QAAQ;AAAA,QACR,iBAAiB;AAAA,QACjB,eAAe,OAAO;AAAA,MACxB;AAAA,IACF;AAGA,WAAO;AAAA,MACL,SAAS,aAAa;AAAA,MACtB,OAAO,OAAO;AAAA,MACd,gBAAgB;AAAA,QACd,GAAI,OAAO,mBAAmB,EAAE,UAAU,OAAO,gBAAgB;AAAA,QACjE,GAAI,OAAO,qBAAqB,EAAE,YAAY,OAAO,kBAAkB;AAAA,QACvE,GAAI,OAAO,iBAAiB,EAAE,QAAQ,OAAO,cAAc;AAAA,MAC7D;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,iBACJ,QACsC;AACtC,UAAM,SAAS,MAAM,KAAK,SAAS,MAAM;AAEzC,UAAM,UAAU,oBAAI,IAA4B;AAEhD,eAAW,UAAU,OAAO,SAAS;AACnC,YAAM,QAAQ,OAAO,SAAS,eAAe,MAAM;AAEnD,UAAI,OAAO,UAAU,UAAU;AAC7B,YAAI,CAAC,QAAQ,IAAI,KAAK,GAAG;AACvB,kBAAQ,IAAI,OAAO,CAAC,CAAC;AAAA,QACvB;AACA,gBAAQ,IAAI,KAAK,EAAG,KAAK,MAAM;AAAA,MACjC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,mBACJ,QACsC;AACtC,UAAM,SAAS,MAAM,KAAK,SAAS,MAAM;AAEzC,UAAM,UAAU,oBAAI,IAA4B;AAEhD,eAAW,UAAU,OAAO,SAAS;AACnC,YAAM,QAAQ,OAAO,SAAS,iBAAiB,KAAK;AAEpD,UAAI,OAAO,UAAU,UAAU;AAC7B,YAAI,CAAC,QAAQ,IAAI,KAAK,GAAG;AACvB,kBAAQ,IAAI,OAAO,CAAC,CAAC;AAAA,QACvB;AACA,gBAAQ,IAAI,KAAK,EAAG,KAAK,MAAM;AAAA,MACjC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;;;AC9IO,IAAe,WAAf,MAAe,UAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAkCnB,cAAc;AACtB,QAAI,eAAe,WAAU;AAC3B,YAAM,IAAI,MAAM,qDAAqD;AAAA,IACvE;AAAA,EACF;AACF;;;AChCO,IAAe,YAAf,MAAe,WAAU;AAAA;AAAA;AAAA;AAAA;AAAA,EAoDpB,cAAc;AACtB,QAAI,eAAe,YAAW;AAC5B,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAAA,EACF;AACF;;;AC9EO,IAAM,UAAN,cAAsB,UAAU;AAAA,EAC7B,YAAoB;AAAA,EAE5B,cAAc;AACZ,UAAM;AAAA,EACR;AAAA,EAEA,IAAI,YAAoB;AACtB,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,YAAY,UAAwB;AAClC,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,SACJ,QACA,SACiB;AAEjB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,aACJ,QACA,SACY;AAEZ,QAAI;AACF,aAAO,KAAK,MAAM,KAAK,SAAS;AAAA,IAClC,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,0CAA0C,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MACpG;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,cACJ,SACA,SACmB;AAEnB,WAAO,QAAQ,IAAI,MAAM,KAAK,SAAS;AAAA,EACzC;AACF;;;ACxDO,IAAM,yBAA
N,MAA6B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUlC,YACU,QACR,UACQ,gBAAyB,OACjC;AAHQ;AAEA;AAER,SAAK,WAAW,oBAAI,IAAI;AACxB,SAAK,gBAAgB,oBAAI,IAAI;AAG7B,eAAW,SAAS,QAAQ;AAC1B,YAAM,gBAAgB,SAAS,KAAK,KAAK,CAAC;AAC1C,WAAK,cAAc,IAAI,OAAO,cAAc,MAAM;AAElD,YAAM,WAAW,cAAc,IAAI,CAAC,YAAY;AAC9C,cAAM,iBAAiB,KAAK,YAAY,OAAO;AAC/C,cAAM,QAAQ,gBAAgB,MAAM;AACpC,eAAO,IAAI,OAAO,MAAM,cAAc,OAAO,KAAK;AAAA,MACpD,CAAC;AAED,WAAK,SAAS,IAAI,OAAO,QAAQ;AAAA,IACnC;AAAA,EACF;AAAA,EA9BQ;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoCR,SAAS,MAAyC;AAChD,QAAI,CAAC,QAAQ,KAAK,KAAK,EAAE,WAAW,GAAG;AACrC,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,WAAW,CAAC;AAAA,MACd;AAAA,IACF;AAEA,UAAM,SAAiC,CAAC;AACxC,QAAI,WAAW;AACf,QAAI,eAAe;AAGnB,eAAW,SAAS,KAAK,QAAQ;AAC/B,YAAM,WAAW,KAAK,SAAS,IAAI,KAAK,KAAK,CAAC;AAC9C,UAAI,aAAa;AAEjB,iBAAW,WAAW,UAAU;AAC9B,cAAM,UAAU,KAAK,MAAM,OAAO;AAClC,YAAI,SAAS;AACX,wBAAc,QAAQ;AAAA,QACxB;AAAA,MACF;AAEA,aAAO,KAAK,IAAI;AAGhB,UAAI,aAAa,UAAU;AACzB,mBAAW;AACX,uBAAe;AAAA,MACjB;AAAA,IACF;AAGA,QAAI,aAAa,GAAG;AAClB,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,WAAW;AAAA,MACb;AAAA,IACF;AAGA,UAAM,gBAAgB,KAAK,cAAc,IAAI,YAAY,KAAK;AAC9D,UAAM,aAAa,WAAW;AAE9B,WAAO;AAAA,MACL,OAAO;AAAA,MACP,YAAY,KAAK,IAAI,YAAY,CAAG;AAAA;AAAA,MACpC,WAAW;AAAA,IACb;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAc,OAA8C;AAC1D,WAAO,MAAM,IAAI,CAAC,SAAS,KAAK,SAAS,IAAI,CAAC;AAAA,EAChD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,YAAY,KAAqB;AACvC,WAAO,IAAI,QAAQ,uBAAuB,MAAM;AAAA,EAClD;AACF;;;AC3FO,IAAM,0BAAN,MAAyD;AAAA,EACtD,QAAa;AAAA,EACb;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBR,YACE,QACA,YAAoB,uCACpB;AACA,SAAK,SAAS;AACd,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,oBAAkC;AAC9C,QAAI,CAAC,KAAK,OAAO;AACf,YAAM,EAAE,SAAS,IAAI,MAAM,OAAO,sBAAsB;AACxD,WAAK,QAAQ,MAAM,SAAS,4BAA4B,KAAK,SAAS;AAAA,IACxE;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,MAAM,SAAS,MAA4C;AAEzD,QAAI,CAAC,QAAQ,KAAK,KAAK,EAAE,WAAW,GAAG;AACrC,YAAM,eAAe,IAAM,KAAK,OAAO;AACvC,YAAMA,aAAoC,CAAC;AAE3C,iBAAW,SAAS,KAAK,QAAQ;AAC/B,QAAAA,WAAU,KAAK,IAAI;AAAA,MACrB;AAEA,aAAO;AAAA,QACL,OAAO,KAAK,OAAO,CAAC;AAAA;AAAA,QACpB,YAAY;AAAA,QACZ,WAAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,QAAQ,MAAM,KAAK,kBAAkB;AAG3C,UAAM,SAAS,MAAM,MAAM,MAAM,KAAK,MAAM;AAM5C,UAAM,YAAoC,CAAC;AAC3C,aAAS,IAAI,GAAG,IAAI,OAAO,OAAO,QAAQ,KAAK;AAC7C,gBAAU,OAAO,OAAO,CAAC,CAAC,IAAI,OAAO,OAAO,CAAC;AAAA,IAC/C;AAGA,WAAO;AAAA,MACL,OAAO,OAAO,OAAO,CAAC;AAAA,MACtB,YAAY,OAAO,OAAO,CAAC;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBA,MAAM,cAAc,OAAiD;AAEnE,UAAM,KAAK,kBAAkB;AAG7B,UAAM,UAAiC,CAAC;AACxC,eAAW,QAAQ,OAAO;AACxB,YAAM,SAAS,MAAM,KAAK,SAAS,IAAI;AACvC,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,EACT;AACF;;;ACnIO,IAAM,2BAAN,MAA0D;AAAA,EACvD,kBAAmD;AAAA,EACnD;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBR,YACE,QACA,UACA,uBACA;AACA,SAAK,SAAS;AACd,SAAK,WAAW;AAChB,SAAK,kBAAkB,yBAAyB;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,wBAA2D;AACvE,QAAI,CAAC,KAAK,iBAAiB;AACzB,WAAK,kBAAkB,CAAC;AAGxB,YAAM,aAAa,MAAM,KAAK,SAAS,WAAW,KAAK,MAAM;AAE7D,eAAS,IAAI,GAAG,IAAI,KAAK,OAAO,QAAQ,KAAK;AAC3C,aAAK,gBAAgB,KAAK,OAAO,CAAC,CAAC,IAAI,WAAW,CAAC;AAAA,MACrD;AAAA,IACF;AAEA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeQ,iBAAiB,GAAa,GAAqB;AACzD,QAAI,EAAE,WAAW,EAAE,QAAQ;AACzB,YAAM,IAAI,MAAM,yDAAyD;AAAA,IAC3E;AAEA,QAAI,aAA
a;AACjB,QAAI,QAAQ;AACZ,QAAI,QAAQ;AAEZ,aAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,oBAAc,EAAE,CAAC,IAAI,EAAE,CAAC;AACxB,eAAS,EAAE,CAAC,IAAI,EAAE,CAAC;AACnB,eAAS,EAAE,CAAC,IAAI,EAAE,CAAC;AAAA,IACrB;AAEA,YAAQ,KAAK,KAAK,KAAK;AACvB,YAAQ,KAAK,KAAK,KAAK;AAGvB,QAAI,UAAU,KAAK,UAAU,GAAG;AAC9B,aAAO;AAAA,IACT;AAEA,WAAO,cAAc,QAAQ;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,sBAAsB,YAA4B;AACxD,YAAQ,aAAa,KAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,MAAM,SAAS,MAA4C;AAEzD,QAAI,CAAC,QAAQ,KAAK,KAAK,EAAE,WAAW,GAAG;AACrC,YAAM,eAAe,IAAM,KAAK,OAAO;AACvC,YAAMC,aAAoC,CAAC;AAE3C,iBAAW,SAAS,KAAK,QAAQ;AAC/B,QAAAA,WAAU,KAAK,IAAI;AAAA,MACrB;AAEA,aAAO;AAAA,QACL,OAAO,KAAK,OAAO,CAAC;AAAA;AAAA,QACpB,YAAY;AAAA,QACZ,WAAAA;AAAA,MACF;AAAA,IACF;AAGA,UAAM,kBAAkB,MAAM,KAAK,sBAAsB;AAGzD,UAAM,gBAAgB,MAAM,KAAK,SAAS,MAAM,IAAI;AAGpD,UAAM,eAAuC,CAAC;AAC9C,QAAI,gBAAgB;AACpB,QAAI,eAAe,KAAK,OAAO,CAAC;AAEhC,eAAW,SAAS,KAAK,QAAQ;AAC/B,YAAM,iBAAiB,gBAAgB,KAAK;AAC5C,YAAM,aAAa,KAAK,iBAAiB,eAAe,cAAc;AACtE,mBAAa,KAAK,IAAI;AAEtB,UAAI,aAAa,eAAe;AAC9B,wBAAgB;AAChB,uBAAe;AAAA,MACjB;AAAA,IACF;AAGA,UAAM,YAAoC,CAAC;AAC3C,eAAW,SAAS,KAAK,QAAQ;AAC/B,gBAAU,KAAK,IAAI,KAAK,sBAAsB,aAAa,KAAK,CAAC;AAAA,IACnE;AAEA,WAAO;AAAA,MACL,OAAO;AAAA,MACP,YAAY,KAAK,sBAAsB,aAAa;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBA,MAAM,cAAc,OAAiD;AAEnE,UAAM,KAAK,sBAAsB;AAGjC,UAAM,UAAiC,CAAC;AACxC,eAAW,QAAQ,OAAO;AACxB,YAAM,SAAS,MAAM,KAAK,SAAS,IAAI;AACvC,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,EACT;AACF;;;ACrOA,IAAM,0BAA0B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAsDzB,IAAM,qBAAN,MAAoD;AAAA,EACjD;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2BR,YACE,QACA,KACA,iBAAyB,yBACzB;AACA,SAAK,SAAS;AACd,SAAK,MAAM;AACX,SAAK,iBAAiB;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,YAAY,MAAsB;AACxC,UAAM,YAAY,KAAK,OAAO,KAAK,IAAI;AACvC,WAAO,KAAK,eACT,QAAQ,YAAY,SAAS,EAC7B,QAAQ,UAAU,IAAI;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,MAAM,SAAS,MAA4C;AAEzD,QAAI,CAAC,QAAQ,KAAK,KAAK,EAAE,WAAW,GAAG;AACrC,YAAM,eAAe,IAAM,KAAK,OAAO;AACvC,YAAM,YAAoC,CAAC;AAE3C,iBAAW,SAAS,KAAK,QAAQ;AAC/B,kBAAU,KAAK,IAAI;AAAA,MACrB;AAEA,aAAO;AAAA,QACL,OAAO,KAAK,OAAO,CAAC;AAAA;AAAA,QACpB,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AAGA,UAAM,SAAS,KAAK,YAAY,IAAI;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,IAAI,aAAkC,MAAM;AACtE,aAAO;AAAA,IACT,SAAS,OAAO;AAEd,YAAM,UAAU,qCAAqC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAC7G,YAAM,sBAAsB,IAAI,MAAM,OAAO;AAG7C,UAAI,iBAAiB,OAAO;AAC1B,QAAC,oBAA4B,QAAQ;AAAA,MACvC;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBA,MAAM,cAAc,OAAiD;AAEnE,UAAM,UAAiC,CAAC;AAExC,eAAW,QAAQ,OAAO;AACxB,YAAM,SAAS,MAAM,KAAK,SAAS,IAAI;AACvC,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,EACT;AACF;;;AC5IO,IAAM,qBAAN,MAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ9B,YACU,SACA,UACA,KACR;AAHQ;AACA;AACA;AAAA,EACP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmCH,MAAM,eACJ,YACA,QAC0B;AAC1B,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,QAAyB;AAAA,MAC7B,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,QAAQ;AAAA,MACR,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AAEF,UAAI,aAAa,QAAQ;AACvB,cAAM,KAAK,uBAAuB,YAAY,QAAQ,KAAK;AAA
A,MAC7D,WAAW,eAAe,QAAQ;AAChC,cAAM,KAAK,oBAAoB,YAAY,QAAQ,KAAK;AAAA,MAC1D,WAAW,eAAe,QAAQ;AAChC,cAAM,KAAK,cAAc,YAAY,QAAQ,KAAK;AAAA,MACpD;AAAA,IACF,SAAS,OAAO;AACd,YAAM,QAAQ;AAAA,QACZ,mBAAmB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAC7E;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,IAAI,IAAI;AAC5B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,uBACZ,YACA,QACA,OACe;AACf,UAAM,YAAY,OAAO,aAAa;AAEtC,qBAAiB,SAAS,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACzD;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB,CAAC,GAAG;AACF,YAAM,UAA4B,CAAC;AAEnC,iBAAW,UAAU,OAAO;AAC1B,cAAM;AAEN,YAAI;AACF,gBAAM,WAAW,KAAK,kBAAkB,QAAQ,OAAO,OAAO;AAE9D,cAAI,UAAU;AACZ,oBAAQ,KAAK;AAAA,cACX,IAAI,OAAO;AAAA,cACX,UAAU,EAAE,SAAS;AAAA,YACvB,CAAC;AAAA,UACH,OAAO;AACL,kBAAM;AAAA,UACR;AAAA,QACF,SAAS,OAAO;AACd,gBAAM;AACN,gBAAM,QAAQ;AAAA,YACZ,wBAAwB,OAAO,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UAChG;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAI;AACF,gBAAM,KAAK,QAAQ,eAAe,YAAY,OAAO;AACrD,gBAAM,kBAAkB,QAAQ;AAAA,QAClC,SAAS,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,kBACN,QACA,SACe;AACf,UAAM,WAAW,OAAO,UAAU;AAElC,QAAI,YAAY,OAAO,aAAa,YAAY,YAAY,SAAS;AACnE,aAAO,QAAQ,QAAQ;AAAA,IACzB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,oBACZ,YACA,QACA,OACe;AACf,UAAM,YAAY,OAAO,aAAa;AAEtC,qBAAiB,SAAS,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACzD;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB,CAAC,GAAG;AACF,YAAM,UAA4B,CAAC;AAEnC,iBAAW,UAAU,OAAO;AAC1B,cAAM;AAEN,YAAI;AACF,gBAAM,WAAW,MAAM,OAAO,UAAU,MAAM;AAE9C,cAAI,UAAU;AACZ,oBAAQ,KAAK;AAAA,cACX,IAAI,OAAO;AAAA,cACX,UAAU,EAAE,SAAS;AAAA,YACvB,CAAC;AAAA,UACH,OAAO;AACL,kBAAM;AAAA,UACR;AAAA,QACF,SAAS,OAAO;AACd,gBAAM;AACN,gBAAM,QAAQ;AAAA,YACZ,8BAA8B,OAAO,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACtG;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAI;AACF,gBAAM,KAAK,QAAQ,eAAe,YAAY,OAAO;AACrD,gBAAM,kBAAkB,QAAQ;AAAA,QAClC,SAAS,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,cACZ,YACA,QACA,OACe;AACf,UAAM,YAAY,OAAO,aAAa;AACtC,UAAM,EAAE,KAAK,QAAQ,gBAAgB,UAAU,IAAI,OAAO;AAC1D,UAAM,YAAY,aAAa;AAE/B,qBAAiB,SAAS,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACzD;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB,CAAC,GAAG;AACF,YAAM,UAA4B,CAAC;AAEnC,iBAAW,UAAU,OAAO;AAC1B,cAAM;AAEN,YAAI;AACF,gBAAM,WAAW,MAAM,KAAK;AAAA,YAC1B;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,cAAI,UAAU;AACZ,oBAAQ,KAAK;AAAA,cACX,IAAI,OAAO;AAAA,cACX,UAAU,EAAE,SAAS;AAAA,YACvB,CAAC;AAAA,UACH,OAAO;AACL,kBAAM;AAAA,UACR;AAAA,QACF,SAAS,OAAO;AACd,gBAAM;AACN,gBAAM,QAAQ;AAAA,YACZ,mCAAmC,OAAO,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UAC3G;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAI;AACF,gBAAM,KAAK,QAAQ,eAAe,YAAY,OAAO;AACrD,gBAAM,kBAAkB,QAAQ;AAAA,QAClC,SAAS,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAc,eACZ,QACA,KACA,QACA,WACA,gBACiB;AACjB,UAAM,OAAO,OAAO,WAAW,SAAS;AAExC,QAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,YAAM,IAAI,MAAM,2BAA2B,SAAS,GAAG;AAAA,IACzD;AAGA,UAAM,SAAS,iBACX,eACG,QAAQ,YAAY,OAAO,KAAK,IAAI,CAAC,EACrC,QAAQ,UAAU,IAAI,IACzB,6DAA6D,OAAO,KAAK,IAAI,CAAC;AAAA;AAAA,QAAa,IAAI;AAAA;AAAA;AAGnG,UAAM,SAAS,MAAM,IAAI,SAAS,MAAM;AAExC,WAAO,OAAO,KAAK;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA+BA,MAAM,aACJ,YACA,QAC0
B;AAC1B,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,QAAyB;AAAA,MAC7B,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,QAAQ;AAAA,MACR,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AACF,YAAM,KAAK,0BAA0B,YAAY,QAAQ,KAAK;AAAA,IAChE,SAAS,OAAO;AACd,YAAM,QAAQ;AAAA,QACZ,mBAAmB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAC7E;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,IAAI,IAAI;AAC5B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,0BACZ,YACA,QACA,OACe;AACf,UAAM,YAAY,OAAO,aAAa;AACtC,UAAM,YAAY,OAAO,aAAa;AACtC,UAAM,sBAAsB,OAAO,uBAAuB;AAC1D,UAAM,aAAa,OAAO,cAAc;AAExC,qBAAiB,SAAS,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACzD;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB,CAAC,GAAG;AAEF,YAAM,kBAA4B,CAAC;AACnC,YAAM,mBAAmC,CAAC;AAE1C,iBAAW,UAAU,OAAO;AAC1B,cAAM;AAGN,cAAM,OAAO,OAAO,QAAQ,OAAO,WAAW,SAAS;AAEvD,YAAI,CAAC,QAAQ,OAAO,SAAS,YAAY,KAAK,KAAK,MAAM,IAAI;AAC3D,gBAAM;AACN;AAAA,QACF;AAEA,wBAAgB,KAAK,IAAI;AACzB,yBAAiB,KAAK,MAAM;AAAA,MAC9B;AAGA,UAAI,gBAAgB,WAAW,GAAG;AAChC;AAAA,MACF;AAGA,UAAI;AACJ,UAAI;AACF,0BAAkB,MAAM,OAAO,WAAW,cAAc,eAAe;AAAA,MACzE,SAAS,OAAO;AAEd,cAAM,QAAQ;AAAA,UACZ,0EAA0E,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,QACpI;AAEA,0BAAkB,CAAC;AACnB,iBAAS,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK;AAC/C,cAAI;AACF,kBAAM,SAAS,MAAM,OAAO,WAAW,SAAS,gBAAgB,CAAC,CAAC;AAClE,4BAAgB,KAAK,MAAM;AAAA,UAC7B,SAAS,iBAAiB;AAExB,4BAAgB,KAAK,IAAI;AACzB,kBAAM,QAAQ;AAAA,cACZ,mCAAmC,iBAAiB,CAAC,EAAE,EAAE,KAAK,2BAA2B,QAAQ,gBAAgB,UAAU,eAAe;AAAA,YAC5I;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,YAAM,UAA4B,CAAC;AAEnC,eAAS,IAAI,GAAG,IAAI,iBAAiB,QAAQ,KAAK;AAChD,cAAM,SAAS,iBAAiB,CAAC;AACjC,cAAM,iBAAiB,gBAAgB,CAAC;AAExC,YAAI;AAEF,cAAI,CAAC,kBAAkB,OAAO,mBAAmB,UAAU;AACzD,kBAAM;AACN,kBAAM,QAAQ;AAAA,cACZ,qCAAqC,OAAO,EAAE;AAAA,YAChD;AACA;AAAA,UACF;AAGA,cAAI,eAAe,aAAa,qBAAqB;AACnD,kBAAM;AACN;AAAA,UACF;AAGA,gBAAM,WAAgC;AAAA,YACpC,WAAW,eAAe;AAAA,YAC1B,sBAAsB,eAAe;AAAA,UACvC;AAGA,cAAI,cAAc,eAAe,WAAW;AAC1C,kBAAM,SAAS,OAAO,QAAQ,eAAe,SAAS,EACnD,OAAO,CAAC,CAAC,GAAG,KAAK,MAAO,SAAoB,mBAAmB,EAC/D,KAAK,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,MAAO,IAAgB,CAAY,EACvD,IAAI,CAAC,CAAC,OAAO,CAAC,MAAM,KAAK;AAE5B,gBAAI,OAAO,SAAS,GAAG;AACrB,uBAAS,aAAa;AAAA,YACxB;AAAA,UACF;AAEA,kBAAQ,KAAK;AAAA,YACX,IAAI,OAAO;AAAA,YACX;AAAA,UACF,CAAC;AAAA,QACH,SAAS,OAAO;AACd,gBAAM;AACN,gBAAM,QAAQ;AAAA,YACZ,2BAA2B,OAAO,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnG;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAI;AACF,gBAAM,KAAK,QAAQ,eAAe,YAAY,OAAO;AACrD,gBAAM,kBAAkB,QAAQ;AAAA,QAClC,SAAS,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnF;AAAA,QACF;AAAA,MACF;AAGA,UAAI,OAAO,YAAY;AACrB,eAAO,WAAW,KAAK;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyBA,MAAM,eACJ,YACA,QAC0B;AAC1B,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,QAAyB;AAAA,MAC7B,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,QAAQ;AAAA,MACR,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AACF,YAAM,KAAK,2BAA2B,YAAY,QAAQ,KAAK;AAAA,IACjE,SAAS,OAAO;AACd,YAAM,QAAQ;AAAA,QACZ,mBAAmB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAC7E;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,IAAI,IAAI;AAC5B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,MAAM,UACJ,YACA,QAC0B;AAC1B,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,iBAAkC;AAAA,MACtC,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,QAAQ;AAAA,MACR,QAAQ,CAAC;AAAA,IACX;AAEA,QAAI;AAEF,UAAI,OAAO,UAAU;AACnB,cAAM,iBAAiB,KAAK,kBAAkB,OAAO,UAAU,MAAM;AACrE,cAAM,QAAQ,MAAM,KAAK,eAAe,YAAY,cAAc;AAClE,aAAK,WAAW,gBAAgB,KAAK;AAGrC,YAAI,OAAO,YAAY;AACrB,iBAAO,WAAW,cAAc;AAAA,QAClC;AAAA,MACF;AAGA,UAAI,OAAO,QAAQ;AACjB,cAAM,eAAe,KAAK,kBAAkB,OAA
O,QAAQ,MAAM;AACjE,cAAM,QAAQ,MAAM,KAAK,aAAa,YAAY,YAAY;AAC9D,aAAK,WAAW,gBAAgB,KAAK;AAGrC,YAAI,OAAO,YAAY;AACrB,iBAAO,WAAW,cAAc;AAAA,QAClC;AAAA,MACF;AAGA,UAAI,OAAO,UAAU;AACnB,cAAM,iBAAiB,KAAK,kBAAkB,OAAO,UAAU,MAAM;AACrE,cAAM,QAAQ,MAAM,KAAK,eAAe,YAAY,cAAc;AAClE,aAAK,WAAW,gBAAgB,KAAK;AAGrC,YAAI,OAAO,YAAY;AACrB,iBAAO,WAAW,cAAc;AAAA,QAClC;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,qBAAe,QAAQ;AAAA,QACrB,mBAAmB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAC7E;AAAA,IACF;AAEA,mBAAe,SAAS,KAAK,IAAI,IAAI;AACrC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,kBACN,kBACA,cACG;AACH,UAAM,SAAS,EAAE,GAAG,iBAAiB;AAGrC,QAAI,aAAa,UAAU,CAAC,OAAO,QAAQ;AACzC,aAAO,SAAS,aAAa;AAAA,IAC/B;AAGA,QAAI,aAAa,aAAa,CAAC,OAAO,WAAW;AAC/C,aAAO,YAAY,aAAa;AAAA,IAClC;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,WAAW,WAA4B,OAA8B;AAC3E,cAAU,oBAAoB,MAAM;AACpC,cAAU,kBAAkB,MAAM;AAClC,cAAU,kBAAkB,MAAM;AAGlC,QAAI,MAAM,UAAU,MAAM,OAAO,SAAS,GAAG;AAC3C,UAAI,CAAC,UAAU,QAAQ;AACrB,kBAAU,SAAS,CAAC;AAAA,MACtB;AACA,gBAAU,OAAO,KAAK,GAAG,MAAM,MAAM;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,2BACZ,YACA,QACA,OACe;AACf,UAAM,YAAY,OAAO,aAAa;AAEtC,qBAAiB,SAAS,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACzD;AAAA,MACA,QAAQ,OAAO;AAAA,IACjB,CAAC,GAAG;AACF,YAAM,UAA4B,CAAC;AAEnC,iBAAW,UAAU,OAAO;AAC1B,cAAM;AAEN,YAAI;AACF,cAAI,kBAIO;AAGX,cAAI,OAAO,eAAe;AACxB,8BAAkB,KAAK;AAAA,cACrB,OAAO,WAAW,OAAO,aAAa;AAAA,YACxC;AAAA,UACF,WAES,OAAO,YAAY;AAC1B,kBAAM,OAAO,OAAO,QAAQ,OAAO,UAAU,WAAW;AACxD,gBAAI,OAAO,SAAS,UAAU;AAC5B,gCAAkB,KAAK,eAAe,IAAI;AAAA,YAC5C;AAAA,UACF;AAEA,cAAI,iBAAiB;AACnB,kBAAM,WAAgC;AAAA,cACpC,mBAAmB,gBAAgB;AAAA,cACnC,mBAAmB,gBAAgB;AAAA,YACrC;AAEA,gBAAI,gBAAgB,MAAM;AACxB,uBAAS,mBAAmB,gBAAgB;AAAA,YAC9C;AAEA,oBAAQ,KAAK;AAAA,cACX,IAAI,OAAO;AAAA,cACX;AAAA,YACF,CAAC;AAAA,UACH,OAAO;AACL,kBAAM;AAAA,UACR;AAAA,QACF,SAAS,OAAO;AACd,gBAAM;AACN,gBAAM,QAAQ;AAAA,YACZ,2BAA2B,OAAO,EAAE,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnG;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAI;AACF,gBAAM,KAAK,QAAQ,eAAe,YAAY,OAAO;AACrD,gBAAM,kBAAkB,QAAQ;AAAA,QAClC,SAAS,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,UACnF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,uBACN,aACuD;AACvD,QAAI,CAAC,eAAe,OAAO,gBAAgB,UAAU;AACnD,aAAO;AAAA,IACT;AAEA,UAAM,QAAQ,YAAY,MAAM,GAAG,EAAE,OAAO,OAAK,EAAE,KAAK,MAAM,EAAE;AAChE,QAAI,MAAM,WAAW,GAAG;AACtB,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO,MAAM;AAAA,MACb,OAAO,MAAM,MAAM,SAAS,CAAC;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,eACN,MACyC;AAEzC,UAAM,WAAW,KAAK,uBAAuB,IAAI;AACjD,QAAI,SAAU,QAAO;AAGrB,UAAM,OAAO,KAAK,mBAAmB,IAAI;AACzC,QAAI,KAAM,QAAO;AAGjB,UAAM,UAAU,KAAK,sBAAsB,IAAI;AAC/C,QAAI,QAAS,QAAO;AAGpB,WAAO,EAAE,OAAO,GAAG,OAAO,cAAc;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,uBACN,MACyC;AACzC,UAAM,QAAQ,KAAK,MAAM,oBAAoB;AAC7C,QAAI,OAAO;AACT,YAAM,QAAQ,MAAM,CAAC,EAAE;AACvB,YAAM,QAAQ,MAAM,CAAC,EAAE,KAAK;AAC5B,aAAO,EAAE,OAAO,MAAM;AAAA,IACxB;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,mBACN,MACyC;AACzC,UAAM,QAAQ,KAAK,MAAM,4BAA4B;AACrD,QAAI,OAAO;AACT,YAAM,QAAQ,SAAS,MAAM,CAAC,GAAG,EAAE;AACnC,YAAM,QAAQ,MAAM,CAAC,EAAE,KAAK;AAC5B,aAAO,EAAE,OAAO,MAAM;AAAA,IACxB;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,sBACN,MACyC;AACzC,UAAM,QAAQ,KAAK,MAAM,oBAAoB;AAC7C,QAAI,OAAO;AACT,YAAM,QAAQ,MAAM,CAAC,EAAE,KAAK;AAC5B,aAAO,EAAE,OAAO,GAAG,MAAM;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AACF;;;ACp5BO,IAAM,qBAAqB;AAK3B,IAAM,wBAAwB;AAM9B,SAAS,eAAe,MAAsB;AACnD,SAAO,KAAK,KAAK,KAAK,SAAS,CAAC;AAClC;AAKO,SAAS,cAAc,QAAwB;AACpD,SAAO,SAAS;AAClB;;;AC3BO,IAAM,mBAAN,MAA8C;AAA
A,EAClC,aAAa;AAAA,IAC5B;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,EACF;AAAA,EAEA,MAAM,MAAc,QAAmC;AACrD,QAAI,CAAC,KAAM,QAAO,CAAC;AAEnB,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,eAAe,QAAQ,gBAAgB;AAE7C,UAAM,WAAW,cAAc,SAAS;AACxC,UAAM,eAAe,cAAc,YAAY;AAE/C,QAAI,KAAK,UAAU,UAAU;AAC3B,aAAO,CAAC;AAAA,QACN;AAAA,QACA,OAAO;AAAA,QACP,UAAU;AAAA,UACR,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,aAAa;AAAA,UACb,WAAW;AAAA,UACX,SAAS,KAAK;AAAA,QAChB;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,SAAS,KAAK,eAAe,MAAM,UAAU,CAAC;AACpD,UAAM,SAAS,KAAK,WAAW,QAAQ,YAAY;AAEnD,WAAO,OAAO,IAAI,CAAC,OAAO,WAAW;AAAA,MACnC,MAAM,MAAM;AAAA,MACZ;AAAA,MACA,UAAU;AAAA,QACR,QAAQ;AAAA;AAAA,QACR,YAAY;AAAA,QACZ,aAAa,OAAO;AAAA,QACpB,WAAW,MAAM;AAAA,QACjB,SAAS,MAAM;AAAA,MACjB;AAAA,IACF,EAAE;AAAA,EACJ;AAAA,EAEQ,eACN,MACA,UACA,gBACqD;AACrD,QAAI,KAAK,UAAU,UAAU;AAC3B,aAAO,CAAC,EAAE,MAAM,OAAO,GAAG,KAAK,KAAK,OAAO,CAAC;AAAA,IAC9C;AAEA,QAAI,kBAAkB,KAAK,WAAW,QAAQ;AAE5C,YAAMC,UAA8D,CAAC;AACrE,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK,UAAU;AAC9C,QAAAA,QAAO,KAAK;AAAA,UACV,MAAM,KAAK,MAAM,GAAG,IAAI,QAAQ;AAAA,UAChC,OAAO;AAAA,UACP,KAAK,KAAK,IAAI,IAAI,UAAU,KAAK,MAAM;AAAA,QACzC,CAAC;AAAA,MACH;AACA,aAAOA;AAAA,IACT;AAEA,UAAM,YAAY,KAAK,WAAW,cAAc;AAChD,UAAM,QAAQ,YAAY,KAAK,MAAM,SAAS,IAAI,CAAC,IAAI;AAEvD,QAAI,MAAM,UAAU,GAAG;AAErB,aAAO,KAAK,eAAe,MAAM,UAAU,iBAAiB,CAAC;AAAA,IAC/D;AAGA,UAAM,SAA8D,CAAC;AACrE,QAAI,eAAyB,CAAC;AAC9B,QAAI,eAAe;AACnB,QAAI,gBAAgB;AAEpB,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,OAAO,MAAM,CAAC;AACpB,YAAM,WAAW,aAAa,SAAS,IACnC,CAAC,GAAG,cAAc,IAAI,EAAE,KAAK,SAAS,IACtC;AAEJ,UAAI,SAAS,UAAU,UAAU;AAC/B,YAAI,aAAa,WAAW,GAAG;AAC7B,yBAAe;AAAA,QACjB;AACA,qBAAa,KAAK,IAAI;AAAA,MACxB,OAAO;AAEL,YAAI,aAAa,SAAS,GAAG;AAC3B,gBAAM,YAAY,aAAa,KAAK,SAAS;AAC7C,iBAAO,KAAK;AAAA,YACV,MAAM;AAAA,YACN,OAAO;AAAA,YACP,KAAK,eAAe,UAAU;AAAA,UAChC,CAAC;AAAA,QACH;AAEA,uBAAe;AAEf,YAAI,KAAK,SAAS,UAAU;AAC1B,gBAAM,YAAY,KAAK,eAAe,MAAM,UAAU,iBAAiB,CAAC;AACxE,qBAAW,OAAO,WAAW;AAC3B,mBAAO,KAAK;AAAA,cACV,MAAM,IAAI;AAAA,cACV,OAAO,eAAe,IAAI;AAAA,cAC1B,KAAK,eAAe,IAAI;AAAA,YAC1B,CAAC;AAAA,UACH;AACA,yBAAe,CAAC;AAAA,QAClB,OAAO;AACL,yBAAe,CAAC,IAAI;AAAA,QACtB;AAAA,MACF;AACA,uBAAiB,KAAK,UAAU,IAAI,MAAM,SAAS,IAAI,UAAU,SAAS;AAAA,IAC5E;AAGA,QAAI,aAAa,SAAS,GAAG;AAC3B,YAAM,YAAY,aAAa,KAAK,SAAS;AAC7C,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO;AAAA,QACP,KAAK,eAAe,UAAU;AAAA,MAChC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,WACN,QACA,cACqD;AACrD,QAAI,iBAAiB,KAAK,OAAO,UAAU,GAAG;AAC5C,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,CAAC,OAAO,CAAC,CAAC;AAEzB,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,YAAM,YAAY,OAAO,IAAI,CAAC;AAC9B,YAAM,YAAY,OAAO,CAAC;AAG1B,YAAM,cAAc,UAAU,KAAK,MAAM,CAAC,YAAY;AAEtD,aAAO,KAAK;AAAA,QACV,MAAM,cAAc,UAAU;AAAA,QAC9B,OAAO,KAAK,IAAI,GAAG,UAAU,MAAM,YAAY;AAAA,QAC/C,KAAK,UAAU;AAAA,MACjB,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AACF;;;ACxKA,YAAY,UAAU;AAMf,IAAM,oBAAN,MAAwB;AAAA,EAG7B,YACU,SACA,UACA,gBACR,SACA;AAJQ;AACA;AACA;AAGR,SAAK,iBAAiB,WAAW,IAAI,iBAAiB;AAAA,EACxD;AAAA,EATQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBR,MAAM,OACJ,SACA,YACA,QACyB;AACzB,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,cAAc,MAAM,QAAQ,OAAO,IAAI,UAAU,CAAC,OAAO;AAE/D,UAAM,QAAwB;AAAA,MAC5B,oBAAoB;AAAA,MACpB,oBAAoB;AAAA,MACpB,iBAAiB;AAAA,MACjB,eAAe;AAAA,MACf,gBAAgB;AAAA,MAChB,QAAQ;AAAA,MACR,QAAQ,CAAC;AAAA,IACX;AAEA,UAAM,iBAAiB,YAAY;AAEnC,eAAW,UAAU,aAAa;AAEhC,cAAQ,aAAa;AAAA,QACnB,OAAO;AAAA,QACP,oBAAoB,MAAM;AAAA,QAC1B;AAAA,QACA,iBAAiB,MAAM;AAAA,QACvB,iBAAiB;AAAA,MACnB,CAAC;AAED,UAAI;AACF,cAAM,KAAK,WAAW,QAAQ,YAAY,QAAQ,OAAO,cAAc;AACvE,cAAM;AAAA,MACR,SAAS,OAAO;AACd,cAAM;AACN,cAAM,OAAQ,KAAK;AAAA,UACjB;AAAA,UACA,OAAO;AAAA,UACP;AAAA,QACF,CAAC;AAAA,MACH;AACA,YAAM;AAAA,IACR;AAEA,UAA
M,SAAS,KAAK,IAAI,IAAI;AAC5B,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,WACZ,UACA,YACA,QACA,OACA,gBACe;AAEf,UAAM,MAAM,MAAM,KAAK,eAAe,KAAK,QAAQ;AACnD,YAAQ,mBAAmB,GAAG;AAG9B,YAAQ,aAAa;AAAA,MACnB,OAAO;AAAA,MACP,oBAAoB,MAAM;AAAA,MAC1B;AAAA,MACA,iBAAiB,MAAM;AAAA,MACvB,iBAAiB;AAAA,IACnB,CAAC;AAGD,UAAM,UAAU,QAAQ,WAAW,KAAK;AACxC,UAAM,SAAS,QAAQ,MAAM,IAAI,MAAM;AAAA,MACrC,WAAW,QAAQ;AAAA,MACnB,cAAc,QAAQ;AAAA,IACxB,CAAC;AAGD,eAAW,SAAS,QAAQ;AAC1B,YAAM,SAAS,SAAS,IAAI;AAAA,IAC9B;AAEA,UAAM,iBAAiB,OAAO;AAC9B,YAAQ,kBAAkB,MAAM;AAGhC,YAAQ,aAAa;AAAA,MACnB,OAAO;AAAA,MACP,oBAAoB,MAAM;AAAA,MAC1B;AAAA,MACA,iBAAiB,MAAM;AAAA,MACvB,aAAa,MAAM;AAAA,MACnB,iBAAiB;AAAA,IACnB,CAAC;AAGD,UAAM,QAAQ,OAAO,IAAI,CAAC,MAAiB,EAAE,IAAI;AACjD,UAAM,aAAa,MAAM,KAAK,SAAS,WAAW,KAAK;AAGvD,UAAM,UAA0B,OAAO,IAAI,CAAC,OAAkB,MAAc;AAC1E,YAAM,WAAW,KAAK,cAAc,KAAK,OAAO,MAAM;AAEtD,aAAO;AAAA,QACL,IAAI,GAAQ,cAAS,IAAI,MAAM,CAAC,IAAI,MAAM,KAAK;AAAA,QAC/C,WAAW,WAAW,CAAC;AAAA,QACvB,MAAM,MAAM;AAAA,QACZ;AAAA,MACF;AAAA,IACF,CAAC;AAGD,YAAQ,aAAa;AAAA,MACnB,OAAO;AAAA,MACP,oBAAoB,MAAM;AAAA,MAC1B;AAAA,MACA,iBAAiB,MAAM;AAAA,MACvB,aAAa,MAAM;AAAA,MACnB,iBAAiB;AAAA,IACnB,CAAC;AAGD,UAAM,YAAY,QAAQ,aAAa;AACvC,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,WAAW;AAClD,YAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI,SAAS;AAC5C,YAAM,KAAK,QAAQ,OAAO,YAAY,KAAK;AAC3C,YAAM,kBAAkB,MAAM;AAAA,IAChC;AAAA,EACF;AAAA,EAEQ,cACN,KACA,OACA,QACqB;AAErB,UAAMC,YAAgB,cAAS,IAAI,QAAa,aAAQ,IAAI,MAAM,CAAC;AACnE,UAAMC,WAAe,aAAQ,IAAI,MAAM;AAEvC,UAAM,eAAoC;AAAA,MACxC,CAAC,eAAe,MAAM,GAAG,IAAI;AAAA,MAC7B,CAAC,eAAe,QAAQ,GAAG,IAAI;AAAA,MAC/B,CAAC,eAAe,MAAM,GAAGD;AAAA,MACzB,CAAC,eAAe,SAAS,GAAGC;AAAA,IAC9B;AAGA,UAAM,oBAAoB,QAAQ,oBAAoB,GAAG,KAAK,CAAC;AAG/D,UAAM,eAAe,QAAQ,YAAY,CAAC;AAG1C,UAAM,gBAAgB;AAAA,MACpB,YAAY,MAAM,SAAS;AAAA,MAC3B,aAAa,MAAM,SAAS;AAAA,MAC5B,WAAW,MAAM,SAAS;AAAA,MAC1B,SAAS,MAAM,SAAS;AAAA,IAC1B;AAGA,WAAO;AAAA,MACL,GAAG;AAAA,MACH,GAAG;AAAA,MACH,GAAG;AAAA,MACH,GAAG;AAAA,IACL;AAAA,EACF;AACF;;;ACvMA,YAAY,QAAQ;AACpB,YAAYC,WAAU;AAQf,IAAM,aAAN,MAA2C;AAAA,EAChD,UAAU,UAA2B;AACnC,WAAO,eAAe,KAAK,QAAQ;AAAA,EACrC;AAAA,EAEA,MAAM,KAAK,UAAqC;AAC9C,UAAM,OAAO,MAAS,YAAS,UAAU,OAAO;AAChD,UAAM,OAAY,cAAQ,QAAQ,EAAE,MAAM,CAAC,EAAE,YAAY;AACzD,UAAM,QAAQ,MAAS,QAAK,QAAQ;AACpC,UAAM,YAAiB,cAAQ,QAAQ;AAEvC,WAAO;AAAA,MACL;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,MACA,UAAU;AAAA,QACR,MAAM,MAAM;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;AC9BA,YAAYC,SAAQ;AACpB,OAAO,cAAc;AAQd,IAAM,YAAN,MAA0C;AAAA,EAC/C,UAAU,UAA2B;AACnC,WAAO,UAAU,KAAK,QAAQ;AAAA,EAChC;AAAA,EAEA,MAAM,KAAK,UAAqC;AAC9C,UAAM,aAAa,MAAS,aAAS,QAAQ;AAC7C,UAAM,UAAU,MAAM,SAAS,UAAU;AAEzC,WAAO;AAAA,MACL,MAAM,QAAQ;AAAA,MACd,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,UAAU;AAAA,QACR,OAAO,QAAQ;AAAA,QACf,MAAM,QAAQ;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AACF;;;AC5BA,OAAO,aAAa;AAQb,IAAM,aAAN,MAA2C;AAAA,EAChD,UAAU,UAA2B;AACnC,WAAO,WAAW,KAAK,QAAQ;AAAA,EACjC;AAAA,EAEA,MAAM,KAAK,UAAqC;AAC9C,UAAM,SAAS,MAAM,QAAQ,eAAe,EAAE,MAAM,SAAS,CAAC;AAE9D,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,UAAU;AAAA,QACR,UAAU,OAAO;AAAA;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AACF;;;ACzBA,YAAYC,SAAQ;AACpB,YAAY,aAAa;AAQlB,IAAM,aAAN,MAA2C;AAAA,EAChD,UAAU,UAA2B;AACnC,WAAO,YAAY,KAAK,QAAQ;AAAA,EAClC;AAAA,EAEA,MAAM,KAAK,UAAqC;AAC9C,UAAM,OAAO,MAAS,aAAS,UAAU,OAAO;AAChD,UAAM,IAAY,aAAK,IAAI;AAG3B,MAAE,4BAA4B,EAAE,OAAO;AAGvC,UAAM,OAAO,EAAE,MAAM,EAAE,KAAK,EACzB,QAAQ,QAAQ,GAAG,EACnB,KAAK;AAER,WAAO;AAAA,MACL;AAAA,MACA,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,UAAU;AAAA,QACR,OAAO,EAAE,OAAO,EAAE,KAAK,KAAK;AAAA,QAC5B,aAAa,EAAE,0BAA0B,EAAE,KAAK,SAAS,KAAK;AAAA,MAChE;AAAA,IACF;AAAA,EACF;AACF;;;ACzBO,IAAM,iBAAN,MAAqB;AAAA,EAClB,UAA4B,CAAC;AAAA,EAErC,cAAc;AAEZ,SAAK,SAAS,IAAI,WAAW,CAAC;AAC9B,SAAK,SAAS,IA
AI,UAAU,CAAC;AAC7B,SAAK,SAAS,IAAI,WAAW,CAAC;AAC9B,SAAK,SAAS,IAAI,WAAW,CAAC;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,SAAS,QAA8B;AACrC,SAAK,QAAQ,KAAK,MAAM;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAQ,UAA2B;AACjC,WAAO,KAAK,QAAQ,KAAK,OAAK,EAAE,UAAU,QAAQ,CAAC;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,KAAK,UAAqC;AAC9C,UAAM,SAAS,KAAK,QAAQ,KAAK,OAAK,EAAE,UAAU,QAAQ,CAAC;AAC3D,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,6BAA6B,QAAQ,EAAE;AAAA,IACzD;AACA,WAAO,OAAO,KAAK,QAAQ;AAAA,EAC7B;AACF;;;ACxCO,IAAM,eAAN,MAA0C;AAAA,EAC/C,MAAM,MAAc,QAAmC;AACrD,QAAI,CAAC,KAAM,QAAO,CAAC;AAEnB,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,eAAe,QAAQ,gBAAgB;AAE7C,UAAM,WAAW,cAAc,SAAS;AACxC,UAAM,eAAe,cAAc,YAAY;AAC/C,UAAM,OAAO,WAAW;AAExB,QAAI,KAAK,UAAU,UAAU;AAC3B,aAAO,CAAC;AAAA,QACN;AAAA,QACA,OAAO;AAAA,QACP,UAAU;AAAA,UACR,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,aAAa;AAAA,UACb,WAAW;AAAA,UACX,SAAS,KAAK;AAAA,QAChB;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,SAAsB,CAAC;AAC7B,QAAI,WAAW;AAEf,WAAO,WAAW,KAAK,QAAQ;AAC7B,YAAM,MAAM,KAAK,IAAI,KAAK,QAAQ,WAAW,QAAQ;AACrD,YAAM,YAAY,KAAK,MAAM,UAAU,GAAG;AAE1C,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,OAAO,OAAO;AAAA,QACd,UAAU;AAAA,UACR,QAAQ;AAAA,UACR,YAAY,OAAO;AAAA,UACnB,aAAa;AAAA;AAAA,UACb,WAAW;AAAA,UACX,SAAS;AAAA,QACX;AAAA,MACF,CAAC;AAED,kBAAY;AAEZ,UAAI,QAAQ,EAAG;AAAA,IACjB;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,SAAS,cAAc,OAAO;AAAA,IACtC;AAEA,WAAO;AAAA,EACT;AACF;;;ACxDO,IAAM,kBAAN,MAA6C;AAAA,EAClD,MAAM,MAAc,QAAmC;AACrD,QAAI,CAAC,KAAM,QAAO,CAAC;AAEnB,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,eAAe,QAAQ,gBAAgB;AAE7C,UAAM,WAAW,cAAc,SAAS;AACxC,UAAM,eAAe,cAAc,YAAY;AAG/C,UAAM,YAAY,KAAK,eAAe,IAAI;AAE1C,QAAI,UAAU,WAAW,GAAG;AAC1B,aAAO,CAAC;AAAA,QACN;AAAA,QACA,OAAO;AAAA,QACP,UAAU;AAAA,UACR,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,aAAa;AAAA,UACb,WAAW;AAAA,UACX,SAAS,KAAK;AAAA,QAChB;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,YAAiE,CAAC;AACxE,QAAI,mBAA6B,CAAC;AAClC,QAAI,eAAe;AAEnB,eAAW,YAAY,WAAW;AAChC,YAAM,WAAW,iBAAiB,SAAS,IACvC,CAAC,GAAG,kBAAkB,QAAQ,EAAE,KAAK,GAAG,IACxC;AAEJ,UAAI,iBAAiB,WAAW,GAAG;AACjC,2BAAmB,CAAC,QAAQ;AAC5B,uBAAe,KAAK,QAAQ,QAAQ;AAAA,MACtC,WAAW,SAAS,UAAU,UAAU;AACtC,yBAAiB,KAAK,QAAQ;AAAA,MAChC,OAAO;AAEL,cAAM,YAAY,iBAAiB,KAAK,GAAG;AAC3C,kBAAU,KAAK;AAAA,UACb,MAAM;AAAA,UACN,OAAO;AAAA,UACP,KAAK,eAAe,UAAU;AAAA,QAChC,CAAC;AAGD,2BAAmB,CAAC,QAAQ;AAC5B,uBAAe,KAAK,QAAQ,UAAU,eAAe,CAAC;AACtD,YAAI,iBAAiB,GAAI,gBAAe;AAAA,MAC1C;AAAA,IACF;AAGA,QAAI,iBAAiB,SAAS,GAAG;AAC/B,YAAM,YAAY,iBAAiB,KAAK,GAAG;AAC3C,gBAAU,KAAK;AAAA,QACb,MAAM;AAAA,QACN,OAAO;AAAA,QACP,KAAK,eAAe,UAAU;AAAA,MAChC,CAAC;AAAA,IACH;AAGA,UAAM,cAAc,KAAK,mBAAmB,WAAW,YAAY;AAEnE,WAAO,YAAY,IAAI,CAAC,OAAO,WAAW;AAAA,MACxC,MAAM,MAAM;AAAA,MACZ;AAAA,MACA,UAAU;AAAA,QACR,QAAQ;AAAA,QACR,YAAY;AAAA,QACZ,aAAa,YAAY;AAAA,QACzB,WAAW,MAAM;AAAA,QACjB,SAAS,MAAM;AAAA,MACjB;AAAA,IACF,EAAE;AAAA,EACJ;AAAA,EAEQ,eAAe,MAAwB;AAE7C,UAAM,QAAQ,KAAK,MAAM,iCAAiC;AAC1D,QAAI,CAAC,MAAO,QAAO,CAAC,IAAI;AACxB,WAAO,MAAM,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC;AAAA,EAC1D;AAAA,EAEQ,mBACN,QACA,cACqD;AACrD,QAAI,iBAAiB,KAAK,OAAO,UAAU,GAAG;AAC5C,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,CAAC,OAAO,CAAC,CAAC;AAEzB,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,YAAM,YAAY,OAAO,IAAI,CAAC;AAC9B,YAAM,YAAY,OAAO,CAAC;AAG1B,YAAM,gBAAgB,KAAK,eAAe,UAAU,IAAI;AACxD,YAAM,eAAe,cAAc,cAAc,SAAS,CAAC,KAAK;AAEhE,UAAI,gBAAgB,aAAa,UAAU,cAAc;AACvD,eAAO,KAAK;AAAA,UACV,MAAM,eAAe,MAAM,UAAU;AAAA,UACrC,OAAO,KAAK,IAAI,GAAG,UAAU,MAAM,aAAa,MAAM;AAAA,UACtD,KAAK,UAAU;AAAA,QACjB,CAAC;AAAA,MACH,OAAO;AACL,eAAO,KAAK,SAAS;AAAA,MACvB;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;;;ACtHA,IAAM,gBAAgB;AAEtB,IAAM,4BACJ;AA6BK,IAAM,YAAN,MAAgB;AAAA,EACJ;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,
EACA;AAAA,EAEjB,YAAY,QAAyB;AACnC,SAAK,UAAU,OAAO;AACtB,SAAK,WAAW,OAAO;AACvB,SAAK,MAAM,OAAO;AAClB,SAAK,oBAAoB,OAAO;AAChC,SAAK,cAAc,OAAO,eAAe;AAGzC,SAAK,gBAAgB,IAAI,iBAAiB,KAAK,SAAS,KAAK,QAAQ;AACrE,SAAK,oBAAoB,IAAI;AAAA,MAC3B,KAAK;AAAA,MACL,KAAK;AAAA,MACL,IAAI,eAAe;AAAA,IACrB;AACA,SAAK,qBAAqB,IAAI,mBAAmB,KAAK,OAAO;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,iBACJ,MACA,WACA,QACe;AACf,UAAM,MAAM,aAAa,KAAK,SAAS;AACvC,UAAM,KAAK,QAAQ,iBAAiB,MAAM,KAAK,MAAM;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAiB,MAA6B;AAClD,UAAM,KAAK,QAAQ,iBAAiB,IAAI;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAiB,MAAgC;AACrD,WAAO,KAAK,QAAQ,iBAAiB,IAAI;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,OACJ,SACA,YACA,QACyB;AACzB,UAAM,MAAM,cAAc,KAAK;AAC/B,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,WAAO,KAAK,kBAAkB,OAAO,SAAS,KAAK,MAAM;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,SACJ,OACA,SAC0B;AAC1B,UAAM,aAAa,SAAS,cAAc,KAAK;AAC/C,QAAI,CAAC,YAAY;AACf,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,OAAO,SAAS,QAAQ,KAAK;AAGnC,QAAI;AACJ,QAAI;AACJ,UAAM,gBAAgB,SAAS;AAE/B,QAAI,SAAS,WAAW;AACtB,wBAAkB;AAAA,QAChB,OAAO,eAAe;AAAA,QACtB,IAAI;AAAA,QACJ,OAAO,QAAQ;AAAA,MACjB;AAAA,IACF;AAEA,QAAI,SAAS,OAAO;AAClB,0BAAoB;AAAA,QAClB,OAAO,iBAAiB;AAAA,QACxB,IAAI;AAAA,QACJ,OAAO,QAAQ;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,SAAS;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,QAAI,SAAS,YAAY,YAAY;AACnC,YAAM,UAAU,MAAM,KAAK,cAAc,iBAAiB,MAAM;AAEhE,YAAM,UAAU,MAAM,KAAK,QAAQ,OAAO,CAAC,EAAE,KAAK;AAClD,aAAO,EAAE,SAAS,OAAO,gBAAgB,EAAE,UAAU,iBAAiB,YAAY,mBAAmB,QAAQ,cAAc,EAAE;AAAA,IAC/H;AAEA,QAAI,SAAS,YAAY,SAAS;AAChC,YAAM,UAAU,MAAM,KAAK,cAAc,mBAAmB,MAAM;AAClE,YAAM,UAAU,MAAM,KAAK,QAAQ,OAAO,CAAC,EAAE,KAAK;AAClD,aAAO,EAAE,SAAS,OAAO,gBAAgB,EAAE,UAAU,iBAAiB,YAAY,mBAAmB,QAAQ,cAAc,EAAE;AAAA,IAC/H;AAEA,WAAO,KAAK,cAAc,SAAS,MAAM;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,OACJ,YACA,QAC0B;AAC1B,WAAO,KAAK,mBAAmB,UAAU,YAAY,MAAM;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,MACJ,UACA,SACsB;AACtB,QAAI,CAAC,KAAK,KAAK;AACb,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAGA,UAAM,kBAAkB,MAAM,KAAK,SAAS,UAAU,OAAO;AAG7D,UAAM,UAAU,gBAAgB,QAC7B,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,OAAO,OAAO,EACd,KAAK,MAAM;AAGd,UAAM,eAAe,SAAS,gBAAgB;AAC9C,UAAM,SAAS,GAAG,YAAY;AAAA;AAAA;AAAA,EAAiB,OAAO;AAAA;AAAA,YAAiB,QAAQ;AAG/E,UAAM,SAAS,MAAM,KAAK,IAAI,SAAS,QAAQ;AAAA,MAC7C,aAAa,SAAS;AAAA,MACtB,WAAW,SAAS;AAAA,IACtB,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA,SAAS,gBAAgB;AAAA,MACzB,OAAO;AAAA,MACP;AAAA,IACF;AAAA,EACF;AACF;","names":["allScores","allScores","result","basename","dirname","path","fs","fs"]}