@nexo-labs/payload-typesense 1.5.1 → 1.5.2
This diff reflects the changes between publicly available package versions as published to one of the supported registries, and is provided for informational purposes only.
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs +1 -1
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs.map CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.mjs","names":["openaiClient: OpenAI | null","currentOpenAIApiKey: string | null","geminiClient: GoogleGenerativeAI | null","currentGeminiApiKey: string | null","embeddings: number[][]","request: TypesenseSearchRequest","params: AdvancedSearchParams","event: ConversationEvent","allSources: ChunkSource[]","source: ChunkSource","sources: ChunkSource[]","conversationId: string | null","error: unknown","error: unknown","newUserMessage: ChatMessageWithSources","newAssistantMessage: ChatMessageWithSources","searchConfig: RAGSearchConfig","spendingEntries: SpendingEntry[]","conversationIdCapture: string | null","sourcesCapture: ChunkSource[]","resolveDocumentType","sources: ChunkSource[]","conversationId: string | null","resolveDocumentType","llmSpending: SpendingEntry","conversationId: string | null","resolveDocumentType","llmSpending: SpendingEntry","session","error: unknown","endpoints: Array<{ path: string; method: 'connect' | 'delete' | 'get' | 'head' | 'options' | 'patch' | 'post' | 'put'; handler: PayloadHandler }>","collections: Array<Record<string, unknown>>","fields: { name: string; facet?: boolean; index?: boolean }[]","buildOptions: {\n page: number;\n per_page: number;\n searchFields?: string[];\n sort_by?: string;\n exclude_fields?: string;\n }","fields: { name: string; index?: boolean; type?: string }[]","schemaError: unknown","fields: { name: string; index?: boolean; type?: string }[]","results: CollectionResult[]","searchParams: Record<string, unknown>","searchFields: string[] | undefined","fields: { name: string; index?: boolean; type?: string }[]","typesenseClient: Client","pluginOptions: ModularPluginConfig","pluginOptions: ModularPluginConfig","allowedTableNames: Set<string>","allTableNames: Set<string>","targetTables: string[]","pluginOptions: ModularPluginConfig","searchConfigs: Array<[string, TableConfig]>","collectionName: string","collectionNameStr: string","collections: string[] | undefined","errors: string[]","result: {\n q: string;\n page: number;\n per_page: number;\n sort_by?: string;\n mode?: 'simple' | 'semantic';\n collections?: string[];\n exclude_fields?: string;\n query_by?: string;\n simple?: boolean;\n errors?: string[];\n }","DEFAULT_EMBEDDING_DIMENSIONS","DEFAULT_EMBEDDING_DIMENSIONS","client: Client","config: ModularPluginConfig","targetSchema: CollectionCreateSchema","error: unknown","DEFAULT_EMBEDDING_DIMENSIONS","client: Client","config: AgentManagerConfig","err","logger","client: Client","error: unknown","searchParams: Record<string, unknown>","typesenseField: CollectionFieldSchema","parts: string[]","docDeleteError: unknown","chunkDeleteError: unknown","error: 
unknown"],"sources":["../src/core/client/typesense-client.ts","../src/features/embedding/embeddings.ts","../src/features/rag/query-builder.ts","../src/features/rag/stream-handler.ts","../src/features/rag/setup.ts","../src/features/rag/handlers/rag-search-handler.ts","../src/features/rag/handlers/chunk-fetch-handler.ts","../src/features/rag/handlers/session-handlers.ts","../src/features/rag/utils/sse-utils.ts","../src/features/rag/chat-session-repository.ts","../src/features/rag/endpoints/chat/validators/request-validator.ts","../src/features/rag/endpoints/chat/handlers/embedding-handler.ts","../src/features/rag/endpoints/chat/handlers/session-handler.ts","../src/features/rag/endpoints/chat/handlers/token-limit-handler.ts","../src/features/rag/endpoints/chat/handlers/usage-stats-handler.ts","../src/features/rag/endpoints/chat/route.ts","../src/features/rag/stream-handlers/utils.ts","../src/features/rag/stream-handlers/streaming-handler.ts","../src/features/rag/stream-handlers/non-streaming-handler.ts","../src/features/rag/endpoints/chat/session/route.ts","../src/features/rag/endpoints/chunks/[id]/route.ts","../src/features/rag/endpoints/chat/agents/route.ts","../src/features/rag/endpoints.ts","../src/features/search/endpoints/handlers/collections-handler.ts","../src/shared/cache/cache.ts","../src/features/search/constants.ts","../src/features/search/results/process-traditional-results.ts","../src/features/search/traditional/build-params.ts","../src/features/search/traditional/search-collection.ts","../src/features/search/endpoints/handlers/executors/traditional-multi-collection-search.ts","../src/features/search/results/process-vector-results.ts","../src/features/search/vector/build-params.ts","../src/features/search/vector/build-multi-collection-params.ts","../src/features/search/vector/generate-vector.ts","../src/features/search/services/search-service.ts","../src/features/search/endpoints/handlers/utils/document-transformer.ts","../src/core/utils/naming.ts","../src/features/search/endpoints/handlers/utils/target-resolver.ts","../src/features/search/endpoints/handlers/utils/config-mapper.ts","../src/core/config/config-validation.ts","../src/features/search/utils/extract-collection-name.ts","../src/features/search/utils/extract-search-params.ts","../src/features/search/endpoints/handlers/validators/search-request-validator.ts","../src/features/search/endpoints/handlers/search-handler.ts","../src/features/search/endpoints.ts","../src/core/config/constants.ts","../src/shared/schema/collection-schemas.ts","../src/features/sync/services/schema-manager.ts","../src/features/rag/services/agent-manager.ts","../src/plugin/create-rag-plugin.ts","../src/adapter/typesense-adapter.ts","../src/adapter/create-adapter.ts","../src/features/sync/services/document-delete.ts"],"sourcesContent":["import Typesense from 'typesense'\nimport type { Client } from 'typesense'\nimport type { TypesenseConnectionConfig } from '../../shared/types/plugin-types.js'\n\nexport const createTypesenseClient = (typesenseConfig: TypesenseConnectionConfig) => {\n return new Typesense.Client({\n apiKey: typesenseConfig.apiKey,\n connectionTimeoutSeconds: typesenseConfig.connectionTimeoutSeconds || 2,\n nodes: typesenseConfig.nodes,\n })\n}\n\nexport const testTypesenseConnection = async (client: Client): Promise<boolean> => {\n try {\n await client.health.retrieve()\n return true\n } catch (_error) {\n // Handle Typesense connection error\n return false\n }\n}\n","import OpenAI from \"openai\";\nimport { GoogleGenerativeAI, 
TaskType } from \"@google/generative-ai\";\nimport type {\n EmbeddingProviderConfig,\n EmbeddingWithUsage,\n BatchEmbeddingWithUsage,\n} from \"../../shared/types/plugin-types.js\";\nimport {\n logger,\n DEFAULT_EMBEDDING_DIMENSIONS,\n DEFAULT_EMBEDDING_MODEL,\n DEFAULT_GEMINI_EMBEDDING_MODEL,\n MIN_EMBEDDING_TEXT_LENGTH,\n} from \"@nexo-labs/payload-indexer\";\n\nlet openaiClient: OpenAI | null = null;\nlet currentOpenAIApiKey: string | null = null;\n\nlet geminiClient: GoogleGenerativeAI | null = null;\nlet currentGeminiApiKey: string | null = null;\n\nconst getOpenAIClient = (apiKey?: string): OpenAI | null => {\n const key = apiKey || process.env.OPENAI_API_KEY;\n\n if (!key) {\n return null;\n }\n\n // Recreate client if API key changed\n if (!openaiClient || currentOpenAIApiKey !== key) {\n openaiClient = new OpenAI({\n apiKey: key,\n });\n currentOpenAIApiKey = key;\n }\n\n return openaiClient;\n};\n\nconst getGeminiClient = (apiKey?: string): GoogleGenerativeAI | null => {\n const key = apiKey || process.env.GOOGLE_API_KEY;\n\n if (!key) {\n return null;\n }\n\n // Recreate client if API key changed\n if (!geminiClient || currentGeminiApiKey !== key) {\n geminiClient = new GoogleGenerativeAI(key);\n currentGeminiApiKey = key;\n }\n\n return geminiClient;\n};\n\n/**\n * Generates an embedding for the given text using OpenAI or Gemini API\n * @param text - The text to generate an embedding for\n * @param config - Optional embedding configuration (provider, model, dimensions, apiKey)\n * @returns The embedding vector as an array of numbers, or null if generation fails\n */\nexport const generateEmbedding = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<number[] | null> => {\n if (!text || text.trim().length < MIN_EMBEDDING_TEXT_LENGTH) {\n logger.debug('Skipping embedding generation for empty or invalid text');\n return null;\n }\n\n const provider = config?.type || 'openai';\n\n if (provider === 'gemini') {\n return generateGeminiEmbedding(text, config);\n } else {\n return generateOpenAIEmbedding(text, config);\n }\n};\n\n/**\n * Generates an embedding using OpenAI API\n */\nconst generateOpenAIEmbedding = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<number[] | null> => {\n const client = getOpenAIClient(config?.apiKey);\n\n if (!client) {\n logger.debug('OpenAI API key not configured, skipping embedding generation');\n return null;\n }\n\n try {\n const model = config?.model || process.env.OPENAI_EMBEDDING_MODEL || DEFAULT_EMBEDDING_MODEL;\n const dimensions = config?.dimensions || DEFAULT_EMBEDDING_DIMENSIONS;\n\n logger.debug('Generating OpenAI embedding', { model, dimensions, textLength: text.length });\n\n const response = await client.embeddings.create({\n model,\n input: text.trim(),\n dimensions,\n });\n\n const embedding = response.data[0]?.embedding;\n\n logger.debug('OpenAI embedding generated', { embeddingLength: embedding?.length });\n\n if (\n !embedding ||\n !Array.isArray(embedding) ||\n embedding.length !== dimensions\n ) {\n logger.warn('Generated embedding has invalid dimensions', {\n expected: dimensions,\n received: embedding?.length,\n });\n return null;\n }\n\n return embedding;\n } catch (error) {\n logger.error('Failed to generate OpenAI embedding', error, {\n textLength: text.length,\n model: config?.model,\n });\n return null;\n }\n};\n\n/**\n * Generates an embedding using Google Gemini API\n */\nconst generateGeminiEmbedding = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<number[] 
| null> => {\n const client = getGeminiClient(config?.apiKey);\n\n if (!client) {\n logger.debug('Google API key not configured, skipping embedding generation');\n return null;\n }\n\n try {\n const model = config?.model || DEFAULT_GEMINI_EMBEDDING_MODEL;\n const dimensions = config?.dimensions || DEFAULT_EMBEDDING_DIMENSIONS;\n\n logger.debug('Generating Gemini embedding', { model, dimensions, textLength: text.length });\n\n const embeddingModel = client.getGenerativeModel({ model });\n const result = await embeddingModel.embedContent({\n content: { role: \"user\", parts: [{ text: text.trim() }] },\n taskType: TaskType.RETRIEVAL_DOCUMENT,\n });\n\n const embedding = result.embedding.values;\n\n logger.debug('Gemini embedding generated', { embeddingLength: embedding?.length });\n\n if (\n !embedding ||\n !Array.isArray(embedding) ||\n embedding.length !== dimensions\n ) {\n logger.warn('Generated embedding has invalid dimensions', {\n expected: dimensions,\n received: embedding?.length,\n });\n return null;\n }\n\n return embedding;\n } catch (error) {\n logger.error('Failed to generate Gemini embedding', error, {\n textLength: text.length,\n model: config?.model,\n });\n return null;\n }\n};\n\n/**\n * Generate embedding with usage tracking\n *\n * This function returns both the embedding and usage information (tokens used)\n *\n * @param text - The text to generate an embedding for\n * @param config - Optional embedding configuration\n * @returns Embedding with usage information, or null if generation fails\n */\nexport const generateEmbeddingWithUsage = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<EmbeddingWithUsage | null> => {\n if (!text || text.trim().length < MIN_EMBEDDING_TEXT_LENGTH) {\n logger.debug('Skipping embedding generation for empty or invalid text');\n return null;\n }\n\n const provider = config?.type || 'openai';\n\n if (provider === 'gemini') {\n return generateGeminiEmbeddingWithUsage(text, config);\n } else {\n return generateOpenAIEmbeddingWithUsage(text, config);\n }\n};\n\n/**\n * Generate OpenAI embedding with usage tracking\n */\nconst generateOpenAIEmbeddingWithUsage = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<EmbeddingWithUsage | null> => {\n const client = getOpenAIClient(config?.apiKey);\n\n if (!client) {\n logger.debug('OpenAI API key not configured, skipping embedding generation');\n return null;\n }\n\n try {\n const model = config?.model || process.env.OPENAI_EMBEDDING_MODEL || DEFAULT_EMBEDDING_MODEL;\n const dimensions = config?.dimensions || DEFAULT_EMBEDDING_DIMENSIONS;\n\n logger.debug('Generating OpenAI embedding with usage tracking', { model, dimensions });\n\n const response = await client.embeddings.create({\n model,\n input: text.trim(),\n dimensions,\n });\n\n const embedding = response.data[0]?.embedding;\n\n if (\n !embedding ||\n !Array.isArray(embedding) ||\n embedding.length !== dimensions\n ) {\n logger.warn('Generated embedding has invalid dimensions', {\n expected: dimensions,\n received: embedding?.length,\n });\n return null;\n }\n\n return {\n embedding,\n usage: {\n promptTokens: response.usage?.prompt_tokens || 0,\n totalTokens: response.usage?.total_tokens || 0,\n },\n };\n } catch (error) {\n logger.error('Failed to generate OpenAI embedding with usage', error, {\n textLength: text.length,\n model: config?.model,\n });\n return null;\n }\n};\n\n/**\n * Generate Gemini embedding with usage tracking\n * Note: Gemini doesn't provide token usage, so we estimate it\n */\nconst 
generateGeminiEmbeddingWithUsage = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<EmbeddingWithUsage | null> => {\n const embeddingResult = await generateGeminiEmbedding(text, config);\n\n if (!embeddingResult) {\n return null;\n }\n\n // Estimate tokens (rough approximation: 1 token ≈ 4 characters)\n const estimatedTokens = Math.ceil(text.length / 4);\n\n return {\n embedding: embeddingResult,\n usage: {\n promptTokens: estimatedTokens,\n totalTokens: estimatedTokens,\n },\n };\n};\n\n/**\n * Generate embeddings for multiple texts with usage tracking (batch)\n *\n * @param texts - Array of texts to generate embeddings for\n * @param config - Optional embedding configuration\n * @returns Embeddings with total usage information, or null if generation fails\n */\nexport const generateEmbeddingsBatchWithUsage = async (\n texts: string[],\n config?: EmbeddingProviderConfig\n): Promise<BatchEmbeddingWithUsage | null> => {\n if (!texts || texts.length === 0) {\n logger.debug('No texts provided for batch embedding generation');\n return null;\n }\n\n // Filter out empty texts\n const validTexts = texts.filter(t => t && t.trim().length >= MIN_EMBEDDING_TEXT_LENGTH);\n\n if (validTexts.length === 0) {\n logger.debug('No valid texts after filtering for batch embedding generation');\n return null;\n }\n\n const provider = config?.type || 'openai';\n\n if (provider === 'gemini') {\n return generateGeminiBatchEmbeddingsWithUsage(validTexts, config);\n } else {\n return generateOpenAIBatchEmbeddingsWithUsage(validTexts, config);\n }\n};\n\n/**\n * Generate OpenAI batch embeddings with usage tracking\n */\nconst generateOpenAIBatchEmbeddingsWithUsage = async (\n validTexts: string[],\n config?: EmbeddingProviderConfig\n): Promise<BatchEmbeddingWithUsage | null> => {\n const client = getOpenAIClient(config?.apiKey);\n\n if (!client) {\n logger.debug('OpenAI API key not configured, skipping batch embedding generation');\n return null;\n }\n\n try {\n const model = config?.model || process.env.OPENAI_EMBEDDING_MODEL || DEFAULT_EMBEDDING_MODEL;\n const dimensions = config?.dimensions || DEFAULT_EMBEDDING_DIMENSIONS;\n\n logger.debug('Generating OpenAI batch embeddings with usage tracking', {\n model,\n dimensions,\n batchSize: validTexts.length,\n });\n\n const response = await client.embeddings.create({\n model,\n input: validTexts.map(t => t.trim()),\n dimensions,\n });\n\n const embeddings = response.data.map(item => item.embedding);\n\n // Validate all embeddings\n const allValid = embeddings.every(\n emb => Array.isArray(emb) && emb.length === dimensions\n );\n\n if (!allValid) {\n logger.warn('Some generated embeddings have invalid dimensions', {\n expected: dimensions,\n batchSize: embeddings.length,\n });\n return null;\n }\n\n logger.info('OpenAI batch embeddings generated successfully', {\n count: embeddings.length,\n totalTokens: response.usage?.total_tokens || 0,\n });\n\n return {\n embeddings,\n usage: {\n promptTokens: response.usage?.prompt_tokens || 0,\n totalTokens: response.usage?.total_tokens || 0,\n },\n };\n } catch (error) {\n logger.error('Failed to generate OpenAI batch embeddings with usage', error, {\n batchSize: validTexts.length,\n model: config?.model,\n });\n return null;\n }\n};\n\n/**\n * Generate Gemini batch embeddings with usage tracking\n * Note: Gemini API handles one text at a time, so we batch them sequentially\n */\nconst generateGeminiBatchEmbeddingsWithUsage = async (\n validTexts: string[],\n config?: EmbeddingProviderConfig\n): 
Promise<BatchEmbeddingWithUsage | null> => {\n const client = getGeminiClient(config?.apiKey);\n\n if (!client) {\n logger.debug('Google API key not configured, skipping batch embedding generation');\n return null;\n }\n\n try {\n const model = config?.model || DEFAULT_GEMINI_EMBEDDING_MODEL;\n const dimensions = config?.dimensions || DEFAULT_EMBEDDING_DIMENSIONS;\n\n logger.debug('Generating Gemini batch embeddings with usage tracking', {\n model,\n dimensions,\n batchSize: validTexts.length,\n });\n\n const embeddingModel = client.getGenerativeModel({ model });\n const embeddings: number[][] = [];\n let totalEstimatedTokens = 0;\n\n // Process each text sequentially\n for (const text of validTexts) {\n const result = await embeddingModel.embedContent({\n content: { role: \"user\", parts: [{ text: text.trim() }] },\n taskType: TaskType.RETRIEVAL_DOCUMENT,\n });\n\n embeddings.push(result.embedding.values);\n totalEstimatedTokens += Math.ceil(text.length / 4);\n }\n\n // Validate all embeddings\n const allValid = embeddings.every(\n emb => Array.isArray(emb) && emb.length === dimensions\n );\n\n if (!allValid) {\n logger.warn('Some generated embeddings have invalid dimensions', {\n expected: dimensions,\n batchSize: embeddings.length,\n });\n return null;\n }\n\n logger.info('Gemini batch embeddings generated successfully', {\n count: embeddings.length,\n estimatedTokens: totalEstimatedTokens,\n });\n\n return {\n embeddings,\n usage: {\n promptTokens: totalEstimatedTokens,\n totalTokens: totalEstimatedTokens,\n },\n };\n } catch (error) {\n logger.error('Failed to generate Gemini batch embeddings with usage', error, {\n batchSize: validTexts.length,\n model: config?.model,\n });\n return null;\n }\n};\n","/**\n * Query builder utilities for Typesense Conversational RAG\n */\n\nimport type { TypesenseConnectionConfig } from '../../index.js'\nimport { TypesenseQueryConfig, AdvancedSearchConfig } from '../../shared/index.js'\n\n/**\n * Typesense search request object\n */\ninterface TypesenseSearchRequest {\n collection: string\n query_by: string\n vector_query: string\n exclude_fields: string\n filter_by?: string\n typo_tokens_threshold?: number\n num_typos?: number\n prefix?: boolean\n drop_tokens_threshold?: number\n enable_stemming?: boolean\n}\n\n/**\n * Advanced search parameters object\n */\ninterface AdvancedSearchParams {\n typo_tokens_threshold?: number\n num_typos?: number\n prefix?: boolean\n drop_tokens_threshold?: number\n enable_stemming?: boolean\n}\n\n/**\n * Build the Typesense conversational search URL with all necessary parameters\n *\n * @param config - Query configuration\n * @param config.userMessage - The user's message/query\n * @param config.chatId - Optional conversation ID for follow-up questions\n * @param conversationModelId - The conversation model ID in Typesense\n * @param typesenseConfig - Typesense connection config\n * @returns URL for the Typesense multi_search endpoint with conversation parameters\n */\nexport function buildConversationalUrl(\n config: { userMessage: string; chatId?: string },\n conversationModelId: string,\n typesenseConfig: TypesenseConnectionConfig\n): URL {\n const protocol = typesenseConfig.nodes[0].protocol || 'http'\n const typesenseUrl = new URL(\n `${protocol}://${typesenseConfig.nodes[0].host}:${typesenseConfig.nodes[0].port}/multi_search`\n )\n\n // Add conversation parameters to URL\n typesenseUrl.searchParams.set('q', config.userMessage)\n typesenseUrl.searchParams.set('conversation', 'true')\n 
typesenseUrl.searchParams.set('conversation_model_id', conversationModelId)\n\n if (config.chatId) {\n typesenseUrl.searchParams.set('conversation_id', config.chatId)\n }\n\n typesenseUrl.searchParams.set('conversation_stream', 'true')\n\n return typesenseUrl\n}\n\n/**\n * Build multi-search requests for Typesense with hybrid search configuration\n *\n * @param config - Query configuration including embedding, collections, and filters\n * @returns Array of search requests for Typesense multi_search\n */\nexport function buildMultiSearchRequests(config: TypesenseQueryConfig) {\n const {\n searchCollections,\n queryEmbedding,\n selectedDocuments,\n kResults = 10,\n advancedConfig = {}\n } = config\n\n return searchCollections.map((collection: string) => {\n const request: TypesenseSearchRequest = {\n collection,\n query_by: 'chunk_text,title,headers',\n vector_query: `embedding:([${queryEmbedding.join(',')}], k:${kResults})`,\n exclude_fields: 'embedding',\n ...buildAdvancedSearchParams(advancedConfig),\n }\n\n // Add document filter if documents are selected\n if (selectedDocuments && selectedDocuments.length > 0) {\n const documentIds = selectedDocuments.map((id: string) => `\"${id}\"`).join(',')\n request.filter_by = `parent_doc_id:[${documentIds}]`\n }\n\n return request\n })\n}\n\n/**\n * Build advanced search parameters from config\n *\n * @param config - Advanced search configuration\n * @returns Object with advanced search parameters\n */\nfunction buildAdvancedSearchParams(config: AdvancedSearchConfig): AdvancedSearchParams {\n const params: AdvancedSearchParams = {}\n\n if (config.typoTokensThreshold !== undefined) {\n params.typo_tokens_threshold = config.typoTokensThreshold\n }\n\n if (config.numTypos !== undefined) {\n params.num_typos = config.numTypos\n }\n\n if (config.prefix !== undefined) {\n params.prefix = config.prefix\n }\n\n if (config.dropTokensThreshold !== undefined) {\n params.drop_tokens_threshold = config.dropTokensThreshold\n }\n\n if (config.enableStemming !== undefined) {\n params.enable_stemming = config.enableStemming\n }\n\n return params\n}\n\n/**\n * Build the complete Typesense request body for multi-search\n *\n * @param config - Query configuration\n * @returns Request body for Typesense multi_search endpoint\n */\nexport function buildMultiSearchRequestBody(config: TypesenseQueryConfig) {\n return {\n searches: buildMultiSearchRequests(config),\n }\n}\n\n/**\n * Build hybrid search parameters for combining semantic and keyword search\n *\n * @param alpha - Weight between semantic (1.0) and keyword (0.0) search\n * @param rerankMatches - Whether to rerank hybrid search results\n * @param queryFields - Fields to use for keyword search\n * @returns Object with hybrid search parameters\n */\nexport function buildHybridSearchParams(\n alpha = 0.9,\n rerankMatches = true,\n queryFields = 'chunk_text,title'\n) {\n return {\n alpha,\n rerank_hybrid_matches: rerankMatches,\n query_fields: queryFields,\n }\n}\n","/**\n * Stream handler utilities for Typesense Conversational RAG SSE events\n */\n\nimport { ChunkSource, TypesenseRAGChunkDocument, TypesenseRAGSearchResult } from '../../shared/index.js'\nimport { logger } from '../../core/logging/logger.js'\n\n/**\n * Parsed conversation event from Typesense SSE stream\n */\nexport interface ConversationEvent {\n /** Conversation ID */\n conversationId?: string\n /** Message token/chunk */\n message?: string\n /** Search results (only in first event) */\n results?: TypesenseRAGSearchResult[]\n /** Raw parsed data 
*/\n raw?: unknown\n}\n\n/**\n * Stream processing result\n */\nexport interface StreamProcessingResult {\n /** Full assistant message */\n fullMessage: string\n /** Conversation ID */\n conversationId: string | null\n /** Extracted sources */\n sources: ChunkSource[]\n /** Context text (for token estimation) */\n contextText: string\n}\n\n/**\n * Parse a single SSE event from Typesense conversation stream\n *\n * @param line - Raw SSE event line\n * @returns Parsed conversation event or null if not parseable\n */\nexport function parseConversationEvent(line: string): ConversationEvent | null {\n if (!line.startsWith('data: ')) {\n return null\n }\n\n const data = line.slice(6)\n\n if (data === '[DONE]') {\n return { raw: '[DONE]' }\n }\n\n try {\n const parsed = JSON.parse(data)\n const event: ConversationEvent = { raw: parsed }\n\n // Extract conversation ID\n if (parsed.conversation_id) {\n event.conversationId = parsed.conversation_id\n } else if (parsed.conversation?.conversation_id) {\n event.conversationId = parsed.conversation.conversation_id\n }\n\n // Extract message/token\n if (parsed.message !== undefined) {\n event.message = parsed.message\n } else if (parsed.conversation?.answer) {\n event.message = parsed.conversation.answer\n }\n\n // Extract results (usually in first event)\n if (parsed.results) {\n event.results = parsed.results\n }\n\n return event\n } catch (e) {\n logger.error('Error parsing SSE data from conversation stream', e as Error)\n return null\n }\n}\n\n/**\n * Extract sources from Typesense search results\n *\n * @param results - Typesense multi-search results array\n * @param documentTypeResolver - Optional function to resolve document type from collection name\n * @returns Array of chunk sources with metadata\n */\nexport function extractSourcesFromResults(\n results: TypesenseRAGSearchResult[],\n documentTypeResolver?: (collectionName: string) => string\n): ChunkSource[] {\n const allSources: ChunkSource[] = []\n\n for (const result of results) {\n if (result.hits) {\n for (const hit of result.hits) {\n const doc = hit.document as TypesenseRAGChunkDocument\n const score = hit.vector_distance || hit.text_match || 0\n const collectionName = result.request_params?.collection_name || ''\n\n const type = documentTypeResolver\n ? documentTypeResolver(collectionName)\n : getDefaultDocumentType(collectionName)\n\n const fullContent = doc.chunk_text || ''\n\n const source: ChunkSource = {\n id: doc.id || '',\n title: doc.title || 'Sin título',\n slug: doc.slug || '',\n type,\n chunkIndex: doc.chunk_index ?? 0,\n relevanceScore: score,\n content: '', // Empty by default - can be loaded separately\n excerpt: fullContent.substring(0, 200) + (fullContent.length > 200 ? '...' 
: ''),\n }\n\n allSources.push(source)\n }\n }\n }\n\n return allSources\n}\n\n/**\n * Build context text from results (useful for token estimation)\n *\n * @param results - Typesense multi-search results array\n * @returns Combined context text from all chunks\n */\nexport function buildContextText(results: TypesenseRAGSearchResult[]): string {\n let contextText = ''\n\n for (const result of results) {\n if (result.hits) {\n for (const hit of result.hits) {\n const doc = hit.document as TypesenseRAGChunkDocument\n contextText += (doc.chunk_text || '') + '\\n'\n }\n }\n }\n\n return contextText\n}\n\n/**\n * Process a Typesense conversation stream\n *\n * @param response - Fetch Response with SSE stream\n * @param onEvent - Callback for each parsed event\n * @param documentTypeResolver - Optional function to resolve document type\n * @returns Processing result with full message, ID, and sources\n */\nexport async function processConversationStream(\n response: Response,\n onEvent?: (event: ConversationEvent) => void,\n documentTypeResolver?: (collectionName: string) => string\n): Promise<StreamProcessingResult> {\n const reader = response.body!.getReader()\n const decoder = new TextDecoder()\n\n let buffer = ''\n let sources: ChunkSource[] = []\n let hasCollectedSources = false\n let conversationId: string | null = null\n let contextText = ''\n let fullMessage = ''\n\n while (true) {\n const { done, value } = await reader.read()\n if (done) break\n\n buffer += decoder.decode(value, { stream: true })\n const lines = buffer.split('\\n')\n buffer = lines.pop() || ''\n\n for (const line of lines) {\n const event = parseConversationEvent(line)\n if (!event) continue\n\n // Notify callback\n if (onEvent) {\n onEvent(event)\n }\n\n // Capture conversation ID\n if (!conversationId && event.conversationId) {\n conversationId = event.conversationId\n }\n\n // Extract sources from first results\n if (!hasCollectedSources && event.results) {\n sources = extractSourcesFromResults(event.results, documentTypeResolver)\n contextText = buildContextText(event.results)\n hasCollectedSources = true\n }\n\n // Accumulate message\n if (event.message) {\n fullMessage += event.message\n }\n }\n }\n\n return {\n fullMessage,\n conversationId,\n sources,\n contextText,\n }\n}\n\n/**\n * Create a ReadableStream that forwards SSE events\n *\n * @param response - Fetch Response with SSE stream\n * @param onData - Callback for processing each event before forwarding\n * @returns ReadableStream for SSE events\n */\nexport function createSSEForwardStream(\n response: Response,\n onData?: (event: ConversationEvent) => void\n): ReadableStream<Uint8Array> {\n const reader = response.body!.getReader()\n const decoder = new TextDecoder()\n const encoder = new TextEncoder()\n\n let buffer = ''\n\n return new ReadableStream({\n async start(controller) {\n while (true) {\n const { done, value } = await reader.read()\n if (done) {\n controller.close()\n break\n }\n\n buffer += decoder.decode(value, { stream: true })\n const lines = buffer.split('\\n')\n buffer = lines.pop() || ''\n\n for (const line of lines) {\n const event = parseConversationEvent(line)\n\n if (event && onData) {\n onData(event)\n }\n\n // Forward original line\n if (line) {\n controller.enqueue(encoder.encode(line + '\\n'))\n }\n }\n }\n },\n cancel() {\n reader.cancel()\n }\n })\n}\n\n/**\n * Default document type resolver based on collection name\n *\n * @param collectionName - Name of the Typesense collection\n * @returns Document type string\n */\nfunction 
getDefaultDocumentType(collectionName: string): string {\n if (collectionName.includes('article')) {\n return 'article'\n }\n if (collectionName.includes('book')) {\n return 'book'\n }\n if (collectionName.includes('post')) {\n return 'post'\n }\n if (collectionName.includes('page')) {\n return 'page'\n }\n return 'document'\n}\n","/**\n * Setup utilities for Typesense Conversational RAG\n */\n\nimport type { Client } from 'typesense'\nimport { logger } from '../../core/logging/logger.js'\nimport { RAGConfig } from '../../shared/index.js'\n/**\n * Ensure conversation history collection exists\n *\n * @param client - Typesense client\n * @param collectionName - Name of the conversation history collection\n * @returns true if collection exists or was created successfully\n */\nexport async function ensureConversationCollection(\n client: Client,\n collectionName: string = 'conversation_history'\n): Promise<boolean> {\n try {\n // Check if collection exists\n await client.collections(collectionName).retrieve()\n logger.info('Conversation collection already exists', { collection: collectionName })\n return true\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number }\n if (typesenseError?.httpStatus === 404) {\n logger.info('Creating conversation collection', { collection: collectionName })\n\n try {\n // Create conversation collection\n // Note: Typesense manages conversation schema automatically\n // We just need to ensure the collection can be created\n await client.collections().create({\n name: collectionName,\n fields: [\n { name: 'conversation_id', type: 'string' },\n { name: 'model_id', type: 'string' },\n { name: 'timestamp', type: 'int32' },\n { name: 'role', type: 'string' },\n { name: 'message', type: 'string' }\n ],\n })\n\n logger.info('Conversation collection created successfully', { collection: collectionName })\n return true\n } catch (createError) {\n logger.error('Failed to create conversation collection', createError as Error, {\n collection: collectionName,\n })\n return false\n }\n }\n\n logger.error('Error checking conversation collection', error as Error, {\n collection: collectionName,\n })\n return false\n }\n}\n\n/**\n * Get default RAG configuration values\n *\n * @returns Default RAG configuration\n */\nexport function getDefaultRAGConfig(): Required<Omit<RAGConfig, 'agents'>> {\n return {\n hybrid: {\n alpha: 0.9,\n rerankMatches: true,\n queryFields: 'chunk_text,title',\n },\n hnsw: {\n efConstruction: 200,\n M: 16,\n ef: 100,\n maxConnections: 64,\n distanceMetric: 'cosine',\n },\n advanced: {\n typoTokensThreshold: 1,\n numTypos: 2,\n prefix: true,\n dropTokensThreshold: 1,\n enableStemming: true,\n },\n }\n}\n\n/**\n * Merge user RAG config with defaults\n *\n * @param userConfig - User-provided RAG configuration\n * @returns Merged configuration with defaults\n */\nexport function mergeRAGConfigWithDefaults(userConfig?: RAGConfig): RAGConfig {\n const defaults = getDefaultRAGConfig()\n\n if (!userConfig) {\n return defaults\n }\n\n return {\n hybrid: { ...defaults.hybrid, ...userConfig.hybrid },\n hnsw: { ...defaults.hnsw, ...userConfig.hnsw },\n advanced: { ...defaults.advanced, ...userConfig.advanced },\n }\n}\n","/**\n * RAG search handler\n *\n * Handles the execution of RAG conversational search against Typesense\n */\n\nimport type { TypesenseConnectionConfig } from '../../../index.js'\nimport { ChunkSource } from '../../../shared/index.js'\nimport {\n buildConversationalUrl,\n buildMultiSearchRequestBody,\n} from 
'../query-builder.js'\n\n/**\n * Configuration for RAG search\n */\nexport type RAGSearchConfig = {\n /** Collections to search in */\n searchCollections: string[]\n /** Conversation model ID */\n modelId: string\n /** Number of results to retrieve */\n kResults?: number\n /** Advanced search configuration */\n advancedConfig?: {\n typoTokensThreshold?: number\n numTypos?: number\n prefix?: boolean\n dropTokensThreshold?: number\n }\n}\n\n/**\n * Request parameters for RAG chat\n */\nexport type RAGChatRequest = {\n /** User's message */\n userMessage: string\n /** Query embedding vector */\n queryEmbedding: number[]\n /** Optional chat/conversation ID for follow-up messages */\n chatId?: string\n /** Optional selected document IDs to filter search */\n selectedDocuments?: string[]\n}\n\n/**\n * Result of a RAG search operation\n */\nexport type RAGSearchResult = {\n /** Full assistant message (for non-streaming responses) */\n fullAssistantMessage?: string\n /** Conversation ID from Typesense */\n conversationId?: string\n /** Sources/chunks used in the response */\n sources: ChunkSource[]\n /** Raw response from Typesense */\n response: Response\n /** Whether the response is streaming */\n isStreaming: boolean\n}\n\n/**\n * Execute a RAG conversational search\n *\n * This function handles the complete flow of executing a RAG search against Typesense:\n * 1. Builds the conversational URL\n * 2. Builds the multi-search request body\n * 3. Executes the request\n * 4. Returns the response with metadata\n *\n * @param typesenseConfig - Typesense connection configuration\n * @param searchConfig - RAG search configuration\n * @param request - Chat request parameters\n * @returns Promise with search results\n */\nexport async function executeRAGSearch(\n typesenseConfig: TypesenseConnectionConfig,\n searchConfig: RAGSearchConfig,\n request: RAGChatRequest,\n): Promise<RAGSearchResult> {\n // Build the Typesense conversational search URL\n const typesenseUrl = buildConversationalUrl(\n request,\n searchConfig.modelId,\n typesenseConfig\n )\n\n // Build the multi-search request body\n const requestBody = buildMultiSearchRequestBody({\n userMessage: request.userMessage,\n queryEmbedding: request.queryEmbedding,\n selectedDocuments: request.selectedDocuments,\n chatId: request.chatId,\n searchCollections: searchConfig.searchCollections,\n kResults: searchConfig.kResults || 10,\n advancedConfig: searchConfig.advancedConfig,\n })\n\n // Execute the search\n const response = await fetch(typesenseUrl.toString(), {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-TYPESENSE-API-KEY': typesenseConfig.apiKey,\n },\n body: JSON.stringify(requestBody),\n })\n\n if (!response.ok) {\n const errorText = await response.text()\n throw new Error(`Typesense search failed: ${errorText}`)\n }\n\n // Check if response is streaming\n const contentType = response.headers.get('content-type')\n const isStreaming = contentType?.includes('text/event-stream') || false\n\n return {\n response,\n isStreaming,\n sources: [], // Will be populated by stream/response handlers\n }\n}\n","/**\n * Chunk fetch handler\n *\n * Handles fetching individual chunk documents by ID from Typesense\n */\n\nimport type { Client } from 'typesense'\nimport { TypesenseRAGChunkDocument } from '../../../shared/index.js'\n\n/**\n * Configuration for fetching a chunk by ID\n */\nexport type ChunkFetchConfig = {\n /** Chunk document ID */\n chunkId: string\n /** Collection name */\n collectionName: string\n /** Valid collection 
names for validation */\n validCollections?: string[]\n}\n\n/**\n * Result of fetching a chunk\n */\nexport type ChunkFetchResult = {\n id: string\n chunk_text: string\n title?: string\n slug?: string\n chunk_index?: number\n collection: string\n}\n\n/**\n * Fetch a chunk document by ID from Typesense\n *\n * @param client - Typesense client instance\n * @param config - Chunk fetch configuration\n * @returns Promise with chunk data\n * @throws Error if chunk not found or collection is invalid\n */\nexport async function fetchChunkById(\n client: Client,\n config: ChunkFetchConfig,\n): Promise<ChunkFetchResult> {\n const { chunkId, collectionName, validCollections } = config\n\n // Validate collection if validCollections is provided\n if (validCollections && !validCollections.includes(collectionName)) {\n throw new Error(\n `Invalid collection: ${collectionName}. Must be one of: ${validCollections.join(', ')}`,\n )\n }\n\n try {\n // Retrieve the document from Typesense\n const document = (await client\n .collections(collectionName)\n .documents(chunkId)\n .retrieve()) as TypesenseRAGChunkDocument\n\n // Extract chunk data\n const chunkText = document.chunk_text || ''\n\n if (!chunkText) {\n throw new Error('Chunk contains no text')\n }\n\n return {\n id: document.id,\n chunk_text: chunkText,\n title: document.title,\n slug: document.slug,\n chunk_index: document.chunk_index,\n collection: collectionName,\n }\n } catch (error: unknown) {\n // Handle Typesense 404 errors\n if (error && typeof error === 'object' && 'httpStatus' in error && error.httpStatus === 404) {\n throw new Error(`Chunk not found: ${chunkId}`)\n }\n throw error\n }\n}\n","/**\n * Session management handlers\n *\n * Handles all chat session operations including getting, saving, and closing sessions\n */\n\nimport type { CollectionSlug, Payload } from 'payload'\n\n/**\n * Session data structure\n */\nexport type ChatSessionData = {\n conversation_id: string\n messages: Array<Record<string, unknown>>\n status: string\n total_tokens?: number\n total_cost?: number\n last_activity?: string\n}\n\n/**\n * Configuration for session operations\n */\nexport type SessionConfig = {\n /** Collection name for sessions */\n collectionName?: CollectionSlug\n /** Time window for active sessions in milliseconds */\n activeSessionWindow?: number\n}\n\n/**\n * Get active chat session for a user\n *\n * @param payload - Payload CMS instance\n * @param userId - User ID\n * @param config - Session configuration\n * @returns Promise with session data or null\n */\nexport async function getActiveSession(\n payload: Payload,\n userId: string | number,\n config: SessionConfig = {},\n): Promise<ChatSessionData | null> {\n const collectionName = config.collectionName || 'chat-sessions'\n const windowMs = config.activeSessionWindow || 24 * 60 * 60 * 1000 // 24 hours default\n\n const cutoffTime = new Date(Date.now() - windowMs)\n\n const chatSessions = await payload.find({\n collection: collectionName,\n where: {\n and: [\n {\n user: {\n equals: userId,\n },\n },\n {\n status: {\n equals: 'active',\n },\n },\n {\n last_activity: {\n greater_than: cutoffTime.toISOString(),\n },\n },\n ],\n },\n sort: '-last_activity',\n limit: 1,\n })\n\n if (!chatSessions.docs.length) {\n return null\n }\n\n return chatSessions.docs[0] as unknown as ChatSessionData\n}\n\n/**\n * Get session by conversation ID\n *\n * @param payload - Payload CMS instance\n * @param userId - User ID\n * @param conversationId - Conversation ID\n * @param config - Session configuration\n 
* @returns Promise with session data or null\n */\nexport async function getSessionByConversationId(\n payload: Payload,\n userId: string | number,\n conversationId: string,\n config: SessionConfig = {},\n): Promise<ChatSessionData | null> {\n const collectionName = config.collectionName || 'chat-sessions'\n\n const chatSessions = await payload.find({\n collection: collectionName,\n where: {\n and: [\n {\n conversation_id: {\n equals: conversationId,\n },\n },\n {\n user: {\n equals: userId,\n },\n },\n ],\n },\n limit: 1,\n })\n\n if (!chatSessions.docs.length) {\n return null\n }\n\n return chatSessions.docs[0] as unknown as ChatSessionData\n}\n\n/**\n * Close a chat session\n *\n * @param payload - Payload CMS instance\n * @param userId - User ID\n * @param conversationId - Conversation ID\n * @param config - Session configuration\n * @returns Promise with updated session data or null if not found\n */\nexport async function closeSession(\n payload: Payload,\n userId: string | number,\n conversationId: string,\n config: SessionConfig = {},\n): Promise<ChatSessionData | null> {\n const collectionName = config.collectionName || 'chat-sessions'\n\n const chatSessions = await payload.find({\n collection: collectionName,\n where: {\n and: [\n {\n conversation_id: {\n equals: conversationId,\n },\n },\n {\n user: {\n equals: userId,\n },\n },\n ],\n },\n limit: 1,\n })\n\n if (!chatSessions.docs.length) {\n return null\n }\n\n const session = chatSessions.docs[0] as unknown as ChatSessionData\n if (!session) {\n return null\n }\n await payload.update({\n collection: collectionName,\n where: {\n conversation_id: {\n equals: conversationId,\n },\n },\n data: {\n status: 'closed',\n closed_at: new Date().toISOString(),\n },\n })\n\n return {\n conversation_id: session.conversation_id,\n messages: session.messages || [],\n status: 'closed',\n total_tokens: session.total_tokens,\n total_cost: session.total_cost,\n last_activity: session.last_activity,\n }\n}\n","/**\n * Server-Sent Events (SSE) utilities\n *\n * Provides utilities for formatting and sending SSE events\n */\n\nimport { SSEEvent } from \"../../../shared/index.js\"\n\n\n/**\n * Helper to create an SSE event string\n *\n * @param event - SSE event object\n * @returns Formatted SSE event string\n */\nexport function formatSSEEvent(event: SSEEvent): string {\n return `data: ${JSON.stringify(event)}\\n\\n`\n}\n\n/**\n * Helper to send an SSE event through a controller\n *\n * @param controller - ReadableStreamDefaultController\n * @param encoder - TextEncoder instance\n * @param event - SSE event to send\n */\nexport function sendSSEEvent(\n controller: ReadableStreamDefaultController<Uint8Array>,\n encoder: TextEncoder,\n event: SSEEvent,\n): void {\n const data = formatSSEEvent(event)\n controller.enqueue(encoder.encode(data))\n}\n","/**\n * Chat Session Repository\n * Functions for managing chat sessions in PayloadCMS\n */\n\nimport type { CollectionSlug, Payload } from 'payload'\nimport { logger } from '../../core/logging/logger.js'\nimport { ChunkSource, SpendingEntry } from '../../shared/index.js'\n/**\n * Chat message format with optional sources\n */\nexport interface ChatMessageWithSources {\n role: 'user' | 'assistant'\n content: string\n timestamp: string\n sources?: Array<{\n id: string\n title: string\n type: string\n chunk_index: number\n slug?: string\n }>\n}\n\n/**\n * Chat session document structure\n */\ninterface ChatSessionDocument {\n id: string | number\n messages?: unknown\n spending?: unknown\n total_tokens?: 
number\n total_cost?: number\n conversation_id?: string\n status?: string\n last_activity?: Date | string\n}\n\n/**\n * Save or update chat session in PayloadCMS\n *\n * @param payload - Payload CMS instance\n * @param userId - User ID\n * @param conversationId - Conversation ID from Typesense\n * @param userMessage - User's message\n * @param assistantMessage - Assistant's response\n * @param sources - Source chunks used for the response\n * @param spending - Token spending entries\n * @param collectionName - Collection name for sessions (default: 'chat-sessions')\n */\nexport async function saveChatSession(\n payload: Payload,\n userId: string | number,\n conversationId: string,\n userMessage: string,\n assistantMessage: string,\n sources: ChunkSource[],\n spending: SpendingEntry[],\n collectionName: CollectionSlug = 'chat-sessions'\n): Promise<void> {\n try {\n // Check if session already exists\n const existing = await payload.find({\n collection: collectionName,\n where: {\n conversation_id: {\n equals: conversationId,\n },\n },\n limit: 1,\n })\n\n const newUserMessage: ChatMessageWithSources = {\n role: 'user',\n content: userMessage,\n timestamp: new Date().toISOString(),\n }\n\n const newAssistantMessage: ChatMessageWithSources = {\n role: 'assistant',\n content: assistantMessage,\n timestamp: new Date().toISOString(),\n sources: sources.map((s) => ({\n id: s.id,\n title: s.title,\n type: s.type,\n chunk_index: s.chunkIndex,\n slug: s.slug,\n })),\n }\n\n if (existing.docs.length > 0 && existing.docs[0]) {\n // Update existing session\n await updateExistingSession(\n payload,\n existing.docs[0] as ChatSessionDocument,\n newUserMessage,\n newAssistantMessage,\n spending,\n collectionName,\n )\n } else {\n // Create new session\n await createNewSession(\n payload,\n userId,\n conversationId,\n newUserMessage,\n newAssistantMessage,\n spending,\n collectionName,\n )\n }\n } catch (error) {\n logger.error('Error saving chat session', error as Error, {\n conversationId,\n userId,\n })\n // Don't fail the request if saving fails\n }\n}\n\n/**\n * Update an existing chat session\n */\nasync function updateExistingSession(\n payload: Payload,\n session: ChatSessionDocument,\n newUserMessage: ChatMessageWithSources,\n newAssistantMessage: ChatMessageWithSources,\n spending: SpendingEntry[],\n collectionName: CollectionSlug,\n): Promise<void> {\n const existingMessages = (session.messages as ChatMessageWithSources[]) || []\n const existingSpending = (session.spending as SpendingEntry[]) || []\n\n const messages = [...existingMessages, newUserMessage, newAssistantMessage]\n const allSpending = [...existingSpending, ...spending]\n const totalTokens =\n (session.total_tokens || 0) + spending.reduce((sum, e) => sum + e.tokens.total, 0)\n const totalCost =\n (session.total_cost || 0) + spending.reduce((sum, e) => sum + (e.cost_usd || 0), 0)\n\n await payload.update({\n collection: collectionName,\n id: session.id,\n data: {\n messages,\n spending: allSpending,\n total_tokens: totalTokens,\n total_cost: totalCost,\n last_activity: new Date().toISOString(),\n status: 'active',\n },\n })\n\n logger.info('Chat session updated successfully', {\n sessionId: session.id,\n conversationId: session.conversation_id,\n totalTokens,\n totalCost,\n })\n}\n\n/**\n * Create a new chat session\n */\nasync function createNewSession(\n payload: Payload,\n userId: string | number,\n conversationId: string,\n newUserMessage: ChatMessageWithSources,\n newAssistantMessage: ChatMessageWithSources,\n spending: 
SpendingEntry[],\n collectionName: CollectionSlug,\n): Promise<void> {\n const totalTokens = spending.reduce((sum, e) => sum + e.tokens.total, 0)\n const totalCost = spending.reduce((sum, e) => sum + (e.cost_usd || 0), 0)\n\n await payload.create({\n collection: collectionName,\n data: {\n user: userId as string,\n conversation_id: conversationId,\n status: 'active',\n messages: [newUserMessage, newAssistantMessage],\n spending,\n total_tokens: totalTokens,\n total_cost: totalCost,\n last_activity: new Date().toISOString(),\n },\n })\n\n logger.info('New chat session created successfully', {\n conversationId,\n userId,\n totalTokens,\n totalCost,\n })\n}\n","import type { Payload, PayloadRequest } from 'payload';\nimport type { ChatEndpointConfig } from '../route.js';\n\n/**\n * JSON Response helper\n */\nexport const jsonResponse = (data: any, options?: ResponseInit) => {\n return new Response(JSON.stringify(data), {\n headers: { 'Content-Type': 'application/json' },\n ...options,\n });\n };\n\n/**\n * Validates chat request and extracts required data\n */\nexport async function validateChatRequest(\n request: PayloadRequest,\n config: ChatEndpointConfig\n): Promise<\n | { success: false; error: Response }\n | {\n success: true;\n userId: string | number;\n userEmail: string;\n payload: Payload;\n userMessage: string;\n body: { message: string; chatId?: string; selectedDocuments?: string[]; agentSlug?: string };\n }\n> {\n // Check permissions\n if (!await config.checkPermissions(request)) {\n return {\n success: false,\n error: jsonResponse({ error: 'No tienes permisos para acceder a esta sesión.' }, { status: 403 }),\n };\n }\n\n // Validate request structure\n if (!request.url || !request.user) {\n return {\n success: false,\n error: jsonResponse({ error: 'URL not found' }, { status: 400 }),\n };\n }\n\n const { id: userId, email } = request.user;\n const userEmail = email || '';\n const payload = await config.getPayload();\n const body = await request.json?.();\n\n // Validate body exists\n if (!body) {\n return {\n success: false,\n error: jsonResponse({ error: 'Body not found' }, { status: 400 }),\n };\n }\n\n // Validate message\n if (!body.message || typeof body.message !== 'string' || body.message.trim() === '') {\n return {\n success: false,\n error: jsonResponse({ error: 'Se requiere un mensaje.' 
}, { status: 400 }),\n };\n }\n\n const userMessage = body.message.trim();\n\n return {\n success: true,\n userId,\n userEmail,\n payload,\n userMessage,\n body,\n };\n}\n","import type { ChatEndpointConfig } from '../route.js';\nimport {\n Logger,\n logger,\n DEFAULT_EMBEDDING_MODEL,\n EmbeddingServiceImpl,\n OpenAIEmbeddingProvider,\n GeminiEmbeddingProvider,\n type OpenAIProviderConfig,\n type GeminiProviderConfig,\n} from '@nexo-labs/payload-indexer';\nimport { SpendingEntry } from '../../../../../shared/index.js';\n\n/**\n * Generates embedding and tracks usage\n */\nexport async function generateEmbeddingWithTracking(\n userMessage: string,\n config: ChatEndpointConfig,\n spendingEntries: SpendingEntry[]\n): Promise<number[]> {\n logger.debug('Generating embeddings for semantic search');\n\n const embeddingConfig = config.embeddingConfig;\n \n if (!embeddingConfig) {\n throw new Error('Embedding configuration missing');\n }\n\n let provider;\n \n // Use the strongly typed nested provider configuration\n const providerType = embeddingConfig.type;\n const apiKey = embeddingConfig.apiKey;\n const model = embeddingConfig.model;\n const dimensions = embeddingConfig.dimensions;\n\n const serviceLogger = new Logger({ enabled: true, prefix: '[rag-embedding]' });\n\n if (providerType === 'gemini') {\n provider = new GeminiEmbeddingProvider({\n type: 'gemini',\n apiKey: apiKey,\n model: model,\n dimensions: dimensions\n } as GeminiProviderConfig, serviceLogger);\n } else {\n provider = new OpenAIEmbeddingProvider({\n type: 'openai',\n apiKey: apiKey,\n model: model,\n dimensions: dimensions\n } as OpenAIProviderConfig, serviceLogger);\n }\n\n const service = new EmbeddingServiceImpl(provider, serviceLogger, embeddingConfig);\n \n // We need usage info. 
The new service interface returns only embedding or array of embeddings.\n // We need to extend service or provider to return usage or get it from provider directly.\n // Let's use provider directly for now to get usage which we know returns EmbeddingResult\n \n const resultWithUsage = await provider.generateEmbedding(userMessage);\n\n if (!resultWithUsage) {\n throw new Error('Failed to generate embedding');\n }\n\n // Track embedding spending if function provided\n // We use model from config or default\n const modelUsed = model || DEFAULT_EMBEDDING_MODEL;\n\n if (config.createEmbeddingSpending) {\n const embeddingSpending = config.createEmbeddingSpending(\n modelUsed,\n resultWithUsage.usage.totalTokens\n );\n spendingEntries.push(embeddingSpending);\n\n logger.info('Embedding generated successfully', {\n model: modelUsed,\n totalTokens: resultWithUsage.usage.totalTokens,\n costUsd: embeddingSpending.cost_usd,\n });\n }\n\n return resultWithUsage.embedding;\n}\n","import type { Payload } from 'payload';\nimport type { ChatEndpointConfig } from '../route.js';\nimport { logger } from '../../../../../core/logging/logger.js';\nimport { ChunkSource, SpendingEntry } from '../../../../../shared/index.js';\n\n/**\n * Saves chat session if function is provided\n */\nexport async function saveChatSessionIfNeeded(\n config: ChatEndpointConfig,\n payload: Payload,\n userId: string | number,\n conversationId: string | null,\n userMessage: string,\n assistantMessage: string,\n sources: ChunkSource[],\n spendingEntries: SpendingEntry[]\n): Promise<void> {\n if (!conversationId || !config.saveChatSession) {\n return;\n }\n\n await config.saveChatSession(\n payload,\n userId,\n conversationId,\n userMessage,\n assistantMessage,\n sources,\n spendingEntries,\n config.collectionName\n );\n\n logger.info('Chat session saved to PayloadCMS', {\n conversationId,\n });\n}\n","import type { Payload } from 'payload';\nimport type { ChatEndpointConfig } from '../route.js';\nimport { jsonResponse } from '../validators/index.js';\nimport { logger } from '../../../../../core/logging/logger.js';\n\n/**\n * Checks token limits before processing request\n */\nexport async function checkTokenLimitsIfNeeded(\n config: ChatEndpointConfig,\n payload: Payload,\n userId: string | number,\n userEmail: string,\n userMessage: string\n): Promise<Response | null> {\n if (!config.estimateTokensFromText || !config.checkTokenLimit) {\n return null; // No token limit check needed\n }\n\n const estimatedEmbeddingTokens = config.estimateTokensFromText(userMessage);\n const estimatedLLMTokens = config.estimateTokensFromText(userMessage) * 10;\n const estimatedTotalTokens = estimatedEmbeddingTokens + estimatedLLMTokens;\n\n const limitCheck = await config.checkTokenLimit(payload, userId, estimatedTotalTokens);\n\n if (!limitCheck.allowed) {\n logger.warn('Token limit exceeded for user', {\n userId,\n limit: limitCheck.limit,\n used: limitCheck.used,\n remaining: limitCheck.remaining,\n });\n return jsonResponse(\n {\n error: 'Has alcanzado tu límite diario de tokens.',\n limit_info: {\n limit: limitCheck.limit,\n used: limitCheck.used,\n remaining: limitCheck.remaining,\n reset_at: limitCheck.reset_at,\n },\n },\n { status: 429 }\n );\n }\n\n logger.info('Chat request started with token limit check passed', {\n userId,\n userEmail,\n limit: limitCheck.limit,\n used: limitCheck.used,\n remaining: limitCheck.remaining,\n });\n\n return null; // Token limit passed\n}\n","import type { Payload } from 'payload';\nimport type { 
ChatEndpointConfig } from '../route.js';\nimport { logger } from '../../../../../core/logging/logger.js';\nimport { SpendingEntry, SSEEvent } from '../../../../../shared/index.js';\n\n/**\n * Calculates total usage from spending entries\n */\nexport function calculateTotalUsage(spendingEntries: SpendingEntry[]): {\n totalTokens: number;\n totalCostUSD: number;\n} {\n const totalTokensUsed = spendingEntries.reduce(\n (sum, entry) => sum + entry.tokens.total,\n 0\n );\n const totalCostUSD = spendingEntries.reduce(\n (sum, entry) => sum + (entry.cost_usd || 0),\n 0\n );\n\n logger.info('Total token usage calculated', {\n totalTokens: totalTokensUsed,\n totalCostUsd: totalCostUSD,\n });\n\n return { totalTokens: totalTokensUsed, totalCostUSD };\n}\n\n/**\n * Sends usage statistics event to client\n */\nexport async function sendUsageStatsIfNeeded(\n config: ChatEndpointConfig,\n payload: Payload,\n userId: string | number,\n totalTokens: number,\n totalCostUSD: number,\n sendEvent: (event: SSEEvent) => void\n): Promise<void> {\n if (!config.getUserUsageStats) {\n return;\n }\n\n const usageStats = await config.getUserUsageStats(payload, userId);\n\n sendEvent({\n type: 'usage',\n data: {\n tokens_used: totalTokens,\n cost_usd: totalCostUSD,\n daily_limit: usageStats.limit,\n daily_used: usageStats.used,\n daily_remaining: usageStats.remaining,\n reset_at: usageStats.reset_at,\n },\n });\n}\n","import { CollectionSlug, Payload, PayloadRequest } from 'payload'\nimport { logger } from '../../../../core/logging/logger.js'\nimport type { ChunkSource, EmbeddingProviderConfig, RAGFeatureConfig, SpendingEntry, SSEEvent } from '../../../../shared/types/plugin-types.js'\nimport {\n executeRAGSearch,\n sendSSEEvent,\n type RAGSearchConfig,\n type TypesenseConnectionConfig,\n} from '../../index.js'\n\n// Import atomized handlers\nimport { generateEmbeddingWithTracking } from './handlers/embedding-handler.js'\nimport { saveChatSessionIfNeeded } from './handlers/session-handler.js'\nimport { checkTokenLimitsIfNeeded } from './handlers/token-limit-handler.js'\nimport { calculateTotalUsage, sendUsageStatsIfNeeded } from './handlers/usage-stats-handler.js'\nimport { validateChatRequest } from './validators/index.js'\n\n/**\n * Configuration for chat endpoint\n */\nexport type ChatEndpointConfig = {\n /** Collection name for chat sessions */\n collectionName: CollectionSlug;\n /** Check permissions function */\n checkPermissions: (request: PayloadRequest) => Promise<boolean>;\n /** Typesense connection config */\n typesense: TypesenseConnectionConfig\n /** RAG search configuration (full config for multi-agent resolution) */\n rag: RAGFeatureConfig\n /** Get Payload instance */\n getPayload: () => Promise<Payload>\n /** Embedding configuration */\n embeddingConfig?: EmbeddingProviderConfig\n /** Check token limit function */\n checkTokenLimit?: (\n payload: Payload,\n userId: string | number,\n tokens: number,\n ) => Promise<{\n allowed: boolean\n limit: number\n used: number\n remaining: number\n reset_at?: string\n }>\n /** Get user usage stats function */\n getUserUsageStats?: (payload: Payload, userId: string | number) => Promise<{\n limit: number\n used: number\n remaining: number\n reset_at?: string\n }>\n /** Save chat session function */\n saveChatSession?: (\n payload: Payload,\n userId: string | number,\n conversationId: string,\n userMessage: string,\n assistantMessage: string,\n sources: ChunkSource[],\n spendingEntries: SpendingEntry[],\n collectionName: CollectionSlug,\n ) => Promise<void>\n /** 
Handle streaming response function */\n handleStreamingResponse: (\n response: Response,\n controller: ReadableStreamDefaultController<Uint8Array>,\n encoder: TextEncoder,\n ) => Promise<{\n fullAssistantMessage: string\n conversationId: string | null\n sources: ChunkSource[]\n llmSpending: SpendingEntry\n }>\n /** Handle non-streaming response function */\n handleNonStreamingResponse: (\n data: Record<string, unknown>,\n controller: ReadableStreamDefaultController<Uint8Array>,\n encoder: TextEncoder,\n ) => Promise<{\n fullAssistantMessage: string\n conversationId: string | null\n sources: ChunkSource[]\n llmSpending: SpendingEntry\n }>\n /** Create embedding spending function */\n createEmbeddingSpending?: (model: string, tokens: number) => SpendingEntry\n /** Estimate tokens from text function */\n estimateTokensFromText?: (text: string) => number\n}\n\n/**\n * Create a parameterizable POST handler for chat endpoint\n */\nexport function createChatPOSTHandler(config: ChatEndpointConfig) {\n return async function POST(request: PayloadRequest) {\n try {\n // Validate request\n const validated = await validateChatRequest(request, config);\n if (!validated.success) {\n return validated.error;\n }\n\n const { userId, userEmail, payload, userMessage, body } = validated;\n\n // Resolve Agent Configuration\n let searchConfig: RAGSearchConfig;\n const agentSlug = body.agentSlug;\n \n if (agentSlug && config.rag?.agents) {\n const agent = config.rag.agents.find(a => a.slug === agentSlug);\n if (!agent) {\n return new Response(JSON.stringify({ error: `Agent not found: ${agentSlug}` }), { status: 404 });\n }\n searchConfig = {\n modelId: agent.slug,\n searchCollections: agent.searchCollections,\n kResults: agent.kResults,\n advancedConfig: config.rag.advanced\n };\n } else if (config.rag?.agents && config.rag.agents.length > 0) {\n // Use first agent as default\n const agent = config.rag.agents[0];\n if (!agent) throw new Error(\"Default agent not found\");\n searchConfig = {\n modelId: agent.slug,\n searchCollections: agent.searchCollections,\n kResults: agent.kResults,\n advancedConfig: config.rag.advanced\n };\n } else {\n return new Response(JSON.stringify({ error: 'No RAG configuration available' }), { status: 500 });\n }\n\n // Check token limits if configured\n const tokenLimitError = await checkTokenLimitsIfNeeded(\n config,\n payload,\n userId,\n userEmail,\n userMessage\n );\n if (tokenLimitError) {\n return tokenLimitError;\n }\n\n logger.info('Processing chat message', {\n userId,\n chatId: body.chatId || 'new',\n agentSlug: agentSlug || 'default',\n modelId: searchConfig.modelId,\n isFollowUp: !!body.chatId,\n hasSelectedDocuments: !!body.selectedDocuments,\n messageLength: userMessage.length,\n })\n\n // Create a streaming response\n const encoder = new TextEncoder()\n const stream = new ReadableStream({\n async start(controller) {\n const spendingEntries: SpendingEntry[] = []\n let fullAssistantMessage = ''\n let conversationIdCapture: string | null = null\n let sourcesCapture: ChunkSource[] = []\n\n try {\n const sendEvent = (event: SSEEvent) => sendSSEEvent(controller, encoder, event);\n\n // Generate embedding with tracking\n const queryEmbedding = await generateEmbeddingWithTracking(\n userMessage,\n config,\n spendingEntries\n );\n\n // Execute RAG search\n const searchResult = await executeRAGSearch(\n config.typesense,\n searchConfig,\n {\n userMessage,\n queryEmbedding,\n chatId: body.chatId,\n selectedDocuments: body.selectedDocuments,\n }\n );\n\n // Handle streaming or 
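Agent resolution in `createChatPOSTHandler` follows directly from the `rag.agents` array: an explicit `agentSlug` must match exactly, an omitted slug falls back to the first agent, and an empty agent list is a server error. An illustrative config and the outcomes (slugs and collection names are placeholders):

```ts
const rag = {
  enabled: true,
  agents: [
    { slug: 'docs', name: 'Docs agent', searchCollections: ['article_web_chunk'], kResults: 10 },
    { slug: 'books', name: 'Books agent', searchCollections: ['book_chunk'], kResults: 5 },
  ],
};

// body.agentSlug === 'books'    -> searchConfig.modelId = 'books'
// body.agentSlug === 'missing'  -> 404 { error: 'Agent not found: missing' }
// body.agentSlug === undefined  -> first agent ('docs') is used as default
// rag.agents absent or empty    -> 500 { error: 'No RAG configuration available' }
```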
non-streaming response\n const streamResult = searchResult.isStreaming && searchResult.response.body\n ? await config.handleStreamingResponse(searchResult.response, controller, encoder)\n : await config.handleNonStreamingResponse(\n await searchResult.response.json(),\n controller,\n encoder\n );\n\n // Extract results\n fullAssistantMessage = streamResult.fullAssistantMessage;\n conversationIdCapture = streamResult.conversationId;\n sourcesCapture = streamResult.sources;\n spendingEntries.push(streamResult.llmSpending);\n\n // Calculate total usage\n const { totalTokens: totalTokensUsed, totalCostUSD } =\n calculateTotalUsage(spendingEntries);\n\n // Send usage stats\n await sendUsageStatsIfNeeded(\n config,\n payload,\n userId,\n totalTokensUsed,\n totalCostUSD,\n sendEvent\n );\n\n // Save session\n await saveChatSessionIfNeeded(\n config,\n payload,\n userId,\n conversationIdCapture,\n userMessage,\n fullAssistantMessage,\n sourcesCapture,\n spendingEntries\n );\n\n logger.info('Chat request completed successfully', {\n userId,\n conversationId: conversationIdCapture,\n totalTokens: totalTokensUsed,\n });\n controller.close();\n } catch (error) {\n logger.error('Fatal error in chat stream', error as Error, {\n userId,\n chatId: body.chatId,\n });\n sendSSEEvent(controller, encoder, {\n type: 'error',\n data: {\n error: error instanceof Error ? error.message : 'Error desconocido',\n },\n });\n controller.close();\n }\n },\n })\n\n return new Response(stream, {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n Connection: 'keep-alive',\n },\n })\n } catch (error) {\n logger.error('Error in chat API endpoint', error as Error, {\n userId: request.user?.id,\n })\n\n return new Response(\n JSON.stringify({\n error: 'Error al procesar tu mensaje. Por favor, inténtalo de nuevo.',\n details: error instanceof Error ? 
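The handler responds with `text/event-stream`, so a client consumes it incrementally. A browser-side sketch, assuming `sendSSEEvent` writes standard `data: <json>` frames (the utility itself is not shown in this diff) and that the endpoint is mounted at `/api/chat`:

```ts
const res = await fetch('/api/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ message: '¿Qué es RAG?' }),
});

const reader = res.body!.getReader();
const decoder = new TextDecoder();
let buffer = '';
let answer = '';

while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  buffer += decoder.decode(value, { stream: true });
  const lines = buffer.split('\n');
  buffer = lines.pop() ?? '';
  for (const line of lines) {
    if (!line.startsWith('data: ')) continue; // assumed SSE framing
    const event = JSON.parse(line.slice(6)) as { type: string; data: unknown };
    if (event.type === 'token') answer += event.data as string; // incremental answer text
    if (event.type === 'error') console.error(event.data);
  }
}
```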
error.message : 'Error desconocido',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n },\n )\n }\n }\n}\n\n/**\n * Default export for Next.js App Router\n * Users should call createChatPOSTHandler with their config\n */\nexport { createChatPOSTHandler as POST }\n","/**\n * Stream handler utilities\n *\n * Shared utility functions for stream handlers\n */\n\n/**\n * Resolve document type from collection name\n */\nexport function resolveDocumentType(collectionName: string): string {\n if (collectionName === 'article_web_chunk') return 'article';\n if (collectionName === 'book_chunk') return 'book';\n return 'document';\n}\n\n/**\n * Estimate tokens from text (simple word-based estimation)\n * More accurate implementations can be provided via callbacks\n */\nexport function estimateTokensFromText(text: string): number {\n // Simple estimation: ~1.3 tokens per word\n const words = text.trim().split(/\\s+/).length;\n return Math.ceil(words * 1.3);\n}\n","/**\n * Streaming response handler\n *\n * Handles streaming responses from Typesense conversational search\n */\n\nimport { parseConversationEvent, extractSourcesFromResults, buildContextText } from '../stream-handler.js'\nimport { sendSSEEvent } from '../utils/sse-utils.js'\nimport { logger } from '../../../core/logging/logger.js'\nimport { resolveDocumentType, estimateTokensFromText } from './utils.js'\nimport { ChunkSource, SpendingEntry } from '../../../shared/index.js';\n\n/**\n * Default implementation for handling streaming responses\n */\nexport async function defaultHandleStreamingResponse(\n response: Response,\n controller: ReadableStreamDefaultController<Uint8Array>,\n encoder: TextEncoder,\n): Promise<{\n fullAssistantMessage: string;\n conversationId: string | null;\n sources: ChunkSource[];\n llmSpending: SpendingEntry;\n}> {\n logger.debug('Starting streaming response handling')\n\n if (!response.body) {\n throw new Error('Response body is null');\n }\n\n const reader = response.body.getReader();\n const decoder = new TextDecoder();\n let buffer = '';\n let sources: ChunkSource[] = [];\n let hasCollectedSources = false;\n let conversationId: string | null = null;\n let contextText = ''; // To estimate LLM tokens\n let fullAssistantMessage = '';\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n logger.debug('Streaming response completed');\n break;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n const event = parseConversationEvent(line);\n if (!event) continue;\n\n // Handle [DONE] event\n if (event.raw === '[DONE]') {\n sendSSEEvent(controller, encoder, { type: 'done', data: '' });\n continue;\n }\n\n // Capture conversation_id\n if (!conversationId && event.conversationId) {\n conversationId = event.conversationId;\n logger.debug('Conversation ID captured', { conversationId });\n sendSSEEvent(controller, encoder, { type: 'conversation_id', data: conversationId });\n }\n\n // Extract sources\n if (!hasCollectedSources && event.results) {\n sources = extractSourcesFromResults(event.results, resolveDocumentType);\n contextText = buildContextText(event.results);\n\n if (sources.length > 0) {\n sendSSEEvent(controller, encoder, { type: 'sources', data: sources });\n }\n\n hasCollectedSources = true;\n }\n\n // Stream conversation tokens\n if (event.message) {\n fullAssistantMessage += event.message;\n sendSSEEvent(controller, encoder, { type: 'token', 
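`estimateTokensFromText` is deliberately crude: roughly 1.3 tokens per whitespace-separated word, rounded up. Restated with concrete numbers:

```ts
const estimate = (text: string) => Math.ceil(text.trim().split(/\s+/).length * 1.3);

estimate('hello world');             // 2 words -> ceil(2.6) = 3 tokens
estimate('one two three four five'); // 5 words -> ceil(6.5) = 7 tokens
```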
data: event.message });\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n\n // Estimate LLM tokens (context + user message + response)\n const llmInputTokens = estimateTokensFromText(contextText);\n const llmOutputTokens = estimateTokensFromText(fullAssistantMessage);\n\n // Track LLM spending (defaults to a simple model)\n const llmSpending: SpendingEntry = {\n service: 'openai_llm',\n model: 'gpt-4o-mini',\n tokens: {\n input: llmInputTokens,\n output: llmOutputTokens,\n total: llmInputTokens + llmOutputTokens,\n },\n cost_usd: (llmInputTokens * 0.00000015) + (llmOutputTokens * 0.0000006), // gpt-4o-mini pricing\n timestamp: new Date().toISOString(),\n };\n\n logger.info('LLM cost calculated', {\n inputTokens: llmInputTokens,\n outputTokens: llmOutputTokens,\n totalTokens: llmSpending.tokens.total,\n costUsd: llmSpending.cost_usd,\n })\n\n return {\n fullAssistantMessage,\n conversationId,\n sources,\n llmSpending,\n };\n}\n","/**\n * Non-streaming response handler\n *\n * Handles non-streaming (regular JSON) responses from Typesense conversational search\n */\n\nimport { extractSourcesFromResults, buildContextText } from '../stream-handler.js'\nimport { sendSSEEvent } from '../utils/sse-utils.js'\nimport { logger } from '../../../core/logging/logger.js'\nimport { resolveDocumentType, estimateTokensFromText } from './utils.js'\nimport { ChunkSource, SpendingEntry, TypesenseRAGSearchResult } from '../../../shared/index.js';\n\n/**\n * Default implementation for handling non-streaming responses\n */\nexport async function defaultHandleNonStreamingResponse(\n data: Record<string, unknown>,\n controller: ReadableStreamDefaultController<Uint8Array>,\n encoder: TextEncoder,\n): Promise<{\n fullAssistantMessage: string;\n conversationId: string | null;\n sources: ChunkSource[];\n llmSpending: SpendingEntry;\n}> {\n logger.debug('Using non-streaming fallback for response handling');\n\n // Type assertion for accessing known properties from the Typesense response\n const typedData = data as {\n conversation?: { conversation_id?: string; answer?: string };\n conversation_id?: string;\n response?: string;\n message?: string;\n results?: unknown[];\n };\n\n let conversationId: string | null = null;\n if (typedData.conversation?.conversation_id) {\n conversationId = typedData.conversation.conversation_id;\n } else if (typedData.conversation_id) {\n conversationId = typedData.conversation_id;\n }\n\n let fullAnswer = '';\n if (typedData.conversation?.answer) {\n fullAnswer = typedData.conversation.answer;\n } else if (typedData.response || typedData.message) {\n fullAnswer = typedData.response || typedData.message || '';\n }\n\n const sources = extractSourcesFromResults((typedData.results || []) as TypesenseRAGSearchResult[], resolveDocumentType);\n const contextText = buildContextText((typedData.results || []) as TypesenseRAGSearchResult[]);\n\n // Simulate streaming by sending tokens word by word\n if (fullAnswer) {\n const words = fullAnswer.split(' ');\n for (let i = 0; i < words.length; i++) {\n const token = i === 0 ? 
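The default streaming handler hard-codes gpt-4o-mini rates ($0.15 per million input tokens, $0.60 per million output tokens). A quick arithmetic check of the `cost_usd` formula:

```ts
const llmInputTokens = 2000;
const llmOutputTokens = 500;

const costUsd = llmInputTokens * 0.00000015 + llmOutputTokens * 0.0000006;
// 2000 * 0.00000015 = 0.0003
//  500 * 0.0000006  = 0.0003
// total               0.0006 USD
```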
words[i] : ' ' + words[i];\n if (token) {\n sendSSEEvent(controller, encoder, { type: 'token', data: token });\n }\n }\n }\n\n if (conversationId) {\n sendSSEEvent(controller, encoder, { type: 'conversation_id', data: conversationId });\n }\n\n if (sources.length > 0) {\n sendSSEEvent(controller, encoder, { type: 'sources', data: sources });\n }\n\n sendSSEEvent(controller, encoder, { type: 'done', data: '' });\n\n // Estimate LLM tokens\n const llmInputTokens = estimateTokensFromText(contextText);\n const llmOutputTokens = estimateTokensFromText(fullAnswer);\n\n const llmSpending: SpendingEntry = {\n service: 'openai_llm',\n model: 'gpt-4o-mini',\n tokens: {\n input: llmInputTokens,\n output: llmOutputTokens,\n total: llmInputTokens + llmOutputTokens,\n },\n cost_usd: (llmInputTokens * 0.00000015) + (llmOutputTokens * 0.0000006),\n timestamp: new Date().toISOString(),\n };\n\n return {\n fullAssistantMessage: fullAnswer,\n conversationId,\n sources,\n llmSpending,\n };\n}\n","import { Payload, PayloadRequest } from 'payload';\nimport { jsonResponse } from '../validators/index.js'\nimport { logger } from '../../../../../core/logging/logger.js'\nimport { SessionConfig, getActiveSession, getSessionByConversationId, closeSession } from '../../../handlers/session-handlers.js';\n\n/**\n * Configuration for session endpoints\n */\nexport type SessionEndpointConfig = {\n /** Get Payload instance */\n getPayload: () => Promise<Payload>\n checkPermissions: (request: PayloadRequest) => Promise<boolean>;\n /** Session configuration */\n sessionConfig?: SessionConfig\n}\n\n/**\n * Create a parameterizable GET handler for session endpoint\n *\n * Query params:\n * - ?active=true → Get the most recent active session\n * - ?conversationId=xxx → Get a specific session by conversation ID\n */\nexport function createSessionGETHandler(config: SessionEndpointConfig) {\n return async function GET(request: PayloadRequest) {\n try {\n if (!await config.checkPermissions(request)) {\n return jsonResponse({ error: 'No tienes permisos para acceder a esta sesión.' }, { status: 403 })\n }\n const userId = request.user?.id\n\n if (!request.url || !userId) {\n return jsonResponse({ error: 'URL not found' }, { status: 400 })\n }\n\n const { searchParams } = new URL(request.url)\n const isActive = searchParams.get('active') === 'true'\n const conversationId = searchParams.get('conversationId')\n\n // Get Payload instance\n const payload = await config.getPayload()\n\n // Handle active session request\n if (isActive) {\n const session = await getActiveSession(payload, userId, config.sessionConfig)\n\n if (!session) {\n return jsonResponse({ error: 'No hay sesión activa.' }, { status: 404 })\n }\n\n return jsonResponse(session)\n }\n\n // Handle specific session request\n if (!conversationId) {\n return jsonResponse(\n { error: 'Se requiere conversationId o active=true.' },\n { status: 400 },\n )\n }\n\n const session = await getSessionByConversationId(\n payload,\n userId,\n conversationId,\n config.sessionConfig,\n )\n\n if (!session) {\n return jsonResponse({ error: 'Sesión de chat no encontrada.' }, { status: 404 })\n }\n\n return jsonResponse(session)\n } catch (error) {\n logger.error('Error retrieving chat session', error as Error, {\n userId: request.user?.id,\n })\n\n return jsonResponse(\n {\n error: 'Error al recuperar la sesión.',\n details: error instanceof Error ? 
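Driving the session GET handler from a client is a matter of query params. A sketch assuming the `/chat/session` path registered later in this diff, mounted under Payload's default `/api` prefix:

```ts
// Most recent active session (404 with 'No hay sesión activa.' if none).
const active = await fetch('/api/chat/session?active=true');

// A specific session; omitting both params yields a 400.
const byId = await fetch('/api/chat/session?conversationId=abc123');
if (byId.ok) console.log(await byId.json());
```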
error.message : 'Error desconocido',\n },\n { status: 500 },\n )\n }\n }\n}\n\n/**\n * Create a parameterizable DELETE handler for session endpoint\n *\n * DELETE /api/chat/session?conversationId=xxx\n * Close a chat session\n */\nexport function createSessionDELETEHandler(config: SessionEndpointConfig) {\n return async function DELETE(request: PayloadRequest) {\n try {\n if (!await config.checkPermissions(request)) {\n return jsonResponse({ error: 'No tienes permisos para acceder a esta sesión.' }, { status: 403 })\n }\n const userId = request.user?.id\n if (!request.url || !userId) {\n return jsonResponse({ error: 'URL not found' }, { status: 400 })\n }\n\n const { searchParams } = new URL(request.url)\n const conversationId = searchParams.get('conversationId')\n\n if (!conversationId) {\n return jsonResponse(\n { error: 'Se requiere un conversationId válido.' },\n { status: 400 },\n )\n }\n\n // Get Payload instance\n const payload = await config.getPayload()\n\n logger.info('Closing chat session', { conversationId, userId })\n\n const session = await closeSession(payload, userId, conversationId, config.sessionConfig)\n\n if (!session) {\n return jsonResponse(\n { error: 'Sesión de chat no encontrada o no tienes permisos.' },\n { status: 404 },\n )\n }\n\n logger.info('Chat session closed successfully', {\n conversationId,\n totalTokens: session.total_tokens,\n totalCost: session.total_cost,\n })\n\n return jsonResponse({\n success: true,\n message: 'Sesión cerrada correctamente',\n session: {\n conversation_id: conversationId,\n status: 'closed',\n total_tokens: session.total_tokens,\n total_cost: session.total_cost,\n },\n })\n } catch (error) {\n logger.error('Error closing chat session', error as Error, {\n conversationId: request.url ? new URL(request.url).searchParams.get('conversationId') : undefined,\n userId: request.user?.id,\n })\n\n return jsonResponse(\n {\n error: 'Error al cerrar la sesión. Por favor, inténtalo de nuevo.',\n details: error instanceof Error ? error.message : 'Error desconocido',\n },\n { status: 500 },\n )\n }\n }\n}\n\n/**\n * Default exports for Next.js App Router\n */\nexport { createSessionGETHandler as GET, createSessionDELETEHandler as DELETE }\n","import { PayloadRequest } from 'payload'\nimport { createTypesenseClient } from '../../../../../core/client/typesense-client.js'\nimport { fetchChunkById, type TypesenseConnectionConfig } from '../../../index.js'\nimport { jsonResponse } from '../../chat/validators/index.js'\nimport { logger } from '../../../../../core/logging/logger.js'\n\n/**\n * Configuration for chunks endpoint\n */\nexport type ChunksEndpointConfig = {\n /** Typesense connection config */\n typesense: TypesenseConnectionConfig\n /** Check permissions function */\n checkPermissions: (request: PayloadRequest) => Promise<boolean>;\n /** Valid collections for chunks */\n validCollections: string[]\n}\n\n/**\n * Create a parameterizable GET handler for chunks endpoint\n *\n * GET /api/chat/chunks/[id]?collection=article_web_chunk\n * Fetch the full chunk text from Typesense by document ID\n */\nexport function createChunksGETHandler(config: ChunksEndpointConfig) {\n return async function GET(\n request: PayloadRequest\n ) {\n try {\n if (!await config.checkPermissions(request)) {\n return jsonResponse({ error: 'No tienes permisos para acceder a este chunk.' 
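Closing a session mirrors the GET shape; the DELETE handler returns the session's final token and cost totals. A sketch under the same path assumptions:

```ts
const res = await fetch('/api/chat/session?conversationId=abc123', { method: 'DELETE' });
// 400 -> missing conversationId
// 404 -> session not found, or not owned by the requesting user
// 200 -> { success: true, session: { conversation_id, status: 'closed', total_tokens, total_cost } }
```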
}, { status: 403 })\n }\n if (!request.url || !request.user) {\n return jsonResponse({ error: 'URL not found' }, { status: 400 })\n }\n const id = request.routeParams?.id\n const url = new URL(request.url)\n const collectionName = url.searchParams.get('collection')\n\n // Validate chunk ID\n if (!id) {\n return jsonResponse({ error: 'Se requiere el ID del chunk' }, { status: 400 })\n }\n\n // Validate collection name\n if (!collectionName) {\n return jsonResponse(\n {\n error: 'Se requiere el parámetro collection',\n collections: config.validCollections,\n },\n { status: 400 },\n )\n }\n\n // Get Typesense client\n const client = createTypesenseClient(config.typesense)\n\n // Use the parameterizable function from the package\n const chunkData = await fetchChunkById(client, {\n chunkId: id as string,\n collectionName,\n validCollections: config.validCollections,\n })\n\n // Return the chunk data\n return jsonResponse(chunkData)\n } catch (error: unknown) {\n logger.error('Error fetching chunk', error as Error, {\n chunkId: request.routeParams?.id,\n collection: request.url ? new URL(request.url).searchParams.get('collection') : undefined,\n })\n\n // Handle known errors\n if (error instanceof Error) {\n if (error.message.includes('Invalid collection')) {\n return jsonResponse(\n {\n error: error.message,\n collections: config.validCollections,\n },\n { status: 400 },\n )\n }\n if (error.message.includes('not found')) {\n return jsonResponse({ error: 'Chunk no encontrado' }, { status: 404 })\n }\n }\n\n return jsonResponse(\n {\n error: 'Error al obtener el chunk',\n details: error instanceof Error ? error.message : 'Error desconocido',\n },\n { status: 500 },\n )\n }\n }\n}\n\n/**\n * Default export for Next.js App Router\n */\nexport { createChunksGETHandler as GET }\n","import type { PayloadRequest } from 'payload';\nimport { RAGFeatureConfig } from '../../../../../shared/types/plugin-types.js';\nimport { jsonResponse } from '../validators/index.js';\n\nexport type AgentsEndpointConfig = {\n ragConfig: RAGFeatureConfig;\n checkPermissions: (req: PayloadRequest) => Promise<boolean>;\n};\n\nexport function createAgentsGETHandler(config: AgentsEndpointConfig) {\n return async function GET() {\n try {\n const agents = config.ragConfig?.agents || [];\n \n // Map to PublicAgentInfo\n const publicAgents = agents.map(agent => ({\n slug: agent.slug,\n name: agent.name || agent.slug\n }));\n\n return jsonResponse({ agents: publicAgents }, { status: 200 });\n } catch (error) {\n return jsonResponse({ error: 'Internal Server Error' }, { status: 500 });\n }\n };\n}\n","/**\n * Payload CMS adapters for RAG endpoints\n *\n * These adapters convert the RAG API handlers (designed for standard Request/Response)\n * into Payload CMS handlers that work with Payload's endpoint system.\n */\n\nimport type { PayloadHandler } from \"payload\";\nimport type { TypesenseRAGPluginConfig } from \"../../plugin/rag-types.js\";\nimport { createChatPOSTHandler } from \"./endpoints/chat/route.js\";\nimport { defaultHandleNonStreamingResponse, defaultHandleStreamingResponse } from \"./stream-handlers/index.js\";\nimport { createSessionDELETEHandler, createSessionGETHandler } from \"./endpoints/chat/session/route.js\";\nimport { createChunksGETHandler } from \"./endpoints/chunks/[id]/route.js\";\nimport { createAgentsGETHandler } from \"./endpoints/chat/agents/route.js\";\n\n/**\n * Creates Payload handlers for RAG endpoints\n *\n * @param config - RAG plugin configuration (composable, doesn't depend on 
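The chunks endpoint resolves a single chunk document by ID, and only for collections whitelisted in `validCollections` (derived from the agents' `searchCollections` in the endpoint wiring below). A client sketch with placeholder IDs:

```ts
const res = await fetch('/api/chat/chunks/chunk_42?collection=article_web_chunk');
// 400 -> missing or invalid collection (the response body lists the valid ones)
// 404 -> 'Chunk no encontrado'
// 200 -> the full chunk document fetched from Typesense
```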
ModularPluginConfig)\n */\nexport function createRAGPayloadHandlers(\n config: TypesenseRAGPluginConfig\n): Array<{ path: string; method: 'connect' | 'delete' | 'get' | 'head' | 'options' | 'patch' | 'post' | 'put'; handler: PayloadHandler }> {\n const endpoints: Array<{ path: string; method: 'connect' | 'delete' | 'get' | 'head' | 'options' | 'patch' | 'post' | 'put'; handler: PayloadHandler }> = [];\n\n // Validate required config\n if (!config.agents || config.agents.length === 0 || !config.callbacks) {\n return endpoints;\n }\n\n const { agents, callbacks, typesense } = config;\n\n // Get valid collections from agents configuration\n const agentCollections = agents.flatMap(agent => agent.searchCollections) || [];\n const validCollections = Array.from(new Set(agentCollections));\n\n // Build RAG feature config for handlers that still need it\n const ragFeatureConfig = {\n enabled: true,\n agents,\n callbacks,\n hybrid: config.hybrid,\n hnsw: config.hnsw,\n advanced: config.advanced,\n };\n\n // Add endpoints\n endpoints.push({\n path: \"/chat\",\n method: \"post\" as const,\n handler: createChatPOSTHandler({\n collectionName: 'chat-sessions', // Default fallback\n checkPermissions: callbacks.checkPermissions,\n typesense,\n rag: ragFeatureConfig,\n getPayload: callbacks.getPayload,\n checkTokenLimit: callbacks.checkTokenLimit,\n getUserUsageStats: callbacks.getUserUsageStats,\n saveChatSession: callbacks.saveChatSession,\n handleStreamingResponse: defaultHandleStreamingResponse,\n handleNonStreamingResponse: defaultHandleNonStreamingResponse,\n createEmbeddingSpending: callbacks.createEmbeddingSpending,\n estimateTokensFromText: callbacks.estimateTokensFromText,\n embeddingConfig: config.embeddingConfig,\n }),\n });\n\n endpoints.push({\n path: \"/chat/session\",\n method: \"get\" as const,\n handler: createSessionGETHandler({\n getPayload: callbacks.getPayload,\n checkPermissions: callbacks.checkPermissions,\n }),\n });\n\n endpoints.push({\n path: \"/chat/session\",\n method: \"delete\" as const,\n handler: createSessionDELETEHandler({\n getPayload: callbacks.getPayload,\n checkPermissions: callbacks.checkPermissions,\n }),\n });\n\n endpoints.push({\n path: \"/chat/chunks/:id\",\n method: \"get\" as const,\n handler: createChunksGETHandler({\n typesense,\n checkPermissions: callbacks.checkPermissions,\n validCollections,\n }),\n });\n\n endpoints.push({\n path: \"/chat/agents\",\n method: \"get\" as const,\n handler: createAgentsGETHandler({\n ragConfig: ragFeatureConfig,\n checkPermissions: callbacks.checkPermissions,\n }),\n });\n\n return endpoints;\n}\n","import type { PayloadHandler } from \"payload\";\nimport { ModularPluginConfig } from \"../../../../index.js\";\n\n/**\n * Creates a handler for listing available search collections\n */\nexport const createCollectionsHandler = (\n pluginOptions: ModularPluginConfig\n): PayloadHandler => {\n return () => {\n try {\n // Flatten table configs to collections list\n const collections: Array<Record<string, unknown>> = [];\n for (const [slug, tableConfigs] of Object.entries(\n pluginOptions.collections || {}\n )) {\n if (Array.isArray(tableConfigs)) {\n // Get first enabled config for collection metadata\n const firstEnabledConfig = tableConfigs.find((config) => config.enabled);\n if (firstEnabledConfig) {\n // Extract fields based on mode\n let fields: { name: string; facet?: boolean; index?: boolean }[] = [];\n fields = firstEnabledConfig.fields;\n const facetFields = fields.filter(f => f.facet).map(f => f.name);\n const 
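`createRAGPayloadHandlers` returns `{ path, method, handler }` entries, the shape Payload's `endpoints` array accepts, so mounting is a spread. A sketch assuming the function is reachable from the package root and that `myRagConfig` is a valid `TypesenseRAGPluginConfig` (both assumptions, not confirmed by this diff):

```ts
import { createRAGPayloadHandlers } from '@nexo-labs/payload-typesense'; // assumed export
import { myRagConfig } from './rag-config.js'; // hypothetical host-app config

// Registers: POST /chat, GET + DELETE /chat/session,
// GET /chat/chunks/:id, GET /chat/agents
const ragEndpoints = createRAGPayloadHandlers(myRagConfig);

// Payload config fragment:
export const endpoints = [...ragEndpoints];
```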
searchFields = fields.filter(f => f.index !== false).map(f => f.name); // Default to index true unless explicitly false? Or explicit index?\n // In our new config, index is optional, defaulting to... ?\n // Let's assume if it's in the fields list, it's relevant.\n // Actually, we should check 'index' property if we want to be precise.\n // But for now, let's just map all fields as search fields if they are not facets only?\n // The UI probably needs to know what to search.\n \n collections.push({\n slug,\n displayName:\n firstEnabledConfig.displayName ||\n slug.charAt(0).toUpperCase() + slug.slice(1),\n facetFields,\n searchFields,\n });\n }\n }\n }\n\n return Response.json({\n categorized: false, // Categorized setting moved or removed\n collections,\n });\n } catch (_error) {\n // Handle collections error\n return Response.json(\n { error: \"Failed to get collections\" },\n { status: 500 }\n );\n }\n };\n};\n\n\n\n","/**\n * Simple in-memory cache for search results\n * In production, consider using Redis or similar\n */\n\nimport type { CacheEntry, CacheOptions } from '../types/types.js'\n\nclass SearchCache<T = unknown> {\n private cache = new Map<string, CacheEntry<T>>()\n private readonly defaultTTL: number\n private readonly maxSize: number\n\n constructor(options: CacheOptions = {}) {\n this.defaultTTL = options.ttl || 5 * 60 * 1000 // 5 minutes default\n this.maxSize = options.maxSize || 1000 // 1000 entries default\n }\n\n /**\n * Generate cache key from search parameters\n */\n private generateKey(query: string, collection?: string, params?: Record<string, any>): string {\n const baseKey = `${collection || 'universal'}:${query}`\n if (params) {\n const sortedParams = Object.keys(params)\n .sort()\n .map(key => `${key}=${params[key]}`)\n .join('&')\n return `${baseKey}:${sortedParams}`\n }\n return baseKey\n }\n\n /**\n * Clear expired entries\n */\n cleanup(): void {\n const now = Date.now()\n for (const [key, entry] of this.cache.entries()) {\n if (now - entry.timestamp > entry.ttl) {\n this.cache.delete(key)\n }\n }\n }\n\n /**\n * Clear cache entries matching pattern\n */\n clear(pattern?: string): void {\n if (!pattern) {\n this.cache.clear()\n return\n }\n\n for (const key of this.cache.keys()) {\n if (key.includes(pattern)) {\n this.cache.delete(key)\n }\n }\n }\n\n /**\n * Get cached search result\n */\n get(query: string, collection?: string, params?: Record<string, any>): null | T {\n const key = this.generateKey(query, collection || '', params)\n const entry = this.cache.get(key)\n\n if (!entry) {\n return null\n }\n\n // Check if entry has expired\n if (Date.now() - entry.timestamp > entry.ttl) {\n this.cache.delete(key)\n return null\n }\n\n return entry.data\n }\n\n /**\n * Get cache statistics\n */\n getStats(): { hitRate?: number; maxSize: number; size: number } {\n return {\n maxSize: this.maxSize,\n size: this.cache.size\n }\n }\n\n /**\n * Check if cache has valid entry\n */\n has(query: string, collection?: string, params?: Record<string, any>): boolean {\n return this.get(query, collection, params) !== null\n }\n\n /**\n * Set cached search result\n */\n set(\n query: string, \n data: T, \n collection?: string, \n params?: Record<string, any>,\n ttl?: number\n ): void {\n const key = this.generateKey(query, collection || '', params)\n \n // Enforce max size by removing oldest entries\n if (this.cache.size >= this.maxSize) {\n const oldestKey = this.cache.keys().next().value\n if (oldestKey) {\n this.cache.delete(oldestKey)\n }\n }\n\n this.cache.set(key, {\n 
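`SearchCache` keys entries by query, collection, and a sorted render of the params object, with a 5-minute default TTL and oldest-first eviction at `maxSize`. A usage sketch against the module-level `searchCache` instance (internal to the package; shown here as if imported):

```ts
import { searchCache } from './shared/cache/cache.js'; // internal module path

searchCache.set('rust', { hits: [], found: 0 }, 'posts', { page: 1 });

searchCache.get('rust', 'posts', { page: 1 }); // -> { hits: [], found: 0 }
searchCache.get('rust', 'posts', { page: 2 }); // -> null (params are part of the key)
// After the 5-minute TTL the entry is dropped lazily on the next get().
```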
data,\n timestamp: Date.now(),\n ttl: ttl || this.defaultTTL\n })\n }\n}\n\n// Global cache instance\nexport const searchCache = new SearchCache({\n maxSize: 1000,\n ttl: 5 * 60 * 1000 // 5 minutes\n})\n\n// Cleanup expired entries every 10 minutes\nsetInterval(() => {\n searchCache.cleanup()\n}, 10 * 60 * 1000)\n","/**\n * Default values for vector search parameters\n *\n * K is set high because:\n * - Documents are split into chunks (avg 5-10 chunks per doc)\n * - To get 20 unique documents, we need K = 20 docs × 7 chunks/doc = 140\n * - Higher K = better coverage but slightly slower (still fast with good indexing)\n */\nexport const DEFAULT_K = 150; // High K for good chunk coverage\nexport const DEFAULT_PAGE = 1;\nexport const DEFAULT_PER_PAGE = 20; // Show more results per page (was 10)\nexport const DEFAULT_ALPHA = 0.7;\n\n/**\n * Default search field names when not specified\n */\nexport const DEFAULT_SEARCH_FIELDS = [\"title\", \"content\"];\n\n/**\n * Default snippet threshold for search results\n */\nexport const DEFAULT_SNIPPET_THRESHOLD = 30;\n\n/**\n * Default typo tokens threshold\n */\nexport const DEFAULT_TYPO_TOKENS_THRESHOLD = 1;\n\n/**\n * Default number of typos allowed\n */\nexport const DEFAULT_NUM_TYPOS = 0;\n","import { SearchResponse } from \"typesense/lib/Typesense/Documents.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport type { CombinedSearchResult, CollectionSearchResult, SearchHit } from \"../types.js\";\n\n/**\n * Processes traditional search results from a single collection\n */\nexport const processSingleCollectionTraditionalResults = (\n results: SearchResponse<object>,\n collectionName: string,\n config: TableConfig\n): CollectionSearchResult => {\n return {\n collection: collectionName,\n displayName: config?.displayName || collectionName,\n icon: \"📄\",\n found: results.found,\n hits: results.hits?.map((hit): SearchHit => ({\n ...hit,\n collection: collectionName,\n displayName: config?.displayName || collectionName,\n icon: \"📄\",\n document: (hit.document || {}) as Record<string, unknown>,\n })) || [],\n };\n};\n\n/**\n * Combines traditional search results from multiple collections\n */\nexport const combineTraditionalResults = (\n results: CollectionSearchResult[],\n options: {\n page: number;\n per_page: number;\n query: string;\n }\n): CombinedSearchResult => {\n const { page, per_page, query } = options;\n\n const combinedHits = results.flatMap((result) => result.hits || []);\n const totalFound = results.reduce(\n (sum, result) => sum + (result.found || 0),\n 0\n );\n\n // Sort by text match score\n combinedHits.sort((a, b) => (b.text_match || 0) - (a.text_match || 0));\n\n const searchResult: CombinedSearchResult = {\n collections: results.map((r) => ({\n collection: r.collection,\n displayName: r.displayName,\n error: r.error,\n found: r.found || 0,\n icon: r.icon,\n })),\n found: totalFound,\n hits: combinedHits.slice(0, per_page),\n page,\n request_params: { per_page, query },\n search_cutoff: false,\n search_time_ms: 0,\n };\n\n return searchResult;\n};\n\n","import {\n DEFAULT_NUM_TYPOS,\n DEFAULT_SEARCH_FIELDS,\n DEFAULT_SNIPPET_THRESHOLD,\n DEFAULT_TYPO_TOKENS_THRESHOLD,\n} from \"../constants.js\";\nimport type { TraditionalSearchParams } from \"../types.js\";\n\n/**\n * Builds traditional search parameters for a single collection\n */\nexport const buildTraditionalSearchParams = (\n query: string,\n options: {\n page: number;\n per_page: number;\n searchFields?: string[];\n sort_by?: string;\n 
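The `DEFAULT_K = 150` comment encodes simple arithmetic: to surface about 20 unique documents when each averages about 7 chunks, the nearest-neighbor search must return roughly 140 chunks, and 150 leaves headroom:

```ts
const targetDocs = 20;
const avgChunksPerDoc = 7;
const neededK = targetDocs * avgChunksPerDoc; // 140
// DEFAULT_K = 150 >= 140, so a page of 20 documents survives chunk de-duplication.
```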
exclude_fields?: string;\n }\n): TraditionalSearchParams => {\n const {\n page,\n per_page,\n searchFields = DEFAULT_SEARCH_FIELDS,\n sort_by,\n exclude_fields,\n } = options;\n\n const params: TraditionalSearchParams = {\n highlight_full_fields: searchFields.join(\",\"),\n num_typos: DEFAULT_NUM_TYPOS,\n page,\n per_page,\n q: query,\n query_by: searchFields.join(\",\"),\n snippet_threshold: DEFAULT_SNIPPET_THRESHOLD,\n typo_tokens_threshold: DEFAULT_TYPO_TOKENS_THRESHOLD,\n exclude_fields: exclude_fields,\n sort_by: sort_by,\n };\n\n return params;\n};\n","import type { Client } from \"typesense\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { processSingleCollectionTraditionalResults } from \"../results/process-traditional-results.js\";\nimport type { CollectionSearchResult } from \"../types.js\";\nimport { buildTraditionalSearchParams } from \"./build-params.js\";\n\n/**\n * Performs a traditional search on a single collection\n */\nexport const searchTraditionalCollection = async (\n typesenseClient: Client,\n collectionName: string,\n config: TableConfig,\n options: {\n query: string;\n page: number;\n per_page: number;\n searchFields?: string[];\n sort_by?: string;\n exclude_fields?: string;\n skipChunkFilter?: boolean; // Skip the !is_chunk filter for simple searches\n }\n): Promise<CollectionSearchResult> => {\n try {\n const buildOptions: {\n page: number;\n per_page: number;\n searchFields?: string[];\n sort_by?: string;\n exclude_fields?: string;\n } = {\n page: options.page,\n per_page: options.per_page,\n };\n\n // Extract search fields from config if not provided in options\n if (options.searchFields) {\n buildOptions.searchFields = options.searchFields;\n } else if (config) {\n let fields: { name: string; index?: boolean; type?: string }[] = [];\n fields = config.fields;\n // Filter for indexed fields that are searchable (string or string[] types only)\n // Typesense only accepts string/string[] fields in query_by parameter\n const searchFields = fields\n .filter(f =>\n f.index !== false &&\n (f.type === 'string' || f.type === 'string[]')\n )\n .map(f => f.name);\n if (searchFields.length > 0) {\n buildOptions.searchFields = searchFields;\n }\n }\n\n if (options.sort_by) {\n buildOptions.sort_by = options.sort_by;\n }\n\n if (options.exclude_fields) {\n buildOptions.exclude_fields = options.exclude_fields;\n }\n\n const searchParameters = buildTraditionalSearchParams(\n options.query,\n buildOptions\n );\n\n // Try to add chunk filter, but handle gracefully if schema doesn't support it\n // Skip chunk filter for simple searches since we're already searching main collections only\n if (!options.skipChunkFilter) {\n try {\n // First check if schema supports is_chunk field\n const collectionSchema = await typesenseClient\n .collections(collectionName)\n .retrieve();\n\n const fieldNames = collectionSchema.fields?.map(f => f.name) || [];\n if (fieldNames.includes(\"is_chunk\")) {\n // Schema supports chunking, add filter\n searchParameters.filter_by = \"!is_chunk:true\";\n }\n // If schema doesn't support is_chunk, don't add filter (backward compatibility)\n } catch (schemaError: unknown) {\n // If we can't retrieve schema, don't add filter (will work for old collections)\n }\n }\n\n const results = await typesenseClient\n .collections(collectionName)\n .documents()\n .search(searchParameters);\n\n return processSingleCollectionTraditionalResults(\n results,\n collectionName,\n config\n );\n } catch (error) {\n return {\n collection: 
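Given those constants, `buildTraditionalSearchParams` produces a fully-populated Typesense search object. One call with its expected result, using the default search fields:

```ts
buildTraditionalSearchParams('design patterns', { page: 1, per_page: 20 });
// -> {
//   q: 'design patterns',
//   query_by: 'title,content',            // DEFAULT_SEARCH_FIELDS joined
//   highlight_full_fields: 'title,content',
//   num_typos: 0,
//   snippet_threshold: 30,
//   typo_tokens_threshold: 1,
//   page: 1,
//   per_page: 20,
//   exclude_fields: undefined,
//   sort_by: undefined,
// }
```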
collectionName,\n displayName: config?.displayName || collectionName,\n error: error instanceof Error ? error.message : \"Unknown error\",\n found: 0,\n hits: [],\n icon: \"📄\",\n };\n }\n};\n","import type { Client } from \"typesense\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { searchTraditionalCollection } from \"../../../traditional/search-collection.js\";\nimport type { UniversalSearchOptions, CombinedSearchResult } from \"../../../types.js\";\nimport { combineTraditionalResults } from \"../../../results/process-traditional-results.js\";\nimport { searchCache } from \"../../../../../shared/cache/cache.js\";\nimport { logger } from \"../../../../../core/logging/logger.js\";\n\nexport const performTraditionalMultiCollectionSearch = async (\n typesenseClient: Client,\n enabledCollections: Array<[string, TableConfig]>,\n query: string,\n options: UniversalSearchOptions\n): Promise<CombinedSearchResult> => {\n logger.info('Performing traditional multi-collection search', {\n query,\n collections: enabledCollections.map(([name]) => name),\n });\n\n // Determine search fields (override if query_by is provided)\n const searchFieldsOverride = options.query_by\n ? options.query_by.split(',').map(f => f.trim())\n : undefined;\n\n const searchPromises = enabledCollections.map(\n async ([collectionName, config]) => {\n try {\n const result = await searchTraditionalCollection(\n typesenseClient,\n collectionName,\n config,\n {\n query,\n page: options.page,\n per_page: options.per_page,\n ...(searchFieldsOverride\n ? { searchFields: searchFieldsOverride }\n : (() => {\n // Extract default search fields from config\n if (!config) return {};\n let fields: { name: string; index?: boolean; type?: string }[] = [];\n fields = config.fields;\n // Filter for indexed fields that are searchable (string or string[] types only)\n // Typesense only accepts string/string[] fields in query_by parameter\n const searchFields = fields\n .filter(f =>\n f.index !== false &&\n (f.type === 'string' || f.type === 'string[]')\n )\n .map(f => f.name);\n return searchFields.length > 0 ? 
{ searchFields } : {};\n })()\n ),\n ...(options.sort_by && { sort_by: options.sort_by }),\n ...(options.exclude_fields && { exclude_fields: options.exclude_fields }),\n }\n );\n return result;\n } catch (error) {\n logger.error('Error searching collection', error as Error, {\n collection: collectionName,\n query,\n });\n throw error;\n }\n }\n );\n\n const results = await Promise.all(searchPromises);\n const fallbackResult = combineTraditionalResults(results, {\n page: options.page,\n per_page: options.per_page,\n query,\n });\n\n searchCache.set(query, fallbackResult, \"universal\", options);\n return fallbackResult;\n};\n","import type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport {\n DEFAULT_PAGE,\n DEFAULT_PER_PAGE,\n} from \"../constants.js\";\nimport type {\n ProcessVectorSearchResultsOptions,\n SearchHit,\n CombinedSearchResult,\n} from \"../types.js\";\n\n/**\n * Typesense search result from a single collection\n */\ninterface TypesenseCollectionResult {\n found?: number;\n error?: string;\n hits?: Array<{\n document?: Record<string, unknown>;\n vector_distance?: number;\n text_match?: number;\n [key: string]: unknown;\n }>;\n}\n\n/**\n * Typesense multi-search response\n */\ninterface TypesenseMultiSearchResponse {\n results?: TypesenseCollectionResult[];\n}\n\n/**\n * Internal result type with collection metadata\n */\ninterface CollectionResult {\n collection: string;\n displayName: string;\n icon: string;\n found: number;\n error: string | undefined;\n hits: SearchHit[];\n}\n\n/**\n * Processes and combines vector search results from multiple collections\n */\nexport const processVectorSearchResults = (\n multiSearchResults: TypesenseMultiSearchResponse,\n enabledCollections: Array<[string, TableConfig]>,\n options: ProcessVectorSearchResultsOptions\n): CombinedSearchResult => {\n const {\n per_page = DEFAULT_PER_PAGE,\n page = DEFAULT_PAGE,\n k,\n query,\n vector,\n } = options;\n\n const rawResults = multiSearchResults.results?.map((result: TypesenseCollectionResult, index: number): CollectionResult | null => {\n if (!enabledCollections[index]) {\n return null;\n }\n const [collectionName, config] = enabledCollections[index];\n\n return {\n collection: collectionName,\n displayName: config?.displayName || collectionName,\n icon: \"📄\",\n found: result.found || 0,\n error: result.error || undefined,\n hits:\n result.hits?.map((hit): SearchHit => {\n const doc = hit.document || {};\n const hint = doc.chunk_text\n ? String(doc.chunk_text).substring(0, 300) + '...'\n : doc.description\n ? String(doc.description).substring(0, 300) + '...'\n : doc.hint;\n\n return {\n ...hit,\n collection: collectionName,\n displayName: config?.displayName || collectionName,\n icon: \"📄\",\n document: {\n ...doc,\n hint,\n // Keep chunk_text as a separate field for chunks\n ...(doc.chunk_text ? { chunk_text: doc.chunk_text } : {}),\n },\n vector_distance: hit.vector_distance,\n text_match: hit.text_match,\n };\n }) || [],\n };\n }) || [];\n\n const results: CollectionResult[] = rawResults.filter((r: CollectionResult | null): r is CollectionResult => r !== null);\n\n // Combine results\n const combinedHits = results.flatMap((result) => result.hits);\n const totalFound = results.reduce(\n (sum, result) => sum + result.found,\n 0\n );\n\n // Sort by vector distance (if available) or relevance\n combinedHits.sort((a, b) => {\n const aDistance = a.vector_distance ?? Infinity;\n const bDistance = b.vector_distance ?? 
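Combined vector hits are ranked by ascending `vector_distance` (smaller is more similar), with hits lacking a distance pushed to the end via `Infinity`. The rule in isolation:

```ts
const hits: Array<{ id: string; vector_distance?: number }> = [
  { id: 'a', vector_distance: 0.42 },
  { id: 'b' },                        // no distance -> treated as Infinity
  { id: 'c', vector_distance: 0.18 },
];

hits.sort((x, y) => (x.vector_distance ?? Infinity) - (y.vector_distance ?? Infinity));
// -> c (0.18), a (0.42), b (last)
```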
Infinity;\n return aDistance - bDistance;\n });\n\n const searchResult: CombinedSearchResult = {\n collections: results.map((r: CollectionResult) => ({\n collection: r.collection,\n displayName: r.displayName,\n error: r.error,\n found: r.found || 0,\n icon: r.icon,\n })),\n found: totalFound,\n hits: combinedHits.slice(0, per_page),\n page,\n request_params: {\n k: k,\n per_page,\n query: query || null,\n vector: vector ? \"provided\" : null,\n },\n search_cutoff: false,\n search_time_ms: 0,\n };\n\n return searchResult;\n};\n","import {\n DEFAULT_ALPHA,\n DEFAULT_K,\n DEFAULT_PAGE,\n DEFAULT_PER_PAGE,\n DEFAULT_SEARCH_FIELDS,\n} from \"../constants.js\";\nimport type { BuildVectorSearchParamsOptions } from \"../types.js\";\n\n/**\n * Builds vector search parameters for a single collection\n */\nexport const buildVectorSearchParams = (\n searchVector: number[],\n options: BuildVectorSearchParamsOptions\n): Record<string, unknown> => {\n const {\n query,\n k = DEFAULT_K,\n hybrid = false,\n alpha = DEFAULT_ALPHA,\n page = DEFAULT_PAGE,\n per_page = DEFAULT_PER_PAGE,\n filter_by,\n sort_by,\n searchFields,\n } = options;\n\n const searchParams: Record<string, unknown> = {\n q: \"*\", // Required by Typesense, use wildcard for pure vector search\n vector_query: `embedding:([${searchVector.join(\",\")}], k:${k})`,\n per_page,\n page,\n exclude_fields: 'embedding',\n };\n\n // Add keyword search if hybrid mode\n if (hybrid && query) {\n searchParams.q = query;\n searchParams.query_by = searchFields?.join(\",\") || DEFAULT_SEARCH_FIELDS.join(\",\");\n searchParams.vector_query = `embedding:([${searchVector.join(\",\")}], k:${k}, alpha:${alpha})`;\n }\n\n // Add filters if provided\n if (filter_by) {\n searchParams.filter_by = filter_by;\n }\n\n // Add sorting if provided\n if (sort_by) {\n searchParams.sort_by = sort_by;\n }\n\n return searchParams;\n};\n\n\n\n","import type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport type { BuildMultiCollectionVectorSearchParamsOptions } from \"../types.js\";\nimport { buildVectorSearchParams } from \"./build-params.js\";\n\n/**\n * Builds multi-collection vector search parameters\n */\nexport const buildMultiCollectionVectorSearchParams = (\n searchVector: number[],\n enabledCollections: Array<[string, TableConfig]>,\n options: BuildMultiCollectionVectorSearchParamsOptions\n): Array<Record<string, unknown>> => {\n const {\n query,\n k,\n hybrid,\n alpha,\n page,\n per_page,\n filter_by,\n sort_by,\n } = options;\n\n return enabledCollections.map(([collectionName, config]) => {\n // Extract search fields\n let searchFields: string[] | undefined;\n if (config) {\n let fields: { name: string; index?: boolean; type?: string }[] = [];\n fields = config.fields;\n // Filter for indexed fields that are searchable (string or string[] types only)\n // Typesense only accepts string/string[] fields in query_by parameter\n const extracted = fields\n .filter(f =>\n f.index !== false &&\n (f.type === 'string' || f.type === 'string[]')\n )\n .map(f => f.name);\n if (extracted.length > 0) {\n searchFields = extracted;\n }\n }\n\n // Build search params - don't add filter_by here\n // The filter will be added conditionally in the handler after schema check\n const collectionSearchParams = buildVectorSearchParams(searchVector, {\n ...(query !== undefined && { query }),\n ...(k !== undefined && { k }),\n ...(hybrid !== undefined && { hybrid }),\n ...(alpha !== undefined && { alpha }),\n ...(page !== undefined && { page }),\n ...(per_page !== undefined && { 
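The `vector_query` strings above are easiest to read with a toy vector. Pure vector search uses the wildcard query; hybrid mode adds `query_by` and an `alpha` weight (page and per_page default to 1 and 20):

```ts
buildVectorSearchParams([0.1, 0.2, 0.3], {});
// -> q: '*', exclude_fields: 'embedding',
//    vector_query: 'embedding:([0.1,0.2,0.3], k:150)'

buildVectorSearchParams([0.1, 0.2, 0.3], { hybrid: true, query: 'rust' });
// -> q: 'rust', query_by: 'title,content',
//    vector_query: 'embedding:([0.1,0.2,0.3], k:150, alpha:0.7)'
```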
per_page }),\n // Don't add filter_by here - will be handled in handler after schema check\n ...(sort_by !== undefined && { sort_by }),\n ...(searchFields !== undefined && {\n searchFields: searchFields,\n }),\n });\n\n // Store filter_by separately - handler will add it conditionally\n return {\n collection: collectionName,\n ...collectionSearchParams,\n _filter_by: filter_by, // Internal flag for handler to check schema and add filter\n };\n });\n};\n\n\n\n","import type { EmbeddingProviderConfig } from \"../../../shared/types/plugin-types.js\";\nimport { generateEmbedding } from \"../../embedding/embeddings.js\";\n\n/**\n * Generates or retrieves a search vector from query text or provided vector\n */\nexport const generateOrGetVector = async (\n query?: string,\n vector?: number[],\n embeddingConfig?: EmbeddingProviderConfig\n): Promise<number[] | null> => {\n // Use provided vector if available\n if (vector && Array.isArray(vector) && vector.length > 0) {\n return vector;\n }\n\n // Generate embedding from query if vector not provided\n if (query) {\n const searchVector = await generateEmbedding(query, embeddingConfig);\n if (!searchVector || searchVector.length === 0) {\n return null;\n }\n return searchVector;\n }\n\n return null;\n};\n\n\n\n","import type { Client } from \"typesense\";\nimport type { ModularPluginConfig } from \"../../../core/config/types.js\";\nimport { logger } from \"../../../core/logging/logger.js\";\nimport { searchCache } from \"../../../shared/cache/cache.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { DEFAULT_ALPHA, DEFAULT_K } from \"../constants.js\";\nimport { performTraditionalMultiCollectionSearch } from \"../endpoints/handlers/executors/traditional-multi-collection-search.js\";\nimport { processVectorSearchResults } from \"../results/process-vector-results.js\";\nimport type { CombinedSearchResult, UniversalSearchOptions } from \"../types.js\";\nimport { buildMultiCollectionVectorSearchParams } from \"../vector/build-multi-collection-params.js\";\nimport { generateOrGetVector } from \"../vector/generate-vector.js\";\n\nexport class SearchService {\n constructor(\n private typesenseClient: Client,\n private pluginOptions: ModularPluginConfig\n ) {}\n\n async performSearch(\n query: string,\n targetCollections: Array<[string, TableConfig]>,\n options: UniversalSearchOptions\n ): Promise<CombinedSearchResult> {\n // Cache key generation\n const cacheKey = `search:${query}:${JSON.stringify(options)}:${targetCollections.map(c => c[0]).join(',')}`;\n const cachedResult = searchCache.get(query, cacheKey, options) as CombinedSearchResult | null;\n if (cachedResult) return cachedResult;\n\n const searchMode = options.mode || \"semantic\";\n\n // 1. Simple / Traditional Search\n if (searchMode === \"simple\") {\n return this.performTraditionalSearch(query, targetCollections, options);\n }\n\n // 2. 
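`generateOrGetVector` gives a caller-provided vector priority over embedding generation, and returns `null` when neither path yields one, which the search service then treats as the signal to fall back to traditional search:

```ts
await generateOrGetVector('rust', [0.1, 0.2]);   // -> [0.1, 0.2] (passed through)
await generateOrGetVector('rust');               // -> embedding of 'rust', or null on failure
await generateOrGetVector(undefined, undefined); // -> null (nothing to work with)
```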
Semantic / Hybrid Search\n const searchVector = await generateOrGetVector(\n query,\n undefined,\n this.pluginOptions.features.embedding\n );\n\n if (!searchVector) {\n // Fallback to traditional if vector generation fails\n return this.performTraditionalSearch(query, targetCollections, options);\n }\n\n try {\n // Execute Vector Search\n const results = await this.executeVectorSearch(query, searchVector, targetCollections, options);\n searchCache.set(query, results, cacheKey, options);\n return results;\n } catch (error) {\n logger.error(\"Vector search failed, falling back to traditional\", error as Error);\n return this.performTraditionalSearch(query, targetCollections, options);\n }\n }\n\n private async performTraditionalSearch(\n query: string,\n targetCollections: Array<[string, TableConfig]>,\n options: UniversalSearchOptions\n ): Promise<CombinedSearchResult> {\n return performTraditionalMultiCollectionSearch(\n this.typesenseClient,\n targetCollections,\n query,\n options\n );\n }\n\n private async executeVectorSearch(\n query: string,\n searchVector: number[],\n targetCollections: Array<[string, TableConfig]>,\n options: UniversalSearchOptions\n ): Promise<CombinedSearchResult> {\n const searches = buildMultiCollectionVectorSearchParams(\n searchVector,\n targetCollections,\n {\n query,\n k: Math.min(30, DEFAULT_K),\n hybrid: true,\n alpha: DEFAULT_ALPHA,\n page: options.page,\n per_page: options.per_page,\n ...(options.sort_by !== undefined && { sort_by: options.sort_by }),\n }\n );\n\n if (searches.length === 0) {\n return {\n collections: [],\n found: 0,\n hits: [],\n page: options.page,\n request_params: {\n per_page: options.per_page,\n query: query,\n },\n search_cutoff: false,\n search_time_ms: 0\n };\n }\n\n const multiSearchResults = await this.typesenseClient.multiSearch.perform({ searches });\n \n return processVectorSearchResults(\n multiSearchResults,\n targetCollections,\n {\n per_page: options.per_page,\n page: options.page,\n k: DEFAULT_K,\n query,\n }\n );\n }\n}\n","import type { CombinedSearchResult, SearchHit } from \"../../../types.js\";\n\n/**\n * Helper to resolve document type from collection name\n */\nfunction resolveDocumentType(collectionName: string): string {\n if (collectionName.includes('article')) return 'article';\n if (collectionName.includes('book')) return 'book';\n return 'document';\n}\n\n/**\n * Simplified document format for API responses\n */\ntype SimplifiedDocument = {\n id: string;\n title: string;\n slug: string;\n type: string;\n collection: string;\n};\n\n/**\n * Transform search response to simplified format\n */\nexport function transformToSimpleFormat(data: CombinedSearchResult): { documents: SimplifiedDocument[] } {\n if (!data.hits) {\n return { documents: [] };\n }\n\n const documents = data.hits.map((hit: SearchHit) => {\n const doc = hit.document || {};\n const collectionValue = hit.collection || doc.collection;\n const collection = typeof collectionValue === 'string' ? collectionValue : '';\n\n return {\n id: String(doc.id || ''),\n title: String(doc.title || 'Sin título'),\n slug: String(doc.slug || ''),\n type: resolveDocumentType(collection),\n collection: collection,\n };\n });\n\n return { documents };\n}\n","import type { TableConfig } from \"@nexo-labs/payload-indexer\";\n\n/**\n * Generates the Typesense collection name based on the configuration.\n *\n * Priority:\n * 1. Explicit `tableName` if provided.\n * 2. 
`collectionSlug` (fallback).\n *\n * @param collectionSlug The slug of the Payload collection\n * @param tableConfig The configuration for the specific table\n * @returns The generated Typesense collection name\n */\nexport const getTypesenseCollectionName = (\n collectionSlug: string,\n tableConfig: TableConfig\n): string => {\n return tableConfig.tableName ?? collectionSlug;\n};\n","import type { ModularPluginConfig } from \"../../../../../core/config/types.js\";\nimport { getTypesenseCollectionName } from \"../../../../../core/utils/naming.js\";\n\nexport class TargetCollectionResolver {\n private allowedTableNames: string[];\n\n constructor(private pluginOptions: ModularPluginConfig) {\n this.allowedTableNames = this.getAllowedTableNames(pluginOptions);\n }\n\n private getAllowedTableNames(\n pluginOptions: ModularPluginConfig\n ): string[] {\n const configuredAllowed = pluginOptions.features.search?.defaults?.tables || [];\n const allowedTableNames: Set<string> = new Set();\n const allTableNames: Set<string> = new Set();\n \n for (const [collectionSlug, tableConfigs] of Object.entries(\n pluginOptions.collections || {}\n )) {\n if (Array.isArray(tableConfigs)) {\n for (const tableConfig of tableConfigs) {\n if (!tableConfig.enabled) continue;\n \n const tableName = getTypesenseCollectionName(collectionSlug, tableConfig);\n allTableNames.add(tableName);\n \n // If no restrictions are configured, everything is allowed\n if (configuredAllowed.length === 0) {\n allowedTableNames.add(tableName);\n continue;\n }\n \n // STRICT MODE: Only allow if the exact table name is in the allowed list.\n // Do NOT allow by collection slug.\n if (configuredAllowed.includes(tableName)) {\n allowedTableNames.add(tableName);\n }\n }\n }\n }\n \n return Array.from(allowedTableNames);\n };\n \n \n /**\n * Resolves target table names based on request parameters.\n * Handles both multi-collection (array) and single-collection (slug) requests.\n * Enforces strict validation against allowed tables.\n */\n resolveTargetTables(\n collectionNameSlug: string | null,\n requestedCollections: string[] | undefined\n ): string[] {\n // Case 1: Multi-collection search (no path param)\n if (!collectionNameSlug) {\n if (requestedCollections && requestedCollections.length > 0) {\n // Strict filtering: Only keep requested tables that are explicitly allowed\n return requestedCollections.filter((c) =>\n this.allowedTableNames.includes(c)\n );\n }\n // Default: Return all allowed tables\n return this.allowedTableNames;\n }\n\n const targetTables: string[] = [];\n const tableConfigs =\n this.pluginOptions.collections?.[collectionNameSlug] || [];\n\n if (Array.isArray(tableConfigs)) {\n for (const config of tableConfigs) {\n if (config.enabled) {\n const tableName = getTypesenseCollectionName(\n collectionNameSlug,\n config\n );\n if (this.allowedTableNames.includes(tableName)) {\n targetTables.push(tableName);\n }\n }\n }\n }\n\n return targetTables;\n }\n}\n","import type { ModularPluginConfig } from \"../../../../../core/config/types.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { getTypesenseCollectionName } from \"../../../../../core/utils/naming.js\";\n\nexport class SearchConfigMapper {\n constructor(private pluginOptions: ModularPluginConfig) {}\n\n /**\n * Maps a list of table names to their full configuration objects.\n * Essential for the search service which needs config details (fields, weights, etc.)\n */\n mapTablesToConfigs(\n targetTableNames: string[]\n ): Array<[string, 
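`getTypesenseCollectionName` is a one-line priority rule, but it anchors both the target resolver and the config mapper, so it is worth pinning down (the `TableConfig` literals here are trimmed to the relevant fields):

```ts
getTypesenseCollectionName('posts', { enabled: true, tableName: 'posts_v2' } as TableConfig); // -> 'posts_v2'
getTypesenseCollectionName('posts', { enabled: true } as TableConfig);                        // -> 'posts'
```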
TableConfig]> {\n const searchConfigs: Array<[string, TableConfig]> = [];\n\n // Iterate through all collections in global config\n for (const [slug, configs] of Object.entries(\n this.pluginOptions.collections || {}\n )) {\n if (!Array.isArray(configs)) continue;\n\n for (const config of configs) {\n if (!config.enabled) continue;\n\n const tableName = getTypesenseCollectionName(slug, config);\n \n // If this table is in our target list, add it to the result\n if (targetTableNames.includes(tableName)) {\n searchConfigs.push([tableName, config]);\n }\n }\n }\n\n return searchConfigs;\n }\n}\n","/**\n * Configuration validation using Zod schemas\n */\n\nimport { z } from 'zod'\n\n// Validation result type\nexport interface ValidationResult {\n data?: ValidatedSearchParams\n errors?: string[]\n success: boolean\n}\n\n/**\n * Get configuration validation errors in a user-friendly format\n */\nexport function getValidationErrors(errors: string[]): string {\n return errors.map((error, index) => `${index + 1}. ${error}`).join('\\n')\n}\n\n/**\n * Validate search parameters\n */\nconst SearchParamsSchema = z.object({\n facets: z.array(z.string()).optional(),\n filters: z.record(z.string(), z.any()).optional(),\n highlight_fields: z.array(z.string()).optional(),\n num_typos: z.number().int().min(0).max(4).optional().default(0),\n page: z.number().int().min(1).optional().default(1),\n per_page: z.number().int().min(1).max(250).optional().default(10),\n q: z.string().min(1, 'Query parameter \"q\" is required'),\n snippet_threshold: z.number().int().min(0).max(100).optional().default(30),\n sort_by: z.string().optional(),\n typo_tokens_threshold: z.number().int().min(1).optional().default(1)\n})\n\nexport type ValidatedSearchParams = z.infer<typeof SearchParamsSchema>\n\n/**\n * Validate search parameters\n */\nexport function validateSearchParams(params: unknown): ValidationResult {\n try {\n const validatedParams = SearchParamsSchema.parse(params)\n return {\n data: validatedParams,\n success: true\n }\n } catch (error) {\n if (error instanceof z.ZodError) {\n const errors = error.issues.map((err) => {\n const path = err.path.length > 0 ? 
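`validateSearchParams` both validates and normalizes: Zod defaults fill in everything optional, so a bare query comes back fully populated, and each failure is reported with its path prefix. Illustrative calls (the exact Zod message text is version-dependent):

```ts
validateSearchParams({ q: 'hello' });
// -> { success: true, data: { q: 'hello', page: 1, per_page: 10, num_typos: 0,
//      snippet_threshold: 30, typo_tokens_threshold: 1 } }

validateSearchParams({ q: '', per_page: 9999 });
// -> { success: false, errors: [
//      'q: Query parameter "q" is required',
//      'per_page: <Zod max(250) message>',
//    ] }
```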
`${err.path.join('.')}: ` : ''\n return `${path}${err.message}`\n })\n \n return {\n errors,\n success: false\n }\n }\n \n return {\n errors: ['Invalid search parameters format'],\n success: false\n }\n }\n}\n","import { PayloadRequest } from \"payload\";\n\n/**\n * Extracts collection name from request URL or params\n */\nexport const extractCollectionName = (\n request: PayloadRequest\n): { collectionName: string; collectionNameStr: string } => {\n let collectionName: string;\n let collectionNameStr: string;\n\n if (request.url && typeof request.url === \"string\") {\n const url = new URL(request.url);\n const pathParts = url.pathname.split(\"/\");\n const searchIndex = pathParts.indexOf(\"search\");\n if (searchIndex !== -1 && pathParts[searchIndex + 1]) {\n collectionName = pathParts[searchIndex + 1] || \"\";\n collectionNameStr = String(collectionName);\n } else {\n collectionName = \"\";\n collectionNameStr = \"\";\n }\n } else {\n // Fallback to params extraction\n const params = request.routeParams;\n const paramCollectionName = params?.collectionName;\n collectionName = String(paramCollectionName || \"\");\n collectionNameStr = collectionName;\n }\n\n return { collectionName, collectionNameStr };\n};\n\n\n\n","/**\n * Extracts and validates search parameters from request query\n */\nexport const extractSearchParams = (\n query: Record<string, unknown>\n): {\n q: string;\n page: number;\n per_page: number;\n sort_by?: string;\n mode?: 'simple' | 'semantic';\n collections?: string[];\n exclude_fields?: string;\n query_by?: string;\n simple?: boolean;\n errors?: string[];\n} => {\n const q = String(query?.q || \"\");\n const pageParam = query?.page;\n const perPageParam = query?.per_page;\n const page = pageParam ? parseInt(String(pageParam), 10) : 1;\n const per_page = perPageParam ? parseInt(String(perPageParam), 10) : 10;\n const sort_by = query?.sort_by as string | undefined;\n const mode = query?.mode as 'simple' | 'semantic' | undefined;\n\n // New parameters for collection filtering and simplified response\n const collectionParam = query?.collection;\n const collections: string[] | undefined = collectionParam\n ? Array.isArray(collectionParam)\n ? 
collectionParam.map(c => String(c))\n : [String(collectionParam)]\n : undefined;\n\n const exclude_fields = query?.exclude_fields as string | undefined;\n const query_by = query?.query_by as string | undefined;\n const simpleParam = query?.simple;\n const simple = simpleParam === 'true' || simpleParam === true || simpleParam === '1';\n\n const errors: string[] = [];\n\n // Validate parsed numbers\n if (isNaN(page) || page < 1) {\n errors.push(\"Invalid page parameter\");\n }\n if (isNaN(per_page) || per_page < 1 || per_page > 250) {\n errors.push(\"Invalid per_page parameter\");\n }\n\n const result: {\n q: string;\n page: number;\n per_page: number;\n sort_by?: string;\n mode?: 'simple' | 'semantic';\n collections?: string[];\n exclude_fields?: string;\n query_by?: string;\n simple?: boolean;\n errors?: string[];\n } = {\n q,\n page,\n per_page,\n };\n\n if (sort_by) {\n result.sort_by = sort_by;\n }\n\n if (mode) {\n result.mode = mode;\n }\n\n if (collections && collections.length > 0) {\n result.collections = collections;\n }\n\n if (exclude_fields) {\n result.exclude_fields = exclude_fields;\n }\n\n if (query_by) {\n result.query_by = query_by;\n }\n\n if (simple) {\n result.simple = simple;\n }\n\n if (errors.length > 0) {\n result.errors = errors;\n }\n\n return result;\n};\n","import { PayloadRequest } from \"payload\";\nimport {\n getValidationErrors,\n validateSearchParams,\n} from \"../../../../../core/config/config-validation.js\";\nimport { extractCollectionName } from \"../../../utils/extract-collection-name.js\";\nimport { extractSearchParams } from \"../../../utils/extract-search-params.js\";\n\n/**\n * Result type for request validation\n */\nexport type ValidationResult =\n | { success: false; error: Response }\n | {\n success: true;\n collectionName: string | null;\n collectionNameStr: string;\n searchParams: ReturnType<typeof extractSearchParams>;\n };\n\n/**\n * Validates search request and returns parsed parameters\n */\nexport function validateSearchRequest(request: PayloadRequest): ValidationResult {\n const { query } = request;\n const { collectionName, collectionNameStr } = extractCollectionName(request);\n const searchParams = extractSearchParams(query as Record<string, unknown>);\n\n // Check for parsing errors\n if (searchParams.errors && searchParams.errors.length > 0) {\n return {\n success: false,\n error: Response.json({ error: searchParams.errors[0] }, { status: 400 }),\n };\n }\n\n // Validate search parameters\n const validation = validateSearchParams({\n page: searchParams.page,\n per_page: searchParams.per_page,\n q: searchParams.q,\n sort_by: searchParams.sort_by,\n });\n\n if (!validation.success) {\n return {\n success: false,\n error: Response.json(\n {\n details: getValidationErrors(validation.errors || []),\n error: \"Invalid search parameters\",\n },\n { status: 400 }\n ),\n };\n }\n\n return { success: true, collectionName, collectionNameStr, searchParams };\n}\n","import type { PayloadHandler, PayloadRequest } from \"payload\";\nimport type { Client } from \"typesense\";\nimport type { ModularPluginConfig } from \"../../../../core/config/types.js\";\nimport { SearchService } from \"../../services/search-service.js\";\nimport {\n SearchConfigMapper,\n TargetCollectionResolver,\n transformToSimpleFormat,\n} from \"./utils/index.js\";\nimport { validateSearchRequest } from \"./validators/index.js\";\n\n/**\n * Creates a handler for standard search requests\n */\nexport const createSearchHandler = (\n typesenseClient: Client,\n pluginOptions: 
ModularPluginConfig\n): PayloadHandler => {\n const searchService = new SearchService(typesenseClient, pluginOptions);\n const targetResolver = new TargetCollectionResolver(pluginOptions);\n const configMapper = new SearchConfigMapper(pluginOptions);\n\n return async (request: PayloadRequest) => {\n try {\n // 1. Validate Request\n const validated = validateSearchRequest(request);\n if (!validated.success) return validated.error;\n\n const { collectionName, searchParams } = validated;\n\n // 2. Resolve Target Tables (Atomized Logic)\n const targetCollections = targetResolver.resolveTargetTables(\n collectionName, // Pass null if multi-search, or slug if single\n searchParams.collections\n );\n\n // Validation: Check if we have valid targets\n if (targetCollections.length === 0) {\n const isMultiSearch = !collectionName;\n const hasExplicitRequest = isMultiSearch && searchParams.collections && searchParams.collections.length > 0;\n \n if (hasExplicitRequest) {\n return Response.json({ error: \"None of the requested collections are allowed\" }, { status: 403 });\n }\n return Response.json({ error: \"Collection not allowed or not enabled\" }, { status: 403 });\n }\n\n if (!searchParams.q || searchParams.q.trim() === \"\") {\n return Response.json({ error: 'Query parameter \"q\" is required' }, { status: 400 });\n }\n\n // 3. Prepare Search Configuration (Atomized Logic)\n const searchConfigs = configMapper.mapTablesToConfigs(targetCollections);\n\n // 4. Execute Search via Service\n const searchResult = await searchService.performSearch(\n searchParams.q,\n searchConfigs,\n {\n filters: {},\n page: searchParams.page,\n per_page: searchParams.per_page,\n sort_by: searchParams.sort_by,\n mode: searchParams.mode,\n exclude_fields: searchParams.exclude_fields,\n query_by: searchParams.query_by,\n }\n );\n\n // 5. Format Response\n if (searchParams.simple) {\n return Response.json(transformToSimpleFormat(searchResult));\n }\n\n return Response.json(searchResult);\n\n } catch (error) {\n return Response.json(\n {\n details: error instanceof Error ? 
error.message : \"Unknown error\",\n error: \"Search handler failed\",\n },\n { status: 500 }\n );\n }\n };\n};\n","import type { Client } from \"typesense\";\n\nimport type { ModularPluginConfig } from \"../../core/config/types.js\";\nimport {\n createCollectionsHandler,\n createSearchHandler,\n} from \"./endpoints/handlers/index.js\";\n\n\nexport const createSearchEndpoints = (\n typesenseClient: Client,\n pluginOptions: ModularPluginConfig\n) => {\n return [\n {\n handler: createCollectionsHandler(pluginOptions),\n method: \"get\" as const,\n path: \"/search/collections\",\n },\n {\n handler: createSearchHandler(typesenseClient, pluginOptions),\n method: \"get\" as const,\n path: \"/search/:collectionName\",\n },\n {\n handler: createSearchHandler(typesenseClient, pluginOptions),\n method: \"get\" as const,\n path: \"/search\",\n },\n ];\n};\n","/**\n * Constants for payload-typesense plugin\n * Centralizes all magic numbers and configuration defaults\n */\n\n// ============================================================================\n// EMBEDDING CONSTANTS\n// ============================================================================\n\n/**\n * Default dimensions for OpenAI text-embedding-3-large model\n */\nexport const DEFAULT_EMBEDDING_DIMENSIONS = 3072;\n\n// ============================================================================\n// SEARCH CONSTANTS\n// ============================================================================\n\n/**\n * Default alpha value for hybrid search (0 = pure semantic, 1 = pure keyword)\n */\nexport const DEFAULT_HYBRID_SEARCH_ALPHA = 0.5;\n\n/**\n * Default number of search results to return\n */\nexport const DEFAULT_SEARCH_LIMIT = 10;\n\n// ============================================================================\n// CACHE CONSTANTS\n// ============================================================================\n\n/**\n * Default TTL for cache entries (in milliseconds) - 5 minutes\n */\nexport const DEFAULT_CACHE_TTL_MS = 5 * 60 * 1000;\n\n// ============================================================================\n// RAG CONSTANTS\n// ============================================================================\n\n/**\n * Default maximum tokens for RAG responses\n */\nexport const DEFAULT_RAG_MAX_TOKENS = 1000;\n\n/**\n * Default number of search results to use for RAG context\n */\nexport const DEFAULT_RAG_CONTEXT_LIMIT = 5;\n\n/**\n * Default session TTL (in seconds) - 30 minutes\n */\nexport const DEFAULT_SESSION_TTL_SEC = 30 * 60;\n\n/**\n * Default OpenAI model for RAG chat\n */\nexport const DEFAULT_RAG_LLM_MODEL = 'gpt-4o-mini';\n","import type { CollectionCreateSchema } from \"typesense/lib/Typesense/Collections.js\";\nimport { DEFAULT_EMBEDDING_DIMENSIONS } from \"../../core/config/constants.js\";\nimport type { TableConfig, FieldMapping } from \"@nexo-labs/payload-indexer\";\n\n/**\n * Field schema definitions for Typesense collections\n */\n\n/**\n * Type for Typesense collection field schema\n * Extracted from CollectionCreateSchema to ensure type compatibility\n */\ntype TypesenseFieldSchema = NonNullable<\n CollectionCreateSchema[\"fields\"]\n>[number];\n\n/**\n * Base fields that every collection should have\n */\nconst getBaseFields = () => [\n { name: \"id\", type: \"string\" as const },\n { name: \"slug\", type: \"string\" as const },\n { name: \"createdAt\", type: \"int64\" as const },\n { name: \"updatedAt\", type: \"int64\" as const },\n];\n\n/**\n * Creates embedding field definition\n * @param optional - 
Whether the embedding field is optional\n * @param dimensions - Number of dimensions for the embedding vector (default: 1536)\n */\nconst getEmbeddingField = (\n optional: boolean = true,\n dimensions: number = DEFAULT_EMBEDDING_DIMENSIONS\n) => ({\n name: \"embedding\",\n type: \"float[]\" as const,\n num_dim: dimensions,\n ...(optional && { optional: true }),\n});\n\n/**\n * Maps FieldMapping to TypesenseFieldSchema\n */\nconst mapFieldMappingsToSchema = (fields: FieldMapping[]): TypesenseFieldSchema[] => {\n return fields.map(field => ({\n name: field.name,\n type: field.type === 'auto' ? 'string' : field.type,\n facet: field.facet,\n index: field.index,\n optional: field.optional\n }));\n};\n\n/**\n * Gets chunk-specific fields for chunk collections\n */\nconst getChunkFields = () => [\n { name: \"parent_doc_id\", type: \"string\" as const, facet: true }, // Required for chunks\n { name: \"chunk_index\", type: \"int32\" as const },\n { name: \"chunk_text\", type: \"string\" as const }, // The chunk content\n { name: \"is_chunk\", type: \"bool\" as const }, // Always true for chunks\n { name: \"headers\", type: \"string[]\" as const, facet: true, optional: true }, // Hierarchical header metadata\n];\n\n/**\n * Creates a complete schema for a chunk collection\n */\nexport const getChunkCollectionSchema = (\n collectionSlug: string,\n tableConfig: TableConfig,\n embeddingDimensions: number = DEFAULT_EMBEDDING_DIMENSIONS\n) => {\n const fields = tableConfig.fields ? mapFieldMappingsToSchema(tableConfig.fields) : [];\n \n // Get user-defined field names to avoid duplicates\n const userFieldNames = new Set([\n ...fields.map(f => f.name),\n ...getChunkFields().map(f => f.name)\n ]);\n \n // Filter base fields to exclude any that are already defined by user or chunk fields\n const baseFields = getBaseFields().filter(f => !userFieldNames.has(f.name));\n\n return {\n name: collectionSlug,\n fields: [\n ...baseFields,\n ...getChunkFields(),\n ...fields,\n getEmbeddingField(false, embeddingDimensions), // Embeddings are required for chunks\n ],\n };\n};\n\n/**\n * Creates a complete schema for a full document collection\n */\nexport const getFullDocumentCollectionSchema = (\n collectionSlug: string,\n tableConfig: TableConfig,\n embeddingDimensions: number = DEFAULT_EMBEDDING_DIMENSIONS\n) => {\n const mappedFields = mapFieldMappingsToSchema(tableConfig.fields);\n \n // Get user-defined field names to avoid duplicates\n const userFieldNames = new Set(mappedFields.map(f => f.name));\n \n // Filter base fields to exclude any that are already defined by user\n const baseFields = getBaseFields().filter(f => !userFieldNames.has(f.name));\n\n return {\n name: collectionSlug,\n fields: [\n ...baseFields,\n ...mappedFields,\n // Optional embedding for full documents\n getEmbeddingField(true, embeddingDimensions) \n ],\n };\n};\n","import type { Client } from \"typesense\";\nimport type { CollectionCreateSchema } from \"typesense/lib/Typesense/Collections.js\";\nimport { logger } from \"../../../core/logging/logger.js\";\nimport { getTypesenseCollectionName } from \"../../../core/utils/naming.js\";\nimport type { ModularPluginConfig } from \"../../../core/config/types.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { \n getChunkCollectionSchema, \n getFullDocumentCollectionSchema,\n} from \"../../../shared/schema/collection-schemas.js\";\nimport { DEFAULT_EMBEDDING_DIMENSIONS } from \"../../../core/config/constants.js\";\n\nexport class SchemaManager {\n constructor(\n 
private client: Client,\n private config: ModularPluginConfig\n ) {}\n\n /**\n * Synchronizes all configured collections with Typesense\n */\n async syncCollections(): Promise<void> {\n if (!this.config.collections) return;\n\n logger.info('Starting schema synchronization...');\n\n const embeddingDimensions = this.getEmbeddingDimensions();\n\n for (const [collectionSlug, tableConfigs] of Object.entries(this.config.collections)) {\n if (!tableConfigs) continue;\n\n for (const tableConfig of tableConfigs) {\n if (!tableConfig.enabled) continue;\n\n await this.syncTable(collectionSlug, tableConfig, embeddingDimensions);\n }\n }\n\n logger.info('Schema synchronization completed.');\n }\n\n /**\n * Syncs a single table configuration\n */\n private async syncTable(\n collectionSlug: string,\n tableConfig: TableConfig,\n embeddingDimensions: number\n ): Promise<void> {\n const tableName = getTypesenseCollectionName(collectionSlug, tableConfig);\n\n // Generate target schema\n let targetSchema: CollectionCreateSchema;\n\n if (tableConfig.embedding?.chunking) {\n targetSchema = getChunkCollectionSchema(tableName, tableConfig, embeddingDimensions);\n } else {\n targetSchema = getFullDocumentCollectionSchema(tableName, tableConfig, embeddingDimensions);\n }\n\n try {\n // Check if collection exists\n const collection = await this.client.collections(tableName).retrieve();\n \n // Collection exists, check for updates (new fields)\n // Typesense only allows adding fields, not modifying/deleting (requires reindex)\n await this.updateCollectionSchema(tableName, collection, targetSchema);\n\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number };\n if (typesenseError?.httpStatus === 404) {\n // Collection doesn't exist, create it\n logger.info(`Creating collection: ${tableName}`);\n await this.client.collections().create(targetSchema);\n } else {\n logger.error(`Error checking collection ${tableName}`, error as Error);\n throw error;\n }\n }\n }\n\n private async updateCollectionSchema(\n tableName: string,\n currentSchema: any, // Typesense retrieval response\n targetSchema: CollectionCreateSchema\n ): Promise<void> {\n if (!currentSchema || !currentSchema.fields) return;\n \n const currentFields = new Set(currentSchema.fields.map((f: any) => f.name));\n // Filter out fields that already exist OR are 'id' (which is immutable)\n const newFields = targetSchema.fields?.filter(f => !currentFields.has(f.name) && f.name !== 'id') || [];\n\n if (newFields.length > 0) {\n logger.info(`Updating collection ${tableName} with ${newFields.length} new fields`, {\n fields: newFields.map(f => f.name)\n });\n\n try {\n // Update collection with new fields\n await this.client.collections(tableName).update({\n fields: newFields\n });\n } catch (error) {\n logger.error(`Failed to update collection ${tableName}`, error as Error);\n }\n }\n }\n\n private getEmbeddingDimensions(): number {\n const embeddingConfig = this.config.features.embedding;\n \n if (embeddingConfig?.dimensions) {\n }\n return DEFAULT_EMBEDDING_DIMENSIONS;\n }\n}\n","import type { Client } from \"typesense\";\nimport type { NodeConfiguration } from \"typesense/lib/Typesense/Configuration.js\";\nimport { logger } from \"../../../core/logging/logger.js\";\nimport type { AgentConfig } from \"../../../shared/types/plugin-types.js\";\nimport { ensureConversationCollection } from \"../setup.js\";\n\n/**\n * Configuration for AgentManager\n * Simple interface that only requires what it needs\n */\nexport interface AgentManagerConfig 
{\n agents: AgentConfig[];\n}\n\nexport class AgentManager {\n constructor(\n private client: Client,\n private config: AgentManagerConfig\n ) {}\n\n /**\n * Synchronizes all configured RAG agents with Typesense\n */\n async syncAgents(): Promise<void> {\n // Get agents from configuration\n const agents = this.config.agents || [];\n\n if (agents.length === 0) return;\n\n logger.info(`Starting synchronization of ${agents.length} RAG agents...`);\n\n // Ensure history collections exist for all agents\n const historyCollections = new Set(agents.map(a => a.historyCollection || 'conversation_history'));\n for (const collectionName of historyCollections) {\n await ensureConversationCollection(this.client, collectionName);\n }\n\n // Sync each agent model\n for (const agent of agents) {\n await this.syncAgentModel(agent);\n }\n\n logger.info('Agent synchronization completed.');\n }\n\n private async syncAgentModel(agent: AgentConfig): Promise<boolean> {\n try {\n\n const modelConfig = {\n id: agent.slug,\n model_name: agent.llmModel,\n system_prompt: agent.systemPrompt,\n api_key: agent.apiKey,\n history_collection: agent.historyCollection || 'conversation_history',\n max_bytes: agent.maxContextBytes || 65536,\n ttl: agent.ttl || 86400,\n k_results: agent.kResults || 5,\n };\n\n // Direct API call logic\n return await this.upsertConversationModel(modelConfig);\n\n } catch (error) {\n logger.error(`Failed to sync agent ${agent.slug}`, error as Error);\n return false;\n }\n }\n\n private async upsertConversationModel(modelConfig: any): Promise<boolean> {\n // Get configuration from client\n const configuration = this.client.configuration;\n\n if (!configuration || !configuration.nodes || configuration.nodes.length === 0) {\n logger.error('Invalid Typesense client configuration');\n return false;\n }\n\n const node = configuration.nodes[0] as NodeConfiguration;\n const typesenseApiKey = configuration.apiKey;\n const baseUrl = `${node.protocol}://${node.host}:${node.port}`;\n\n try {\n // Try to create\n const createResponse = await fetch(`${baseUrl}/conversations/models`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-TYPESENSE-API-KEY': typesenseApiKey || '',\n },\n body: JSON.stringify(modelConfig),\n });\n\n if (createResponse.ok) {\n logger.info(`Agent model created: ${modelConfig.id}`);\n return true;\n }\n\n if (createResponse.status === 409) {\n // Update if exists\n logger.debug(`Agent model ${modelConfig.id} exists, updating...`);\n const updateResponse = await fetch(`${baseUrl}/conversations/models/${modelConfig.id}`, {\n method: 'PUT',\n headers: {\n 'Content-Type': 'application/json',\n 'X-TYPESENSE-API-KEY': typesenseApiKey || '',\n },\n body: JSON.stringify(modelConfig),\n });\n\n if (updateResponse.ok) {\n logger.info(`Agent model updated: ${modelConfig.id}`);\n return true;\n } else {\n const err = await updateResponse.text();\n logger.error(`Failed to update agent ${modelConfig.id}: ${err}`);\n return false;\n }\n }\n\n const err = await createResponse.text();\n logger.error(`Failed to create agent ${modelConfig.id}: ${err}`);\n return false;\n\n } catch (networkError) {\n logger.error('Network error syncing agent model', networkError as Error);\n return false;\n }\n }\n}\n","/**\n * Composable Typesense RAG plugin factory for Payload CMS\n *\n * This plugin handles all Typesense-specific functionality:\n * - Search endpoints\n * - RAG endpoints\n * - Schema synchronization\n * - Agent synchronization\n *\n * It's designed to be used together with 
createIndexerPlugin from @nexo-labs/payload-indexer.\n *\n * @example\n * ```typescript\n * import { createIndexerPlugin } from '@nexo-labs/payload-indexer'\n * import { createTypesenseAdapter, createTypesenseRAGPlugin } from '@nexo-labs/payload-typesense'\n *\n * // 1. Create adapter\n * const adapter = createTypesenseAdapter(typesenseConnection)\n *\n * // 2. Create indexer plugin (sync hooks + embedding)\n * const { plugin: indexerPlugin, embeddingService } = createIndexerPlugin({\n * adapter,\n * features: { embedding: embeddingConfig, sync: { enabled: true } },\n * collections,\n * })\n *\n * // 3. Create Typesense RAG plugin (search + RAG + schema)\n * const typesenseRAGPlugin = createTypesenseRAGPlugin({\n * typesense: typesenseConnection,\n * embeddingConfig,\n * collections,\n * search: { enabled: true, defaults: { mode: 'semantic', perPage: 10 } },\n * agents: [...],\n * callbacks: {...},\n * })\n *\n * // 4. Export both plugins\n * export const plugins = [indexerPlugin, typesenseRAGPlugin]\n * ```\n */\n\nimport type { Config } from \"payload\";\nimport type { TypesenseRAGPluginConfig } from \"./rag-types.js\";\nimport { Logger } from \"@nexo-labs/payload-indexer\";\nimport { createTypesenseClient } from \"../core/client/typesense-client.js\";\nimport { createRAGPayloadHandlers } from \"../features/rag/endpoints.js\";\nimport { createSearchEndpoints } from \"../features/search/endpoints.js\";\nimport { SchemaManager } from \"../features/sync/services/schema-manager.js\";\nimport { AgentManager } from \"../features/rag/services/agent-manager.js\";\n\n/**\n * Creates a composable Typesense RAG plugin for Payload CMS\n *\n * This plugin handles all Typesense-specific features:\n * - Search endpoints (semantic, hybrid, keyword)\n * - RAG endpoints (chat, session management)\n * - Schema synchronization\n * - Agent synchronization\n *\n * @param config - Typesense RAG plugin configuration\n * @returns Payload config modifier function\n */\nexport function createTypesenseRAGPlugin(config: TypesenseRAGPluginConfig) {\n const logger = new Logger({ enabled: true, prefix: \"[payload-typesense]\" });\n\n return (payloadConfig: Config): Config => {\n // Create Typesense client\n const typesenseClient = createTypesenseClient(config.typesense);\n\n // 1. Add search endpoints if enabled\n if (config.search?.enabled) {\n const searchEndpoints = createSearchEndpoints(typesenseClient, {\n typesense: config.typesense,\n features: {\n embedding: config.embeddingConfig,\n search: config.search,\n },\n collections: config.collections || {},\n });\n\n payloadConfig.endpoints = [\n ...(payloadConfig.endpoints || []),\n ...searchEndpoints,\n ];\n\n logger.debug(\"Search endpoints registered\", {\n endpointsCount: searchEndpoints.length,\n });\n }\n\n // 2. Add RAG endpoints if agents and callbacks are configured\n if (config.agents && config.agents.length > 0 && config.callbacks) {\n const ragEndpoints = createRAGPayloadHandlers({\n typesense: config.typesense,\n embeddingConfig: config.embeddingConfig,\n agents: config.agents,\n callbacks: config.callbacks,\n hybrid: config.hybrid,\n hnsw: config.hnsw,\n advanced: config.advanced,\n });\n\n payloadConfig.endpoints = [\n ...(payloadConfig.endpoints || []),\n ...ragEndpoints,\n ];\n\n logger.debug(\"RAG endpoints registered\", {\n endpointsCount: ragEndpoints.length,\n agentsCount: config.agents.length,\n });\n }\n\n // 3. 
Initialize on startup (schema sync + agent sync)\n const incomingOnInit = payloadConfig.onInit;\n payloadConfig.onInit = async (payload) => {\n if (incomingOnInit) {\n await incomingOnInit(payload);\n }\n\n try {\n // A. Sync Typesense collections schema\n if (config.collections && Object.keys(config.collections).length > 0) {\n logger.info(\"Syncing Typesense collections schema...\");\n const schemaManager = new SchemaManager(typesenseClient, {\n typesense: config.typesense,\n features: {\n embedding: config.embeddingConfig,\n },\n collections: config.collections,\n });\n await schemaManager.syncCollections();\n }\n\n // B. Sync RAG agents\n if (config.agents && config.agents.length > 0) {\n logger.info(\"Initializing RAG agents...\");\n const agentManager = new AgentManager(typesenseClient, {\n agents: config.agents,\n });\n await agentManager.syncAgents();\n }\n } catch (error) {\n // Fail soft: Log error but don't crash Payload startup\n logger.error(\"Error initializing Typesense resources\", error as Error);\n }\n };\n\n return payloadConfig;\n };\n}\n\n","import type { Client } from \"typesense\";\nimport type { CollectionFieldSchema } from \"typesense/lib/Typesense/Collection.js\";\nimport type { CollectionCreateSchema } from \"typesense/lib/Typesense/Collections.js\";\nimport type {\n IndexerAdapter,\n IndexDocument,\n VectorSearchOptions,\n AdapterSearchResult,\n} from \"@nexo-labs/payload-indexer\";\nimport { logger } from \"../core/logging/logger.js\";\nimport type {\n TypesenseFieldMapping,\n TypesenseCollectionSchema,\n TypesenseFieldSchema,\n TypesenseSearchResult,\n TypesenseCollectionInfo,\n} from \"./types.js\";\n\n/**\n * Typesense implementation of the IndexerAdapter interface\n *\n * This adapter provides type-safe field definitions for Typesense.\n * When used with createIndexerPlugin, TypeScript will validate that\n * all field mappings in your collection config are valid TypesenseFieldMapping.\n *\n * @example\n * ```typescript\n * const adapter = createTypesenseAdapter(config);\n *\n * // TypeScript infers TFieldMapping = TypesenseFieldMapping\n * const { plugin } = createIndexerPlugin({\n * adapter,\n * collections: {\n * posts: [{\n * enabled: true,\n * fields: [\n * { name: 'title', type: 'string' }, // ✅ Valid\n * { name: 'views', type: 'int64' }, // ✅ Valid\n * { name: 'tags', type: 'string[]', facet: true }, // ✅ With faceting\n * ]\n * }]\n * }\n * });\n * ```\n */\nexport class TypesenseAdapter implements IndexerAdapter<TypesenseFieldMapping, TypesenseCollectionSchema> {\n readonly name = 'typesense';\n\n constructor(private client: Client) {}\n\n /**\n * Test connection to Typesense\n */\n async testConnection(): Promise<boolean> {\n try {\n await this.client.health.retrieve();\n return true;\n } catch (error) {\n logger.error(\"Typesense connection test failed\", error);\n return false;\n }\n }\n\n /**\n * Create or update a collection schema\n */\n async ensureCollection(schema: TypesenseCollectionSchema): Promise<void> {\n const typesenseSchema = this.convertToTypesenseSchema(schema);\n\n try {\n // Check if collection exists\n const existing = await this.client.collections(schema.name).retrieve() as TypesenseCollectionInfo;\n\n // Collection exists, add new fields if any\n await this.updateCollectionIfNeeded(schema.name, existing, typesenseSchema);\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number };\n if (typesenseError?.httpStatus === 404) {\n // Collection doesn't exist, create it\n logger.info(`Creating 
collection: ${schema.name}`);\n await this.client.collections().create(typesenseSchema);\n } else {\n throw error;\n }\n }\n }\n\n /**\n * Check if a collection exists\n */\n async collectionExists(collectionName: string): Promise<boolean> {\n try {\n await this.client.collections(collectionName).retrieve();\n return true;\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number };\n if (typesenseError?.httpStatus === 404) {\n return false;\n }\n throw error;\n }\n }\n\n /**\n * Delete a collection\n */\n async deleteCollection(collectionName: string): Promise<void> {\n try {\n await this.client.collections(collectionName).delete();\n logger.info(`Deleted collection: ${collectionName}`);\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number };\n if (typesenseError?.httpStatus !== 404) {\n throw error;\n }\n }\n }\n\n /**\n * Upsert a single document\n */\n async upsertDocument(collectionName: string, document: IndexDocument): Promise<void> {\n try {\n await this.client.collections(collectionName).documents().upsert(document);\n } catch (error) {\n logger.error(`Failed to upsert document ${document.id} to ${collectionName}`, error);\n throw error;\n }\n }\n\n /**\n * Upsert multiple documents (batch)\n */\n async upsertDocuments(collectionName: string, documents: IndexDocument[]): Promise<void> {\n if (documents.length === 0) return;\n\n try {\n await this.client.collections(collectionName).documents().import(documents, {\n action: 'upsert',\n });\n } catch (error) {\n logger.error(`Failed to batch upsert ${documents.length} documents to ${collectionName}`, error);\n throw error;\n }\n }\n\n /**\n * Delete a document by ID\n */\n async deleteDocument(collectionName: string, documentId: string): Promise<void> {\n try {\n await this.client.collections(collectionName).documents(documentId).delete();\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number };\n // Ignore 404 errors (document already deleted)\n if (typesenseError?.httpStatus !== 404) {\n logger.error(`Failed to delete document ${documentId} from ${collectionName}`, error);\n throw error;\n }\n }\n }\n\n /**\n * Delete documents matching a filter\n * Returns the number of deleted documents\n */\n async deleteDocumentsByFilter(\n collectionName: string,\n filter: Record<string, unknown>\n ): Promise<number> {\n const filterStr = this.buildFilterString(filter);\n\n try {\n const result = await this.client.collections(collectionName).documents().delete({\n filter_by: filterStr,\n });\n return result.num_deleted || 0;\n } catch (error) {\n logger.error(`Failed to delete documents by filter from ${collectionName}`, error, { filter });\n throw error;\n }\n }\n\n /**\n * Perform a vector search\n * @typeParam TDoc - The document type to return in results\n */\n async vectorSearch<TDoc = Record<string, unknown>>(\n collectionName: string,\n vector: number[],\n options: VectorSearchOptions = {}\n ): Promise<AdapterSearchResult<TDoc>[]> {\n const { limit = 10, filter, includeFields, excludeFields } = options;\n\n try {\n const searchParams: Record<string, unknown> = {\n q: '*',\n vector_query: `embedding:([${vector.join(',')}], k:${limit})`,\n };\n\n if (filter) {\n searchParams['filter_by'] = this.buildFilterString(filter);\n }\n\n if (includeFields) {\n searchParams['include_fields'] = includeFields.join(',');\n }\n\n if (excludeFields) {\n searchParams['exclude_fields'] = excludeFields.join(',');\n }\n\n const result = await this.client\n 
.collections(collectionName)\n .documents()\n .search(searchParams) as TypesenseSearchResult<TDoc>;\n\n return (result.hits || []).map(hit => ({\n id: String((hit.document as Record<string, unknown>)?.id || ''),\n score: hit.vector_distance ?? 0,\n document: hit.document,\n }));\n } catch (error) {\n logger.error(`Vector search failed on ${collectionName}`, error);\n throw error;\n }\n }\n\n // === Private helper methods ===\n\n /**\n * Convert generic schema to Typesense-specific schema\n */\n private convertToTypesenseSchema(schema: TypesenseCollectionSchema): CollectionCreateSchema {\n return {\n name: schema.name,\n fields: schema.fields.map(field => this.convertField(field)),\n default_sorting_field: schema.defaultSortingField,\n };\n }\n\n /**\n * Convert a single field schema to Typesense format\n */\n private convertField(field: TypesenseFieldSchema): CollectionFieldSchema {\n const typesenseField: CollectionFieldSchema = {\n name: field.name,\n type: field.type,\n facet: field.facet,\n index: field.index,\n optional: field.optional,\n };\n\n // Add vector dimensions for float[] embedding fields\n if (field.type === 'float[]' && field.vectorDimensions) {\n typesenseField.num_dim = field.vectorDimensions;\n }\n\n return typesenseField;\n }\n\n /**\n * Update collection with new fields if needed\n */\n private async updateCollectionIfNeeded(\n collectionName: string,\n currentSchema: TypesenseCollectionInfo,\n targetSchema: CollectionCreateSchema\n ): Promise<void> {\n if (!currentSchema?.fields) return;\n\n const currentFields = new Set(currentSchema.fields.map(f => f.name));\n const newFields = targetSchema.fields?.filter(\n f => !currentFields.has(f.name) && f.name !== 'id'\n ) || [];\n\n if (newFields.length > 0) {\n logger.info(`Updating collection ${collectionName} with ${newFields.length} new fields`, {\n fields: newFields.map(f => f.name)\n });\n\n try {\n await this.client.collections(collectionName).update({\n fields: newFields\n });\n } catch (error) {\n logger.error(`Failed to update collection ${collectionName}`, error);\n }\n }\n }\n\n /**\n * Build a Typesense filter string from a filter object\n */\n private buildFilterString(filter: Record<string, unknown>): string {\n const parts: string[] = [];\n\n for (const [key, value] of Object.entries(filter)) {\n if (Array.isArray(value)) {\n // Array values use 'IN' syntax\n parts.push(`${key}:[${value.map(v => String(v)).join(',')}]`);\n } else if (typeof value === 'string') {\n parts.push(`${key}:=${value}`);\n } else if (typeof value === 'number') {\n parts.push(`${key}:${value}`);\n } else if (typeof value === 'boolean') {\n parts.push(`${key}:${value}`);\n }\n }\n\n return parts.join(' && ');\n }\n}\n","/**\n * Factory function for creating a TypesenseAdapter\n */\n\nimport { Client } from \"typesense\";\nimport { TypesenseAdapter } from \"./typesense-adapter.js\";\nimport type { TypesenseConnectionConfig } from \"../shared/types/plugin-types.js\";\n\n/**\n * Creates a TypesenseAdapter instance with the provided configuration\n *\n * @param config - Typesense connection configuration\n * @returns A configured TypesenseAdapter instance\n *\n * @example\n * ```typescript\n * import { createTypesenseAdapter } from '@nexo-labs/payload-typesense';\n *\n * const adapter = createTypesenseAdapter({\n * apiKey: process.env.TYPESENSE_API_KEY!,\n * nodes: [{\n * host: 'localhost',\n * port: 8108,\n * protocol: 'http'\n * }]\n * });\n * ```\n */\nexport function createTypesenseAdapter(config: TypesenseConnectionConfig): 
TypesenseAdapter {\n const client = new Client({\n apiKey: config.apiKey,\n nodes: config.nodes,\n connectionTimeoutSeconds: config.connectionTimeoutSeconds ?? 10,\n retryIntervalSeconds: config.retryIntervalSeconds,\n numRetries: config.numRetries,\n });\n\n return new TypesenseAdapter(client);\n}\n\n/**\n * Creates a TypesenseAdapter from an existing Typesense Client\n * Useful when you already have a configured client instance\n *\n * @param client - Existing Typesense Client instance\n * @returns A TypesenseAdapter instance wrapping the provided client\n */\nexport function createTypesenseAdapterFromClient(client: Client): TypesenseAdapter {\n return new TypesenseAdapter(client);\n}\n","import type { Client } from \"typesense\";\nimport { getTypesenseCollectionName } from \"../../../core/utils/naming.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { logger } from \"../../../core/logging/logger.js\";\n\n/**\n * Deletes a document from Typesense\n * Handles both direct document deletion and chunk deletion\n */\nexport const deleteDocumentFromTypesense = async (\n typesenseClient: Client,\n collectionSlug: string,\n docId: string,\n tableConfig: TableConfig\n) => {\n try {\n // Build table name from collection slug + tableSuffix\n const tableName = getTypesenseCollectionName(collectionSlug, tableConfig);\n\n logger.debug('Attempting to delete document from Typesense', {\n documentId: docId,\n collection: collectionSlug,\n tableName,\n });\n\n // Try to delete the document directly first\n try {\n await typesenseClient.collections(tableName).documents(docId).delete();\n logger.info('Document deleted from Typesense', {\n documentId: docId,\n tableName,\n });\n } catch (docDeleteError: unknown) {\n const typesenseError = docDeleteError as { httpStatus?: number };\n\n // If document doesn't exist, try to delete chunks by parent_doc_id\n if (typesenseError.httpStatus === 404) {\n logger.debug('Document not found, attempting to delete chunks', {\n documentId: docId,\n tableName,\n });\n\n try {\n await typesenseClient\n .collections(tableName)\n .documents()\n .delete({\n filter_by: `parent_doc_id:${docId}`,\n });\n logger.info('All chunks deleted for document', {\n documentId: docId,\n tableName,\n });\n } catch (chunkDeleteError: unknown) {\n const chunkError = chunkDeleteError as { httpStatus?: number };\n\n // Ignore 404 errors (collection might not exist)\n if (chunkError.httpStatus !== 404) {\n logger.error('Failed to delete chunks for document', chunkDeleteError as Error, {\n documentId: docId,\n tableName,\n });\n } else {\n logger.debug('No chunks found to delete', { documentId: docId });\n }\n }\n } else {\n throw docDeleteError;\n }\n }\n } catch (error: unknown) {\n // Build table name for error message\n const tableName = getTypesenseCollectionName(collectionSlug, tableConfig);\n\n logger.error('Failed to delete document from Typesense', error as Error, {\n documentId: docId,\n collection: collectionSlug,\n tableName,\n });\n\n // Note: We don't rethrow to allow the deletion process to continue\n 
}\n};\n"],"mappings":";;;;;;;AAIA,MAAa,yBAAyB,oBAA+C;AACnF,QAAO,IAAI,UAAU,OAAO;EAC1B,QAAQ,gBAAgB;EACxB,0BAA0B,gBAAgB,4BAA4B;EACtE,OAAO,gBAAgB;EACxB,CAAC;;AAGJ,MAAa,0BAA0B,OAAO,WAAqC;AACjF,KAAI;AACF,QAAM,OAAO,OAAO,UAAU;AAC9B,SAAO;UACA,QAAQ;AAEf,SAAO;;;;;;ACHX,IAAIA,eAA8B;AAClC,IAAIC,sBAAqC;AAEzC,IAAIC,eAA0C;AAC9C,IAAIC,sBAAqC;AAEzC,MAAM,mBAAmB,WAAmC;CAC1D,MAAM,MAAM,UAAU,QAAQ,IAAI;AAElC,KAAI,CAAC,IACH,QAAO;AAIT,KAAI,CAAC,gBAAgB,wBAAwB,KAAK;AAChD,iBAAe,IAAI,OAAO,EACxB,QAAQ,KACT,CAAC;AACF,wBAAsB;;AAGxB,QAAO;;AAGT,MAAM,mBAAmB,WAA+C;CACtE,MAAM,MAAM,UAAU,QAAQ,IAAI;AAElC,KAAI,CAAC,IACH,QAAO;AAIT,KAAI,CAAC,gBAAgB,wBAAwB,KAAK;AAChD,iBAAe,IAAI,mBAAmB,IAAI;AAC1C,wBAAsB;;AAGxB,QAAO;;;;;;;;AAST,MAAa,oBAAoB,OAC/B,MACA,WAC6B;AAC7B,KAAI,CAAC,QAAQ,KAAK,MAAM,CAAC,SAAS,2BAA2B;AAC3D,SAAO,MAAM,0DAA0D;AACvE,SAAO;;AAKT,MAFiB,QAAQ,QAAQ,cAEhB,SACf,QAAO,wBAAwB,MAAM,OAAO;KAE5C,QAAO,wBAAwB,MAAM,OAAO;;;;;AAOhD,MAAM,0BAA0B,OAC9B,MACA,WAC6B;CAC7B,MAAM,SAAS,gBAAgB,QAAQ,OAAO;AAE9C,KAAI,CAAC,QAAQ;AACX,SAAO,MAAM,+DAA+D;AAC5E,SAAO;;AAGT,KAAI;EACF,MAAM,QAAQ,QAAQ,SAAS,QAAQ,IAAI,0BAA0B;EACrE,MAAM,aAAa,QAAQ,cAAc;AAEzC,SAAO,MAAM,+BAA+B;GAAE;GAAO;GAAY,YAAY,KAAK;GAAQ,CAAC;EAQ3F,MAAM,aANW,MAAM,OAAO,WAAW,OAAO;GAC9C;GACA,OAAO,KAAK,MAAM;GAClB;GACD,CAAC,EAEyB,KAAK,IAAI;AAEpC,SAAO,MAAM,8BAA8B,EAAE,iBAAiB,WAAW,QAAQ,CAAC;AAElF,MACE,CAAC,aACD,CAAC,MAAM,QAAQ,UAAU,IACzB,UAAU,WAAW,YACrB;AACA,UAAO,KAAK,8CAA8C;IACxD,UAAU;IACV,UAAU,WAAW;IACtB,CAAC;AACF,UAAO;;AAGT,SAAO;UACA,OAAO;AACd,SAAO,MAAM,uCAAuC,OAAO;GACzD,YAAY,KAAK;GACjB,OAAO,QAAQ;GAChB,CAAC;AACF,SAAO;;;;;;AAOX,MAAM,0BAA0B,OAC9B,MACA,WAC6B;CAC7B,MAAM,SAAS,gBAAgB,QAAQ,OAAO;AAE9C,KAAI,CAAC,QAAQ;AACX,SAAO,MAAM,+DAA+D;AAC5E,SAAO;;AAGT,KAAI;EACF,MAAM,QAAQ,QAAQ,SAAS;EAC/B,MAAM,aAAa,QAAQ,cAAc;AAEzC,SAAO,MAAM,+BAA+B;GAAE;GAAO;GAAY,YAAY,KAAK;GAAQ,CAAC;EAQ3F,MAAM,aALS,MADQ,OAAO,mBAAmB,EAAE,OAAO,CAAC,CACvB,aAAa;GAC/C,SAAS;IAAE,MAAM;IAAQ,OAAO,CAAC,EAAE,MAAM,KAAK,MAAM,EAAE,CAAC;IAAE;GACzD,UAAU,SAAS;GACpB,CAAC,EAEuB,UAAU;AAEnC,SAAO,MAAM,8BAA8B,EAAE,iBAAiB,WAAW,QAAQ,CAAC;AAElF,MACE,CAAC,aACD,CAAC,MAAM,QAAQ,UAAU,IACzB,UAAU,WAAW,YACrB;AACA,UAAO,KAAK,8CAA8C;IACxD,UAAU;IACV,UAAU,WAAW;IACtB,CAAC;AACF,UAAO;;AAGT,SAAO;UACA,OAAO;AACd,SAAO,MAAM,uCAAuC,OAAO;GACzD,YAAY,KAAK;GACjB,OAAO,QAAQ;GAChB,CAAC;AACF,SAAO;;;;;;;;;;;;AAaX,MAAa,6BAA6B,OACxC,MACA,WACuC;AACvC,KAAI,CAAC,QAAQ,KAAK,MAAM,CAAC,SAAS,2BAA2B;AAC3D,SAAO,MAAM,0DAA0D;AACvE,SAAO;;AAKT,MAFiB,QAAQ,QAAQ,cAEhB,SACf,QAAO,iCAAiC,MAAM,OAAO;KAErD,QAAO,iCAAiC,MAAM,OAAO;;;;;AAOzD,MAAM,mCAAmC,OACvC,MACA,WACuC;CACvC,MAAM,SAAS,gBAAgB,QAAQ,OAAO;AAE9C,KAAI,CAAC,QAAQ;AACX,SAAO,MAAM,+DAA+D;AAC5E,SAAO;;AAGT,KAAI;EACF,MAAM,QAAQ,QAAQ,SAAS,QAAQ,IAAI,0BAA0B;EACrE,MAAM,aAAa,QAAQ,cAAc;AAEzC,SAAO,MAAM,mDAAmD;GAAE;GAAO;GAAY,CAAC;EAEtF,MAAM,WAAW,MAAM,OAAO,WAAW,OAAO;GAC9C;GACA,OAAO,KAAK,MAAM;GAClB;GACD,CAAC;EAEF,MAAM,YAAY,SAAS,KAAK,IAAI;AAEpC,MACE,CAAC,aACD,CAAC,MAAM,QAAQ,UAAU,IACzB,UAAU,WAAW,YACrB;AACA,UAAO,KAAK,8CAA8C;IACxD,UAAU;IACV,UAAU,WAAW;IACtB,CAAC;AACF,UAAO;;AAGT,SAAO;GACL;GACA,OAAO;IACL,cAAc,SAAS,OAAO,iBAAiB;IAC/C,aAAa,SAAS,OAAO,gBAAgB;IAC9C;GACF;UACM,OAAO;AACd,SAAO,MAAM,kDAAkD,OAAO;GACpE,YAAY,KAAK;GACjB,OAAO,QAAQ;GAChB,CAAC;AACF,SAAO;;;;;;;AAQX,MAAM,mCAAmC,OACvC,MACA,WACuC;CACvC,MAAM,kBAAkB,MAAM,wBAAwB,MAAM,OAAO;AAEnE,KAAI,CAAC,gBACH,QAAO;CAIT,MAAM,kBAAkB,KAAK,KAAK,KAAK,SAAS,EAAE;AAElD,QAAO;EACL,WAAW;EACX,OAAO;GACL,cAAc;GACd,aAAa;GACd;EACF;;;;;;;;;AAUH,MAAa,mCAAmC,OAC9C,OACA,WAC4C;AAC5C,KAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,SAAO,MAAM,mDAAmD;AAChE,SAAO;;CAIT,MAAM,aAAa,MAAM,QAAO,MAAK,KAAK,EAAE,MAAM,CAAC,UAAU,0BAA0B;AAEvF,KAAI,WAAW,WAAW,GAAG;AAC3B,SAAO,MAAM,gEAAgE;AAC7E,SAAO;;AAKT,MAF
iB,QAAQ,QAAQ,cAEhB,SACf,QAAO,uCAAuC,YAAY,OAAO;KAEjE,QAAO,uCAAuC,YAAY,OAAO;;;;;AAOrE,MAAM,yCAAyC,OAC7C,YACA,WAC4C;CAC5C,MAAM,SAAS,gBAAgB,QAAQ,OAAO;AAE9C,KAAI,CAAC,QAAQ;AACX,SAAO,MAAM,qEAAqE;AAClF,SAAO;;AAGT,KAAI;EACF,MAAM,QAAQ,QAAQ,SAAS,QAAQ,IAAI,0BAA0B;EACrE,MAAM,aAAa,QAAQ,cAAc;AAEzC,SAAO,MAAM,0DAA0D;GACrE;GACA;GACA,WAAW,WAAW;GACvB,CAAC;EAEF,MAAM,WAAW,MAAM,OAAO,WAAW,OAAO;GAC9C;GACA,OAAO,WAAW,KAAI,MAAK,EAAE,MAAM,CAAC;GACpC;GACD,CAAC;EAEF,MAAM,aAAa,SAAS,KAAK,KAAI,SAAQ,KAAK,UAAU;AAO5D,MAAI,CAJa,WAAW,OAC1B,QAAO,MAAM,QAAQ,IAAI,IAAI,IAAI,WAAW,WAC7C,EAEc;AACb,UAAO,KAAK,qDAAqD;IAC/D,UAAU;IACV,WAAW,WAAW;IACvB,CAAC;AACF,UAAO;;AAGT,SAAO,KAAK,kDAAkD;GAC5D,OAAO,WAAW;GAClB,aAAa,SAAS,OAAO,gBAAgB;GAC9C,CAAC;AAEF,SAAO;GACL;GACA,OAAO;IACL,cAAc,SAAS,OAAO,iBAAiB;IAC/C,aAAa,SAAS,OAAO,gBAAgB;IAC9C;GACF;UACM,OAAO;AACd,SAAO,MAAM,yDAAyD,OAAO;GAC3E,WAAW,WAAW;GACtB,OAAO,QAAQ;GAChB,CAAC;AACF,SAAO;;;;;;;AAQX,MAAM,yCAAyC,OAC7C,YACA,WAC4C;CAC5C,MAAM,SAAS,gBAAgB,QAAQ,OAAO;AAE9C,KAAI,CAAC,QAAQ;AACX,SAAO,MAAM,qEAAqE;AAClF,SAAO;;AAGT,KAAI;EACF,MAAM,QAAQ,QAAQ,SAAS;EAC/B,MAAM,aAAa,QAAQ,cAAc;AAEzC,SAAO,MAAM,0DAA0D;GACrE;GACA;GACA,WAAW,WAAW;GACvB,CAAC;EAEF,MAAM,iBAAiB,OAAO,mBAAmB,EAAE,OAAO,CAAC;EAC3D,MAAMC,aAAyB,EAAE;EACjC,IAAI,uBAAuB;AAG3B,OAAK,MAAM,QAAQ,YAAY;GAC7B,MAAM,SAAS,MAAM,eAAe,aAAa;IAC/C,SAAS;KAAE,MAAM;KAAQ,OAAO,CAAC,EAAE,MAAM,KAAK,MAAM,EAAE,CAAC;KAAE;IACzD,UAAU,SAAS;IACpB,CAAC;AAEF,cAAW,KAAK,OAAO,UAAU,OAAO;AACxC,2BAAwB,KAAK,KAAK,KAAK,SAAS,EAAE;;AAQpD,MAAI,CAJa,WAAW,OAC1B,QAAO,MAAM,QAAQ,IAAI,IAAI,IAAI,WAAW,WAC7C,EAEc;AACb,UAAO,KAAK,qDAAqD;IAC/D,UAAU;IACV,WAAW,WAAW;IACvB,CAAC;AACF,UAAO;;AAGT,SAAO,KAAK,kDAAkD;GAC5D,OAAO,WAAW;GAClB,iBAAiB;GAClB,CAAC;AAEF,SAAO;GACL;GACA,OAAO;IACL,cAAc;IACd,aAAa;IACd;GACF;UACM,OAAO;AACd,SAAO,MAAM,yDAAyD,OAAO;GAC3E,WAAW,WAAW;GACtB,OAAO,QAAQ;GAChB,CAAC;AACF,SAAO;;;;;;;;;;;;;;;;ACjaX,SAAgB,uBACd,QACA,qBACA,iBACK;CACL,MAAM,WAAW,gBAAgB,MAAM,GAAG,YAAY;CACtD,MAAM,eAAe,IAAI,IACvB,GAAG,SAAS,KAAK,gBAAgB,MAAM,GAAG,KAAK,GAAG,gBAAgB,MAAM,GAAG,KAAK,eACjF;AAGD,cAAa,aAAa,IAAI,KAAK,OAAO,YAAY;AACtD,cAAa,aAAa,IAAI,gBAAgB,OAAO;AACrD,cAAa,aAAa,IAAI,yBAAyB,oBAAoB;AAE3E,KAAI,OAAO,OACT,cAAa,aAAa,IAAI,mBAAmB,OAAO,OAAO;AAGjE,cAAa,aAAa,IAAI,uBAAuB,OAAO;AAE5D,QAAO;;;;;;;;AAST,SAAgB,yBAAyB,QAA8B;CACrE,MAAM,EACJ,mBACA,gBACA,mBACA,WAAW,IACX,iBAAiB,EAAE,KACjB;AAEJ,QAAO,kBAAkB,KAAK,eAAuB;EACnD,MAAMC,UAAkC;GACtC;GACA,UAAU;GACV,cAAc,eAAe,eAAe,KAAK,IAAI,CAAC,OAAO,SAAS;GACtE,gBAAgB;GAChB,GAAG,0BAA0B,eAAe;GAC7C;AAGD,MAAI,qBAAqB,kBAAkB,SAAS,EAElD,SAAQ,YAAY,kBADA,kBAAkB,KAAK,OAAe,IAAI,GAAG,GAAG,CAAC,KAAK,IAAI,CAC5B;AAGpD,SAAO;GACP;;;;;;;;AASJ,SAAS,0BAA0B,QAAoD;CACrF,MAAMC,SAA+B,EAAE;AAEvC,KAAI,OAAO,wBAAwB,OACjC,QAAO,wBAAwB,OAAO;AAGxC,KAAI,OAAO,aAAa,OACtB,QAAO,YAAY,OAAO;AAG5B,KAAI,OAAO,WAAW,OACpB,QAAO,SAAS,OAAO;AAGzB,KAAI,OAAO,wBAAwB,OACjC,QAAO,wBAAwB,OAAO;AAGxC,KAAI,OAAO,mBAAmB,OAC5B,QAAO,kBAAkB,OAAO;AAGlC,QAAO;;;;;;;;AAST,SAAgB,4BAA4B,QAA8B;AACxE,QAAO,EACL,UAAU,yBAAyB,OAAO,EAC3C;;;;;;;;;;AAWH,SAAgB,wBACd,QAAQ,IACR,gBAAgB,MAChB,cAAc,oBACd;AACA,QAAO;EACL;EACA,uBAAuB;EACvB,cAAc;EACf;;;;;;;;;;;AC1HH,SAAgB,uBAAuB,MAAwC;AAC7E,KAAI,CAAC,KAAK,WAAW,SAAS,CAC5B,QAAO;CAGT,MAAM,OAAO,KAAK,MAAM,EAAE;AAE1B,KAAI,SAAS,SACX,QAAO,EAAE,KAAK,UAAU;AAG1B,KAAI;EACF,MAAM,SAAS,KAAK,MAAM,KAAK;EAC/B,MAAMC,QAA2B,EAAE,KAAK,QAAQ;AAGhD,MAAI,OAAO,gBACT,OAAM,iBAAiB,OAAO;WACrB,OAAO,cAAc,gBAC9B,OAAM,iBAAiB,OAAO,aAAa;AAI7C,MAAI,OAAO,YAAY,OACrB,OAAM,UAAU,OAAO;WACd,OAAO,cAAc,OAC9B,OAAM,UAAU,OAAO,aAAa;AAItC,MAAI,OAAO,QACT,OAAM,UAAU,OAAO;AAGzB,SAAO;UACA,GAAG;AACV,WAAO,MAAM,mDAAmD,EAAW;AAC3E,SAAO;;;;;;;;;;AAWX,SAAgB,0BACd,SACA,sBACe;CACf,MAAMC,aAA4B,EAAE;AAEpC,MAAK,MAAM,U
AAU,QACnB,KAAI,OAAO,KACT,MAAK,MAAM,OAAO,OAAO,MAAM;EAC7B,MAAM,MAAM,IAAI;EAChB,MAAM,QAAQ,IAAI,mBAAmB,IAAI,cAAc;EACvD,MAAM,iBAAiB,OAAO,gBAAgB,mBAAmB;EAEjE,MAAM,OAAO,uBACT,qBAAqB,eAAe,GACpC,uBAAuB,eAAe;EAE1C,MAAM,cAAc,IAAI,cAAc;EAEtC,MAAMC,SAAsB;GAC1B,IAAI,IAAI,MAAM;GACd,OAAO,IAAI,SAAS;GACpB,MAAM,IAAI,QAAQ;GAClB;GACA,YAAY,IAAI,eAAe;GAC/B,gBAAgB;GAChB,SAAS;GACT,SAAS,YAAY,UAAU,GAAG,IAAI,IAAI,YAAY,SAAS,MAAM,QAAQ;GAC9E;AAED,aAAW,KAAK,OAAO;;AAK7B,QAAO;;;;;;;;AAST,SAAgB,iBAAiB,SAA6C;CAC5E,IAAI,cAAc;AAElB,MAAK,MAAM,UAAU,QACnB,KAAI,OAAO,KACT,MAAK,MAAM,OAAO,OAAO,MAAM;EAC7B,MAAM,MAAM,IAAI;AAChB,kBAAgB,IAAI,cAAc,MAAM;;AAK9C,QAAO;;;;;;;;;;AAWT,eAAsB,0BACpB,UACA,SACA,sBACiC;CACjC,MAAM,SAAS,SAAS,KAAM,WAAW;CACzC,MAAM,UAAU,IAAI,aAAa;CAEjC,IAAI,SAAS;CACb,IAAIC,UAAyB,EAAE;CAC/B,IAAI,sBAAsB;CAC1B,IAAIC,iBAAgC;CACpC,IAAI,cAAc;CAClB,IAAI,cAAc;AAElB,QAAO,MAAM;EACX,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,MAAI,KAAM;AAEV,YAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;EACjD,MAAM,QAAQ,OAAO,MAAM,KAAK;AAChC,WAAS,MAAM,KAAK,IAAI;AAExB,OAAK,MAAM,QAAQ,OAAO;GACxB,MAAM,QAAQ,uBAAuB,KAAK;AAC1C,OAAI,CAAC,MAAO;AAGZ,OAAI,QACF,SAAQ,MAAM;AAIhB,OAAI,CAAC,kBAAkB,MAAM,eAC3B,kBAAiB,MAAM;AAIzB,OAAI,CAAC,uBAAuB,MAAM,SAAS;AACzC,cAAU,0BAA0B,MAAM,SAAS,qBAAqB;AACxE,kBAAc,iBAAiB,MAAM,QAAQ;AAC7C,0BAAsB;;AAIxB,OAAI,MAAM,QACR,gBAAe,MAAM;;;AAK3B,QAAO;EACL;EACA;EACA;EACA;EACD;;;;;;;;;AAUH,SAAgB,uBACd,UACA,QAC4B;CAC5B,MAAM,SAAS,SAAS,KAAM,WAAW;CACzC,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,UAAU,IAAI,aAAa;CAEjC,IAAI,SAAS;AAEb,QAAO,IAAI,eAAe;EACxB,MAAM,MAAM,YAAY;AACtB,UAAO,MAAM;IACX,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,QAAI,MAAM;AACR,gBAAW,OAAO;AAClB;;AAGF,cAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;IACjD,MAAM,QAAQ,OAAO,MAAM,KAAK;AAChC,aAAS,MAAM,KAAK,IAAI;AAExB,SAAK,MAAM,QAAQ,OAAO;KACxB,MAAM,QAAQ,uBAAuB,KAAK;AAE1C,SAAI,SAAS,OACX,QAAO,MAAM;AAIf,SAAI,KACF,YAAW,QAAQ,QAAQ,OAAO,OAAO,KAAK,CAAC;;;;EAKvD,SAAS;AACP,UAAO,QAAQ;;EAElB,CAAC;;;;;;;;AASJ,SAAS,uBAAuB,gBAAgC;AAC9D,KAAI,eAAe,SAAS,UAAU,CACpC,QAAO;AAET,KAAI,eAAe,SAAS,OAAO,CACjC,QAAO;AAET,KAAI,eAAe,SAAS,OAAO,CACjC,QAAO;AAET,KAAI,eAAe,SAAS,OAAO,CACjC,QAAO;AAET,QAAO;;;;;;;;;;;;AC9QT,eAAsB,6BACpB,QACA,iBAAyB,wBACP;AAClB,KAAI;AAEF,QAAM,OAAO,YAAY,eAAe,CAAC,UAAU;AACnD,WAAO,KAAK,0CAA0C,EAAE,YAAY,gBAAgB,CAAC;AACrF,SAAO;UACAC,OAAgB;AAEvB,MADuB,OACH,eAAe,KAAK;AACtC,YAAO,KAAK,oCAAoC,EAAE,YAAY,gBAAgB,CAAC;AAE/E,OAAI;AAIF,UAAM,OAAO,aAAa,CAAC,OAAO;KAChC,MAAM;KACN,QAAQ;MACN;OAAE,MAAM;OAAmB,MAAM;OAAU;MAC3C;OAAE,MAAM;OAAY,MAAM;OAAU;MACpC;OAAE,MAAM;OAAa,MAAM;OAAS;MACpC;OAAE,MAAM;OAAQ,MAAM;OAAU;MAChC;OAAE,MAAM;OAAW,MAAM;OAAU;MACpC;KACF,CAAC;AAEF,aAAO,KAAK,gDAAgD,EAAE,YAAY,gBAAgB,CAAC;AAC3F,WAAO;YACA,aAAa;AACpB,aAAO,MAAM,4CAA4C,aAAsB,EAC7E,YAAY,gBACb,CAAC;AACF,WAAO;;;AAIX,WAAO,MAAM,0CAA0C,OAAgB,EACrE,YAAY,gBACb,CAAC;AACF,SAAO;;;;;;;;AASX,SAAgB,sBAA2D;AACzE,QAAO;EACL,QAAQ;GACN,OAAO;GACP,eAAe;GACf,aAAa;GACd;EACD,MAAM;GACJ,gBAAgB;GAChB,GAAG;GACH,IAAI;GACJ,gBAAgB;GAChB,gBAAgB;GACjB;EACD,UAAU;GACR,qBAAqB;GACrB,UAAU;GACV,QAAQ;GACR,qBAAqB;GACrB,gBAAgB;GACjB;EACF;;;;;;;;AASH,SAAgB,2BAA2B,YAAmC;CAC5E,MAAM,WAAW,qBAAqB;AAEtC,KAAI,CAAC,WACH,QAAO;AAGT,QAAO;EACL,QAAQ;GAAE,GAAG,SAAS;GAAQ,GAAG,WAAW;GAAQ;EACpD,MAAM;GAAE,GAAG,SAAS;GAAM,GAAG,WAAW;GAAM;EAC9C,UAAU;GAAE,GAAG,SAAS;GAAU,GAAG,WAAW;GAAU;EAC3D;;;;;;;;;;;;;;;;;;;AC9BH,eAAsB,iBACpB,iBACA,cACA,SAC0B;CAE1B,MAAM,eAAe,uBACnB,SACA,aAAa,SACb,gBACD;CAGD,MAAM,cAAc,4BAA4B;EAC9C,aAAa,QAAQ;EACrB,gBAAgB,QAAQ;EACxB,mBAAmB,QAAQ;EAC3B,QAAQ,QAAQ;EAChB,mBAAmB,aAAa;EAChC,UAAU,aAAa,YAAY;EACnC,gBAAgB,aAAa;EAC9B,CAAC;CAGF,MAAM,WAAW,MAAM,MAAM,aAAa,UAAU,EAAE;EACpD,QAAQ;EACR,SAAS;GACP,gBAAgB;GAChB,uBAAuB,gBAAgB;GACxC;EACD,MAAM,KA
AK,UAAU,YAAY;EAClC,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;EAChB,MAAM,YAAY,MAAM,SAAS,MAAM;AACvC,QAAM,IAAI,MAAM,4BAA4B,YAAY;;AAO1D,QAAO;EACL;EACA,aALkB,SAAS,QAAQ,IAAI,eAAe,EACvB,SAAS,oBAAoB,IAAI;EAKhE,SAAS,EAAE;EACZ;;;;;;;;;;;;;ACjFH,eAAsB,eACpB,QACA,QAC2B;CAC3B,MAAM,EAAE,SAAS,gBAAgB,qBAAqB;AAGtD,KAAI,oBAAoB,CAAC,iBAAiB,SAAS,eAAe,CAChE,OAAM,IAAI,MACR,uBAAuB,eAAe,oBAAoB,iBAAiB,KAAK,KAAK,GACtF;AAGH,KAAI;EAEF,MAAM,WAAY,MAAM,OACrB,YAAY,eAAe,CAC3B,UAAU,QAAQ,CAClB,UAAU;EAGb,MAAM,YAAY,SAAS,cAAc;AAEzC,MAAI,CAAC,UACH,OAAM,IAAI,MAAM,yBAAyB;AAG3C,SAAO;GACL,IAAI,SAAS;GACb,YAAY;GACZ,OAAO,SAAS;GAChB,MAAM,SAAS;GACf,aAAa,SAAS;GACtB,YAAY;GACb;UACMC,OAAgB;AAEvB,MAAI,SAAS,OAAO,UAAU,YAAY,gBAAgB,SAAS,MAAM,eAAe,IACtF,OAAM,IAAI,MAAM,oBAAoB,UAAU;AAEhD,QAAM;;;;;;;;;;;;;;AC3CV,eAAsB,iBACpB,SACA,QACA,SAAwB,EAAE,EACO;CACjC,MAAM,iBAAiB,OAAO,kBAAkB;CAChD,MAAM,WAAW,OAAO,uBAAuB,OAAU,KAAK;CAE9D,MAAM,aAAa,IAAI,KAAK,KAAK,KAAK,GAAG,SAAS;CAElD,MAAM,eAAe,MAAM,QAAQ,KAAK;EACtC,YAAY;EACZ,OAAO,EACL,KAAK;GACH,EACE,MAAM,EACJ,QAAQ,QACT,EACF;GACD,EACE,QAAQ,EACN,QAAQ,UACT,EACF;GACD,EACE,eAAe,EACb,cAAc,WAAW,aAAa,EACvC,EACF;GACF,EACF;EACD,MAAM;EACN,OAAO;EACR,CAAC;AAEF,KAAI,CAAC,aAAa,KAAK,OACrB,QAAO;AAGT,QAAO,aAAa,KAAK;;;;;;;;;;;AAY3B,eAAsB,2BACpB,SACA,QACA,gBACA,SAAwB,EAAE,EACO;CACjC,MAAM,iBAAiB,OAAO,kBAAkB;CAEhD,MAAM,eAAe,MAAM,QAAQ,KAAK;EACtC,YAAY;EACZ,OAAO,EACL,KAAK,CACH,EACE,iBAAiB,EACf,QAAQ,gBACT,EACF,EACD,EACE,MAAM,EACJ,QAAQ,QACT,EACF,CACF,EACF;EACD,OAAO;EACR,CAAC;AAEF,KAAI,CAAC,aAAa,KAAK,OACrB,QAAO;AAGT,QAAO,aAAa,KAAK;;;;;;;;;;;AAY3B,eAAsB,aACpB,SACA,QACA,gBACA,SAAwB,EAAE,EACO;CACjC,MAAM,iBAAiB,OAAO,kBAAkB;CAEhD,MAAM,eAAe,MAAM,QAAQ,KAAK;EACtC,YAAY;EACZ,OAAO,EACL,KAAK,CACH,EACE,iBAAiB,EACf,QAAQ,gBACT,EACF,EACD,EACE,MAAM,EACJ,QAAQ,QACT,EACF,CACF,EACF;EACD,OAAO;EACR,CAAC;AAEF,KAAI,CAAC,aAAa,KAAK,OACrB,QAAO;CAGT,MAAM,UAAU,aAAa,KAAK;AAClC,KAAI,CAAC,QACH,QAAO;AAET,OAAM,QAAQ,OAAO;EACnB,YAAY;EACZ,OAAO,EACL,iBAAiB,EACf,QAAQ,gBACT,EACF;EACD,MAAM;GACJ,QAAQ;GACR,4BAAW,IAAI,MAAM,EAAC,aAAa;GACpC;EACF,CAAC;AAEF,QAAO;EACL,iBAAiB,QAAQ;EACzB,UAAU,QAAQ,YAAY,EAAE;EAChC,QAAQ;EACR,cAAc,QAAQ;EACtB,YAAY,QAAQ;EACpB,eAAe,QAAQ;EACxB;;;;;;;;;;;AC5KH,SAAgB,eAAe,OAAyB;AACtD,QAAO,SAAS,KAAK,UAAU,MAAM,CAAC;;;;;;;;;AAUxC,SAAgB,aACd,YACA,SACA,OACM;CACN,MAAM,OAAO,eAAe,MAAM;AAClC,YAAW,QAAQ,QAAQ,OAAO,KAAK,CAAC;;;;;;;;;;;;;;;;;ACkB1C,eAAsB,gBACpB,SACA,QACA,gBACA,aACA,kBACA,SACA,UACA,iBAAiC,iBAClB;AACf,KAAI;EAEF,MAAM,WAAW,MAAM,QAAQ,KAAK;GAClC,YAAY;GACZ,OAAO,EACL,iBAAiB,EACf,QAAQ,gBACT,EACF;GACD,OAAO;GACR,CAAC;EAEF,MAAMC,iBAAyC;GAC7C,MAAM;GACN,SAAS;GACT,4BAAW,IAAI,MAAM,EAAC,aAAa;GACpC;EAED,MAAMC,sBAA8C;GAClD,MAAM;GACN,SAAS;GACT,4BAAW,IAAI,MAAM,EAAC,aAAa;GACnC,SAAS,QAAQ,KAAK,OAAO;IAC3B,IAAI,EAAE;IACN,OAAO,EAAE;IACT,MAAM,EAAE;IACR,aAAa,EAAE;IACf,MAAM,EAAE;IACT,EAAE;GACJ;AAED,MAAI,SAAS,KAAK,SAAS,KAAK,SAAS,KAAK,GAE5C,OAAM,sBACJ,SACA,SAAS,KAAK,IACd,gBACA,qBACA,UACA,eACD;MAGD,OAAM,iBACJ,SACA,QACA,gBACA,gBACA,qBACA,UACA,eACD;UAEI,OAAO;AACd,WAAO,MAAM,6BAA6B,OAAgB;GACxD;GACA;GACD,CAAC;;;;;;AAQN,eAAe,sBACb,SACA,SACA,gBACA,qBACA,UACA,gBACe;CACf,MAAM,mBAAoB,QAAQ,YAAyC,EAAE;CAC7E,MAAM,mBAAoB,QAAQ,YAAgC,EAAE;CAEpE,MAAM,WAAW;EAAC,GAAG;EAAkB;EAAgB;EAAoB;CAC3E,MAAM,cAAc,CAAC,GAAG,kBAAkB,GAAG,SAAS;CACtD,MAAM,eACH,QAAQ,gBAAgB,KAAK,SAAS,QAAQ,KAAK,MAAM,MAAM,EAAE,OAAO,OAAO,EAAE;CACpF,MAAM,aACH,QAAQ,cAAc,KAAK,SAAS,QAAQ,KAAK,MAAM,OAAO,EAAE,YAAY,IAAI,EAAE;AAErF,OAAM,QAAQ,OAAO;EACnB,YAAY;EACZ,IAAI,QAAQ;EACZ,MAAM;GACJ;GACA,UAAU;GACV,cAAc;GACd,YAAY;GACZ,gCAAe,IAAI,MAAM,EAAC,aAAa;GACvC,QAAQ;GACT;EACF,CAAC;AAEF,UAAO,KAAK,qCAAqC;EAC/C,WAAW,QAAQ;EACnB,gBAAgB,QAAQ;EACxB;EACA;EACD,CAAC;;;;;AAMJ,eAAe,iBACb,SACA,QACA,g
BACA,gBACA,qBACA,UACA,gBACe;CACf,MAAM,cAAc,SAAS,QAAQ,KAAK,MAAM,MAAM,EAAE,OAAO,OAAO,EAAE;CACxE,MAAM,YAAY,SAAS,QAAQ,KAAK,MAAM,OAAO,EAAE,YAAY,IAAI,EAAE;AAEzE,OAAM,QAAQ,OAAO;EACnB,YAAY;EACZ,MAAM;GACJ,MAAM;GACN,iBAAiB;GACjB,QAAQ;GACR,UAAU,CAAC,gBAAgB,oBAAoB;GAC/C;GACA,cAAc;GACd,YAAY;GACZ,gCAAe,IAAI,MAAM,EAAC,aAAa;GACxC;EACF,CAAC;AAEF,UAAO,KAAK,yCAAyC;EACnD;EACA;EACA;EACA;EACD,CAAC;;;;;;;;AChMJ,MAAa,gBAAgB,MAAW,YAA2B;AAC/D,QAAO,IAAI,SAAS,KAAK,UAAU,KAAK,EAAE;EACxC,SAAS,EAAE,gBAAgB,oBAAoB;EAC/C,GAAG;EACJ,CAAC;;;;;AAMN,eAAsB,oBACpB,SACA,QAWA;AAEA,KAAI,CAAC,MAAM,OAAO,iBAAiB,QAAQ,CACzC,QAAO;EACL,SAAS;EACT,OAAO,aAAa,EAAE,OAAO,kDAAkD,EAAE,EAAE,QAAQ,KAAK,CAAC;EAClG;AAIH,KAAI,CAAC,QAAQ,OAAO,CAAC,QAAQ,KAC3B,QAAO;EACL,SAAS;EACT,OAAO,aAAa,EAAE,OAAO,iBAAiB,EAAE,EAAE,QAAQ,KAAK,CAAC;EACjE;CAGH,MAAM,EAAE,IAAI,QAAQ,UAAU,QAAQ;CACtC,MAAM,YAAY,SAAS;CAC3B,MAAM,UAAU,MAAM,OAAO,YAAY;CACzC,MAAM,OAAO,MAAM,QAAQ,QAAQ;AAGnC,KAAI,CAAC,KACH,QAAO;EACL,SAAS;EACT,OAAO,aAAa,EAAE,OAAO,kBAAkB,EAAE,EAAE,QAAQ,KAAK,CAAC;EAClE;AAIH,KAAI,CAAC,KAAK,WAAW,OAAO,KAAK,YAAY,YAAY,KAAK,QAAQ,MAAM,KAAK,GAC/E,QAAO;EACL,SAAS;EACT,OAAO,aAAa,EAAE,OAAO,2BAA2B,EAAE,EAAE,QAAQ,KAAK,CAAC;EAC3E;AAKH,QAAO;EACL,SAAS;EACT;EACA;EACA;EACA,aAPkB,KAAK,QAAQ,MAAM;EAQrC;EACD;;;;;;;;AC5DH,eAAsB,8BACpB,aACA,QACA,iBACmB;AACnB,QAAO,MAAM,4CAA4C;CAEzD,MAAM,kBAAkB,OAAO;AAE/B,KAAI,CAAC,gBACD,OAAM,IAAI,MAAM,kCAAkC;CAGtD,IAAI;CAGJ,MAAM,eAAe,gBAAgB;CACrC,MAAM,SAAS,gBAAgB;CAC/B,MAAM,QAAQ,gBAAgB;CAC9B,MAAM,aAAa,gBAAgB;CAEnC,MAAM,gBAAgB,IAAI,OAAO;EAAE,SAAS;EAAM,QAAQ;EAAmB,CAAC;AAE9E,KAAI,iBAAiB,SACjB,YAAW,IAAI,wBAAwB;EACnC,MAAM;EACE;EACD;EACK;EACf,EAA0B,cAAc;KAExC,YAAW,IAAI,wBAAwB;EACpC,MAAM;EACE;EACD;EACK;EACf,EAA0B,cAAc;AAG7B,KAAI,qBAAqB,UAAU,eAAe,gBAAgB;CAMlF,MAAM,kBAAkB,MAAM,SAAS,kBAAkB,YAAY;AAErE,KAAI,CAAC,gBACH,OAAM,IAAI,MAAM,+BAA+B;CAKjD,MAAM,YAAY,SAAS;AAE3B,KAAI,OAAO,yBAAyB;EAClC,MAAM,oBAAoB,OAAO,wBAC/B,WACA,gBAAgB,MAAM,YACvB;AACD,kBAAgB,KAAK,kBAAkB;AAEvC,SAAO,KAAK,oCAAoC;GAC9C,OAAO;GACP,aAAa,gBAAgB,MAAM;GACnC,SAAS,kBAAkB;GAC5B,CAAC;;AAGJ,QAAO,gBAAgB;;;;;;;;AC7EzB,eAAsB,wBACpB,QACA,SACA,QACA,gBACA,aACA,kBACA,SACA,iBACe;AACf,KAAI,CAAC,kBAAkB,CAAC,OAAO,gBAC7B;AAGF,OAAM,OAAO,gBACX,SACA,QACA,gBACA,aACA,kBACA,SACA,iBACA,OAAO,eACR;AAED,UAAO,KAAK,oCAAoC,EAC9C,gBACD,CAAC;;;;;;;;AC3BJ,eAAsB,yBACpB,QACA,SACA,QACA,WACA,aAC0B;AAC1B,KAAI,CAAC,OAAO,0BAA0B,CAAC,OAAO,gBAC5C,QAAO;CAKT,MAAM,uBAF2B,OAAO,uBAAuB,YAAY,GAChD,OAAO,uBAAuB,YAAY,GAAG;CAGxE,MAAM,aAAa,MAAM,OAAO,gBAAgB,SAAS,QAAQ,qBAAqB;AAEtF,KAAI,CAAC,WAAW,SAAS;AACvB,WAAO,KAAK,iCAAiC;GAC3C;GACA,OAAO,WAAW;GAClB,MAAM,WAAW;GACjB,WAAW,WAAW;GACvB,CAAC;AACF,SAAO,aACL;GACE,OAAO;GACP,YAAY;IACV,OAAO,WAAW;IAClB,MAAM,WAAW;IACjB,WAAW,WAAW;IACtB,UAAU,WAAW;IACtB;GACF,EACD,EAAE,QAAQ,KAAK,CAChB;;AAGH,UAAO,KAAK,sDAAsD;EAChE;EACA;EACA,OAAO,WAAW;EAClB,MAAM,WAAW;EACjB,WAAW,WAAW;EACvB,CAAC;AAEF,QAAO;;;;;;;;AC9CT,SAAgB,oBAAoB,iBAGlC;CACA,MAAM,kBAAkB,gBAAgB,QACrC,KAAK,UAAU,MAAM,MAAM,OAAO,OACnC,EACD;CACD,MAAM,eAAe,gBAAgB,QAClC,KAAK,UAAU,OAAO,MAAM,YAAY,IACzC,EACD;AAED,UAAO,KAAK,gCAAgC;EAC1C,aAAa;EACb,cAAc;EACf,CAAC;AAEF,QAAO;EAAE,aAAa;EAAiB;EAAc;;;;;AAMvD,eAAsB,uBACpB,QACA,SACA,QACA,aACA,cACA,WACe;AACf,KAAI,CAAC,OAAO,kBACV;CAGF,MAAM,aAAa,MAAM,OAAO,kBAAkB,SAAS,OAAO;AAElE,WAAU;EACR,MAAM;EACN,MAAM;GACJ,aAAa;GACb,UAAU;GACV,aAAa,WAAW;GACxB,YAAY,WAAW;GACvB,iBAAiB,WAAW;GAC5B,UAAU,WAAW;GACtB;EACF,CAAC;;;;;;;;ACsCJ,SAAgB,sBAAsB,QAA4B;AAChE,QAAO,eAAe,KAAK,SAAyB;AAClD,MAAI;GAEF,MAAM,YAAY,MAAM,oBAAoB,SAAS,OAAO;AAC5D,OAAI,CAAC,UAAU,QACb,QAAO,UAAU;GAGnB,MAAM,EAAE,QAAQ,WAAW,SAAS,aAAa,SAAS;GAG1D,IAAIC;GACJ,MAAM,YAAY,KAAK;AAEvB,OAAI,aAAa,OAAO,KAAK,QAAQ;IACjC,MAAM,QAAQ,OAAO,IAAI
,OAAO,MAAK,MAAK,EAAE,SAAS,UAAU;AAC/D,QAAI,CAAC,MACH,QAAO,IAAI,SAAS,KAAK,UAAU,EAAE,OAAO,oBAAoB,aAAa,CAAC,EAAE,EAAE,QAAQ,KAAK,CAAC;AAElG,mBAAe;KACX,SAAS,MAAM;KACf,mBAAmB,MAAM;KACzB,UAAU,MAAM;KAChB,gBAAgB,OAAO,IAAI;KAC9B;cACM,OAAO,KAAK,UAAU,OAAO,IAAI,OAAO,SAAS,GAAG;IAE3D,MAAM,QAAQ,OAAO,IAAI,OAAO;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,0BAA0B;AACtD,mBAAe;KACX,SAAS,MAAM;KACf,mBAAmB,MAAM;KACzB,UAAU,MAAM;KAChB,gBAAgB,OAAO,IAAI;KAC9B;SAED,QAAO,IAAI,SAAS,KAAK,UAAU,EAAE,OAAO,kCAAkC,CAAC,EAAE,EAAE,QAAQ,KAAK,CAAC;GAIrG,MAAM,kBAAkB,MAAM,yBAC5B,QACA,SACA,QACA,WACA,YACD;AACD,OAAI,gBACF,QAAO;AAGT,YAAO,KAAK,2BAA2B;IACrC;IACA,QAAQ,KAAK,UAAU;IACvB,WAAW,aAAa;IACxB,SAAS,aAAa;IACtB,YAAY,CAAC,CAAC,KAAK;IACnB,sBAAsB,CAAC,CAAC,KAAK;IAC7B,eAAe,YAAY;IAC5B,CAAC;GAGF,MAAM,UAAU,IAAI,aAAa;GACjC,MAAM,SAAS,IAAI,eAAe,EAChC,MAAM,MAAM,YAAY;IACtB,MAAMC,kBAAmC,EAAE;IAC3C,IAAI,uBAAuB;IAC3B,IAAIC,wBAAuC;IAC3C,IAAIC,iBAAgC,EAAE;AAEtC,QAAI;KACF,MAAM,aAAa,UAAoB,aAAa,YAAY,SAAS,MAAM;KAG/E,MAAM,iBAAiB,MAAM,8BAC3B,aACA,QACA,gBACD;KAGD,MAAM,eAAe,MAAM,iBACzB,OAAO,WACP,cACA;MACE;MACA;MACA,QAAQ,KAAK;MACb,mBAAmB,KAAK;MACzB,CACF;KAGD,MAAM,eAAe,aAAa,eAAe,aAAa,SAAS,OACnE,MAAM,OAAO,wBAAwB,aAAa,UAAU,YAAY,QAAQ,GAChF,MAAM,OAAO,2BACX,MAAM,aAAa,SAAS,MAAM,EAClC,YACA,QACD;AAGL,4BAAuB,aAAa;AACpC,6BAAwB,aAAa;AACrC,sBAAiB,aAAa;AAC9B,qBAAgB,KAAK,aAAa,YAAY;KAG9C,MAAM,EAAE,aAAa,iBAAiB,iBACpC,oBAAoB,gBAAgB;AAGtC,WAAM,uBACJ,QACA,SACA,QACA,iBACA,cACA,UACD;AAGD,WAAM,wBACJ,QACA,SACA,QACA,uBACA,aACA,sBACA,gBACA,gBACD;AAED,cAAO,KAAK,uCAAuC;MACjD;MACA,gBAAgB;MAChB,aAAa;MACd,CAAC;AACF,gBAAW,OAAO;aACX,OAAO;AACd,cAAO,MAAM,8BAA8B,OAAgB;MACzD;MACA,QAAQ,KAAK;MACd,CAAC;AACF,kBAAa,YAAY,SAAS;MAChC,MAAM;MACN,MAAM,EACJ,OAAO,iBAAiB,QAAQ,MAAM,UAAU,qBACjD;MACF,CAAC;AACF,gBAAW,OAAO;;MAGvB,CAAC;AAEF,UAAO,IAAI,SAAS,QAAQ,EAC1B,SAAS;IACP,gBAAgB;IAChB,iBAAiB;IACjB,YAAY;IACb,EACF,CAAC;WACK,OAAO;AACd,YAAO,MAAM,8BAA8B,OAAgB,EACzD,QAAQ,QAAQ,MAAM,IACvB,CAAC;AAEF,UAAO,IAAI,SACT,KAAK,UAAU;IACb,OAAO;IACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU;IACnD,CAAC,EACF;IACE,QAAQ;IACR,SAAS,EAAE,gBAAgB,oBAAoB;IAChD,CACF;;;;;;;;;;;;;;;ACtQP,SAAgBC,sBAAoB,gBAAgC;AAClE,KAAI,mBAAmB,oBAAqB,QAAO;AACnD,KAAI,mBAAmB,aAAc,QAAO;AAC5C,QAAO;;;;;;AAOT,SAAgB,uBAAuB,MAAsB;CAE3D,MAAM,QAAQ,KAAK,MAAM,CAAC,MAAM,MAAM,CAAC;AACvC,QAAO,KAAK,KAAK,QAAQ,IAAI;;;;;;;;;;;;;ACP/B,eAAsB,+BACpB,UACA,YACA,SAMC;AACD,UAAO,MAAM,uCAAuC;AAEpD,KAAI,CAAC,SAAS,KACZ,OAAM,IAAI,MAAM,wBAAwB;CAG1C,MAAM,SAAS,SAAS,KAAK,WAAW;CACxC,MAAM,UAAU,IAAI,aAAa;CACjC,IAAI,SAAS;CACb,IAAIC,UAAyB,EAAE;CAC/B,IAAI,sBAAsB;CAC1B,IAAIC,iBAAgC;CACpC,IAAI,cAAc;CAClB,IAAI,uBAAuB;AAE3B,KAAI;AACF,SAAO,MAAM;GACX,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,OAAI,MAAM;AACR,aAAO,MAAM,+BAA+B;AAC5C;;AAGF,aAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;GACjD,MAAM,QAAQ,OAAO,MAAM,KAAK;AAChC,YAAS,MAAM,KAAK,IAAI;AAExB,QAAK,MAAM,QAAQ,OAAO;IACxB,MAAM,QAAQ,uBAAuB,KAAK;AAC1C,QAAI,CAAC,MAAO;AAGZ,QAAI,MAAM,QAAQ,UAAU;AAC1B,kBAAa,YAAY,SAAS;MAAE,MAAM;MAAQ,MAAM;MAAI,CAAC;AAC7D;;AAIF,QAAI,CAAC,kBAAkB,MAAM,gBAAgB;AAC3C,sBAAiB,MAAM;AACvB,cAAO,MAAM,4BAA4B,EAAE,gBAAgB,CAAC;AAC5D,kBAAa,YAAY,SAAS;MAAE,MAAM;MAAmB,MAAM;MAAgB,CAAC;;AAItF,QAAI,CAAC,uBAAuB,MAAM,SAAS;AACzC,eAAU,0BAA0B,MAAM,SAASC,sBAAoB;AACvE,mBAAc,iBAAiB,MAAM,QAAQ;AAE7C,SAAI,QAAQ,SAAS,EACnB,cAAa,YAAY,SAAS;MAAE,MAAM;MAAW,MAAM;MAAS,CAAC;AAGvE,2BAAsB;;AAIxB,QAAI,MAAM,SAAS;AACjB,6BAAwB,MAAM;AAC9B,kBAAa,YAAY,SAAS;MAAE,MAAM;MAAS,MAAM,MAAM;MAAS,CAAC;;;;WAIvE;AACR,SAAO,aAAa;;CAItB,MAAM,iBAAiB,uBAAuB,YAAY;CAC1D,MAAM,kBAAkB,uBAAuB,qBAAqB;CAGpE,MAAMC,cAA6B;EACjC,SAAS;EACT,OAAO;EACP,QAAQ;GACN,OAAO;GACP,QAAQ;GACR,OAAO,iBAAiB;GACzB;EACD,UAAW,iBAAiB,QAAe,kBAAkB;EAC7D,4BAAW,IAAI,MAAM,EAAC
,aAAa;EACpC;AAED,UAAO,KAAK,uBAAuB;EACjC,aAAa;EACb,cAAc;EACd,aAAa,YAAY,OAAO;EAChC,SAAS,YAAY;EACtB,CAAC;AAEF,QAAO;EACL;EACA;EACA;EACA;EACD;;;;;;;;;;;;;AC1GH,eAAsB,kCACpB,MACA,YACA,SAMC;AACD,UAAO,MAAM,qDAAqD;CAGlE,MAAM,YAAY;CAQlB,IAAIC,iBAAgC;AACpC,KAAI,UAAU,cAAc,gBAC1B,kBAAiB,UAAU,aAAa;UAC/B,UAAU,gBACnB,kBAAiB,UAAU;CAG7B,IAAI,aAAa;AACjB,KAAI,UAAU,cAAc,OAC1B,cAAa,UAAU,aAAa;UAC3B,UAAU,YAAY,UAAU,QACzC,cAAa,UAAU,YAAY,UAAU,WAAW;CAG1D,MAAM,UAAU,0BAA2B,UAAU,WAAW,EAAE,EAAiCC,sBAAoB;CACvH,MAAM,cAAc,iBAAkB,UAAU,WAAW,EAAE,CAAgC;AAG7F,KAAI,YAAY;EACd,MAAM,QAAQ,WAAW,MAAM,IAAI;AACnC,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,QAAQ,MAAM,IAAI,MAAM,KAAK,MAAM,MAAM;AAC/C,OAAI,MACF,cAAa,YAAY,SAAS;IAAE,MAAM;IAAS,MAAM;IAAO,CAAC;;;AAKvE,KAAI,eACF,cAAa,YAAY,SAAS;EAAE,MAAM;EAAmB,MAAM;EAAgB,CAAC;AAGtF,KAAI,QAAQ,SAAS,EACnB,cAAa,YAAY,SAAS;EAAE,MAAM;EAAW,MAAM;EAAS,CAAC;AAGvE,cAAa,YAAY,SAAS;EAAE,MAAM;EAAQ,MAAM;EAAI,CAAC;CAG7D,MAAM,iBAAiB,uBAAuB,YAAY;CAC1D,MAAM,kBAAkB,uBAAuB,WAAW;CAE1D,MAAMC,cAA6B;EACjC,SAAS;EACT,OAAO;EACP,QAAQ;GACN,OAAO;GACP,QAAQ;GACR,OAAO,iBAAiB;GACzB;EACD,UAAW,iBAAiB,QAAe,kBAAkB;EAC7D,4BAAW,IAAI,MAAM,EAAC,aAAa;EACpC;AAED,QAAO;EACL,sBAAsB;EACtB;EACA;EACA;EACD;;;;;;;;;;;;ACxEH,SAAgB,wBAAwB,QAA+B;AACrE,QAAO,eAAe,IAAI,SAAyB;AACjD,MAAI;AACF,OAAI,CAAC,MAAM,OAAO,iBAAiB,QAAQ,CACzC,QAAO,aAAa,EAAE,OAAO,kDAAkD,EAAE,EAAE,QAAQ,KAAK,CAAC;GAEnG,MAAM,SAAS,QAAQ,MAAM;AAE7B,OAAI,CAAC,QAAQ,OAAO,CAAC,OACnB,QAAO,aAAa,EAAE,OAAO,iBAAiB,EAAE,EAAE,QAAQ,KAAK,CAAC;GAGlE,MAAM,EAAE,iBAAiB,IAAI,IAAI,QAAQ,IAAI;GAC7C,MAAM,WAAW,aAAa,IAAI,SAAS,KAAK;GAChD,MAAM,iBAAiB,aAAa,IAAI,iBAAiB;GAGzD,MAAM,UAAU,MAAM,OAAO,YAAY;AAGzC,OAAI,UAAU;IACZ,MAAMC,YAAU,MAAM,iBAAiB,SAAS,QAAQ,OAAO,cAAc;AAE7E,QAAI,CAACA,UACH,QAAO,aAAa,EAAE,OAAO,yBAAyB,EAAE,EAAE,QAAQ,KAAK,CAAC;AAG1E,WAAO,aAAaA,UAAQ;;AAI9B,OAAI,CAAC,eACH,QAAO,aACL,EAAE,OAAO,6CAA6C,EACtD,EAAE,QAAQ,KAAK,CAChB;GAGH,MAAM,UAAU,MAAM,2BACpB,SACA,QACA,gBACA,OAAO,cACR;AAED,OAAI,CAAC,QACH,QAAO,aAAa,EAAE,OAAO,iCAAiC,EAAE,EAAE,QAAQ,KAAK,CAAC;AAGlF,UAAO,aAAa,QAAQ;WACrB,OAAO;AACd,YAAO,MAAM,iCAAiC,OAAgB,EAC5D,QAAQ,QAAQ,MAAM,IACvB,CAAC;AAEF,UAAO,aACL;IACE,OAAO;IACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU;IACnD,EACD,EAAE,QAAQ,KAAK,CAChB;;;;;;;;;;AAWP,SAAgB,2BAA2B,QAA+B;AACxE,QAAO,eAAe,OAAO,SAAyB;AACpD,MAAI;AACF,OAAI,CAAC,MAAM,OAAO,iBAAiB,QAAQ,CACzC,QAAO,aAAa,EAAE,OAAO,kDAAkD,EAAE,EAAE,QAAQ,KAAK,CAAC;GAEnG,MAAM,SAAS,QAAQ,MAAM;AAC7B,OAAI,CAAC,QAAQ,OAAO,CAAC,OACnB,QAAO,aAAa,EAAE,OAAO,iBAAiB,EAAE,EAAE,QAAQ,KAAK,CAAC;GAGlE,MAAM,EAAE,iBAAiB,IAAI,IAAI,QAAQ,IAAI;GAC7C,MAAM,iBAAiB,aAAa,IAAI,iBAAiB;AAEzD,OAAI,CAAC,eACH,QAAO,aACL,EAAE,OAAO,yCAAyC,EAClD,EAAE,QAAQ,KAAK,CAChB;GAIH,MAAM,UAAU,MAAM,OAAO,YAAY;AAEzC,YAAO,KAAK,wBAAwB;IAAE;IAAgB;IAAQ,CAAC;GAE/D,MAAM,UAAU,MAAM,aAAa,SAAS,QAAQ,gBAAgB,OAAO,cAAc;AAEzF,OAAI,CAAC,QACH,QAAO,aACL,EAAE,OAAO,sDAAsD,EAC/D,EAAE,QAAQ,KAAK,CAChB;AAGH,YAAO,KAAK,oCAAoC;IAC9C;IACA,aAAa,QAAQ;IACrB,WAAW,QAAQ;IACpB,CAAC;AAEF,UAAO,aAAa;IAClB,SAAS;IACT,SAAS;IACT,SAAS;KACP,iBAAiB;KACjB,QAAQ;KACR,cAAc,QAAQ;KACtB,YAAY,QAAQ;KACrB;IACF,CAAC;WACK,OAAO;AACd,YAAO,MAAM,8BAA8B,OAAgB;IACzD,gBAAgB,QAAQ,MAAM,IAAI,IAAI,QAAQ,IAAI,CAAC,aAAa,IAAI,iBAAiB,GAAG;IACxF,QAAQ,QAAQ,MAAM;IACvB,CAAC;AAEF,UAAO,aACL;IACE,OAAO;IACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU;IACnD,EACD,EAAE,QAAQ,KAAK,CAChB;;;;;;;;;;;;;ACtIP,SAAgB,uBAAuB,QAA8B;AACnE,QAAO,eAAe,IACpB,SACA;AACA,MAAI;AACF,OAAI,CAAC,MAAM,OAAO,iBAAiB,QAAQ,CACzC,QAAO,aAAa,EAAE,OAAO,iDAAiD,EAAE,EAAE,QAAQ,KAAK,CAAC;AAElG,OAAI,CAAC,QAAQ,OAAO,CAAC,QAAQ,KAC3B,QAAO,aAAa,EAAE,OAAO,iBAAiB,EAAE,EAAE,QAAQ,KAAK,CAAC;GAElE,MAAM,KAAK,QAAQ,aAAa;GAEhC,MAAM,iBADM,IAAI,IAAI,QAAQ,IAAI,CACL,aAAa,IAAI,aAAa;AAGzD,OAAI,CAAC,G
ACH,QAAO,aAAa,EAAE,OAAO,+BAA+B,EAAE,EAAE,QAAQ,KAAK,CAAC;AAIhF,OAAI,CAAC,eACH,QAAO,aACL;IACE,OAAO;IACP,aAAa,OAAO;IACrB,EACD,EAAE,QAAQ,KAAK,CAChB;AAcH,UAAO,aAPW,MAAM,eAHT,sBAAsB,OAAO,UAAU,EAGP;IAC7C,SAAS;IACT;IACA,kBAAkB,OAAO;IAC1B,CAAC,CAG4B;WACvBC,OAAgB;AACvB,YAAO,MAAM,wBAAwB,OAAgB;IACnD,SAAS,QAAQ,aAAa;IAC9B,YAAY,QAAQ,MAAM,IAAI,IAAI,QAAQ,IAAI,CAAC,aAAa,IAAI,aAAa,GAAG;IACjF,CAAC;AAGF,OAAI,iBAAiB,OAAO;AAC1B,QAAI,MAAM,QAAQ,SAAS,qBAAqB,CAC9C,QAAO,aACL;KACE,OAAO,MAAM;KACb,aAAa,OAAO;KACrB,EACD,EAAE,QAAQ,KAAK,CAChB;AAEH,QAAI,MAAM,QAAQ,SAAS,YAAY,CACrC,QAAO,aAAa,EAAE,OAAO,uBAAuB,EAAE,EAAE,QAAQ,KAAK,CAAC;;AAI1E,UAAO,aACL;IACE,OAAO;IACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU;IACnD,EACD,EAAE,QAAQ,KAAK,CAChB;;;;;;;ACtFP,SAAgB,uBAAuB,QAA8B;AACnE,QAAO,eAAe,MAAM;AAC1B,MAAI;AASA,UAAO,aAAa,EAAE,SARP,OAAO,WAAW,UAAU,EAAE,EAGjB,KAAI,WAAU;IACtC,MAAM,MAAM;IACZ,MAAM,MAAM,QAAQ,MAAM;IAC7B,EAAE,EAEyC,EAAE,EAAE,QAAQ,KAAK,CAAC;WACzD,OAAO;AACZ,UAAO,aAAa,EAAE,OAAO,yBAAyB,EAAE,EAAE,QAAQ,KAAK,CAAC;;;;;;;;;;;;ACFhF,SAAgB,yBACd,QACwI;CACxI,MAAMC,YAAoJ,EAAE;AAG5J,KAAI,CAAC,OAAO,UAAU,OAAO,OAAO,WAAW,KAAK,CAAC,OAAO,UAC1D,QAAO;CAGT,MAAM,EAAE,QAAQ,WAAW,cAAc;CAGzC,MAAM,mBAAmB,OAAO,SAAQ,UAAS,MAAM,kBAAkB,IAAI,EAAE;CAC/E,MAAM,mBAAmB,MAAM,KAAK,IAAI,IAAI,iBAAiB,CAAC;CAG9D,MAAM,mBAAmB;EACvB,SAAS;EACT;EACA;EACA,QAAQ,OAAO;EACf,MAAM,OAAO;EACb,UAAU,OAAO;EAClB;AAGD,WAAU,KAAK;EACb,MAAM;EACN,QAAQ;EACR,SAAS,sBAAsB;GAC7B,gBAAgB;GAChB,kBAAkB,UAAU;GAC5B;GACA,KAAK;GACL,YAAY,UAAU;GACtB,iBAAiB,UAAU;GAC3B,mBAAmB,UAAU;GAC7B,iBAAiB,UAAU;GAC3B,yBAAyB;GACzB,4BAA4B;GAC5B,yBAAyB,UAAU;GACnC,wBAAwB,UAAU;GAClC,iBAAiB,OAAO;GACzB,CAAC;EACH,CAAC;AAEF,WAAU,KAAK;EACb,MAAM;EACN,QAAQ;EACR,SAAS,wBAAwB;GAC/B,YAAY,UAAU;GACtB,kBAAkB,UAAU;GAC7B,CAAC;EACH,CAAC;AAEF,WAAU,KAAK;EACb,MAAM;EACN,QAAQ;EACR,SAAS,2BAA2B;GAClC,YAAY,UAAU;GACtB,kBAAkB,UAAU;GAC7B,CAAC;EACH,CAAC;AAEF,WAAU,KAAK;EACb,MAAM;EACN,QAAQ;EACR,SAAS,uBAAuB;GAC9B;GACA,kBAAkB,UAAU;GAC5B;GACD,CAAC;EACH,CAAC;AAEF,WAAU,KAAK;EACb,MAAM;EACN,QAAQ;EACR,SAAS,uBAAuB;GAC9B,WAAW;GACX,kBAAkB,UAAU;GAC7B,CAAC;EACH,CAAC;AAEF,QAAO;;;;;;;;AClGT,MAAa,4BACX,kBACmB;AACnB,cAAa;AACX,MAAI;GAEF,MAAMC,cAA8C,EAAE;AACtD,QAAK,MAAM,CAAC,MAAM,iBAAiB,OAAO,QACxC,cAAc,eAAe,EAAE,CAChC,CACC,KAAI,MAAM,QAAQ,aAAa,EAAE;IAE/B,MAAM,qBAAqB,aAAa,MAAM,WAAW,OAAO,QAAQ;AACxE,QAAI,oBAAoB;KAEtB,IAAIC,SAA+D,EAAE;AACrE,cAAS,mBAAmB;KAC5B,MAAM,cAAc,OAAO,QAAO,MAAK,EAAE,MAAM,CAAC,KAAI,MAAK,EAAE,KAAK;KAChE,MAAM,eAAe,OAAO,QAAO,MAAK,EAAE,UAAU,MAAM,CAAC,KAAI,MAAK,EAAE,KAAK;AAO3E,iBAAY,KAAK;MACf;MACA,aACE,mBAAmB,eACnB,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE;MAC9C;MACA;MACD,CAAC;;;AAKR,UAAO,SAAS,KAAK;IACnB,aAAa;IACb;IACD,CAAC;WACK,QAAQ;AAEf,UAAO,SAAS,KACd,EAAE,OAAO,6BAA6B,EACtC,EAAE,QAAQ,KAAK,CAChB;;;;;;;AC7CP,IAAM,cAAN,MAA+B;CAC7B,AAAQ,wBAAQ,IAAI,KAA4B;CAChD,AAAiB;CACjB,AAAiB;CAEjB,YAAY,UAAwB,EAAE,EAAE;AACtC,OAAK,aAAa,QAAQ,OAAO,MAAS;AAC1C,OAAK,UAAU,QAAQ,WAAW;;;;;CAMpC,AAAQ,YAAY,OAAe,YAAqB,QAAsC;EAC5F,MAAM,UAAU,GAAG,cAAc,YAAY,GAAG;AAChD,MAAI,OAKF,QAAO,GAAG,QAAQ,GAJG,OAAO,KAAK,OAAO,CACrC,MAAM,CACN,KAAI,QAAO,GAAG,IAAI,GAAG,OAAO,OAAO,CACnC,KAAK,IAAI;AAGd,SAAO;;;;;CAMT,UAAgB;EACd,MAAM,MAAM,KAAK,KAAK;AACtB,OAAK,MAAM,CAAC,KAAK,UAAU,KAAK,MAAM,SAAS,CAC7C,KAAI,MAAM,MAAM,YAAY,MAAM,IAChC,MAAK,MAAM,OAAO,IAAI;;;;;CAQ5B,MAAM,SAAwB;AAC5B,MAAI,CAAC,SAAS;AACZ,QAAK,MAAM,OAAO;AAClB;;AAGF,OAAK,MAAM,OAAO,KAAK,MAAM,MAAM,CACjC,KAAI,IAAI,SAAS,QAAQ,CACvB,MAAK,MAAM,OAAO,IAAI;;;;;CAQ5B,IAAI,OAAe,YAAqB,QAAwC;EAC9E,MAAM,MAAM,KAAK,YAAY,OAAO,cAAc,IAAI,OAAO;EAC7D,MAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AAEjC,MAAI,CAAC,MACH,QAAO;AAIT,MAAI,KAAK,KAAK,GAAG,MAAM,YAAY,MAAM,KAAK;AAC5C,QAAK,MAAM,OAAO,IAAI;AACtB,UAAO;;AAGT,SAAO,MAAM
;;;;;CAMf,WAAgE;AAC9D,SAAO;GACL,SAAS,KAAK;GACd,MAAM,KAAK,MAAM;GAClB;;;;;CAMH,IAAI,OAAe,YAAqB,QAAuC;AAC7E,SAAO,KAAK,IAAI,OAAO,YAAY,OAAO,KAAK;;;;;CAMjD,IACE,OACA,MACA,YACA,QACA,KACM;EACN,MAAM,MAAM,KAAK,YAAY,OAAO,cAAc,IAAI,OAAO;AAG7D,MAAI,KAAK,MAAM,QAAQ,KAAK,SAAS;GACnC,MAAM,YAAY,KAAK,MAAM,MAAM,CAAC,MAAM,CAAC;AAC3C,OAAI,UACF,MAAK,MAAM,OAAO,UAAU;;AAIhC,OAAK,MAAM,IAAI,KAAK;GAClB;GACA,WAAW,KAAK,KAAK;GACrB,KAAK,OAAO,KAAK;GAClB,CAAC;;;AAKN,MAAa,cAAc,IAAI,YAAY;CACzC,SAAS;CACT,KAAK,MAAS;CACf,CAAC;AAGF,kBAAkB;AAChB,aAAY,SAAS;GACpB,MAAU,IAAK;;;;;;;;;;;;AC9HlB,MAAa,YAAY;AACzB,MAAa,eAAe;AAC5B,MAAa,mBAAmB;AAChC,MAAa,gBAAgB;;;;AAK7B,MAAa,wBAAwB,CAAC,SAAS,UAAU;;;;AAKzD,MAAa,4BAA4B;;;;AAKzC,MAAa,gCAAgC;;;;AAK7C,MAAa,oBAAoB;;;;;;;ACxBjC,MAAa,6CACX,SACA,gBACA,WAC2B;AAC3B,QAAO;EACL,YAAY;EACZ,aAAa,QAAQ,eAAe;EACpC,MAAM;EACN,OAAO,QAAQ;EACf,MAAM,QAAQ,MAAM,KAAK,SAAoB;GACzC,GAAG;GACH,YAAY;GACZ,aAAa,QAAQ,eAAe;GACpC,MAAM;GACN,UAAW,IAAI,YAAY,EAAE;GAC9B,EAAE,IAAI,EAAE;EACZ;;;;;AAMH,MAAa,6BACX,SACA,YAKyB;CACzB,MAAM,EAAE,MAAM,UAAU,UAAU;CAElC,MAAM,eAAe,QAAQ,SAAS,WAAW,OAAO,QAAQ,EAAE,CAAC;CACnE,MAAM,aAAa,QAAQ,QACxB,KAAK,WAAW,OAAO,OAAO,SAAS,IACxC,EACD;AAGD,cAAa,MAAM,GAAG,OAAO,EAAE,cAAc,MAAM,EAAE,cAAc,GAAG;AAkBtE,QAhB2C;EACzC,aAAa,QAAQ,KAAK,OAAO;GAC/B,YAAY,EAAE;GACd,aAAa,EAAE;GACf,OAAO,EAAE;GACT,OAAO,EAAE,SAAS;GAClB,MAAM,EAAE;GACT,EAAE;EACH,OAAO;EACP,MAAM,aAAa,MAAM,GAAG,SAAS;EACrC;EACA,gBAAgB;GAAE;GAAU;GAAO;EACnC,eAAe;EACf,gBAAgB;EACjB;;;;;;;;ACpDH,MAAa,gCACX,OACA,YAO4B;CAC5B,MAAM,EACJ,MACA,UACA,eAAe,uBACf,SACA,mBACE;AAeJ,QAbwC;EACtC,uBAAuB,aAAa,KAAK,IAAI;EAC7C,WAAW;EACX;EACA;EACA,GAAG;EACH,UAAU,aAAa,KAAK,IAAI;EAChC,mBAAmB;EACnB,uBAAuB;EACP;EACP;EACV;;;;;;;;AC/BH,MAAa,8BAA8B,OACzC,iBACA,gBACA,QACA,YASoC;AACpC,KAAI;EACF,MAAMC,eAMF;GACF,MAAM,QAAQ;GACd,UAAU,QAAQ;GACnB;AAGD,MAAI,QAAQ,aACV,cAAa,eAAe,QAAQ;WAC3B,QAAQ;GACf,IAAIC,SAA6D,EAAE;AACnE,YAAS,OAAO;GAGhB,MAAM,eAAe,OAChB,QAAO,MACJ,EAAE,UAAU,UACX,EAAE,SAAS,YAAY,EAAE,SAAS,YACtC,CACA,KAAI,MAAK,EAAE,KAAK;AACrB,OAAI,aAAa,SAAS,EACtB,cAAa,eAAe;;AAIpC,MAAI,QAAQ,QACV,cAAa,UAAU,QAAQ;AAGjC,MAAI,QAAQ,eACV,cAAa,iBAAiB,QAAQ;EAGxC,MAAM,mBAAmB,6BACvB,QAAQ,OACR,aACD;AAID,MAAI,CAAC,QAAQ,gBACX,KAAI;AAOF,SALyB,MAAM,gBAC5B,YAAY,eAAe,CAC3B,UAAU,EAEuB,QAAQ,KAAI,MAAK,EAAE,KAAK,IAAI,EAAE,EACnD,SAAS,WAAW,CAEjC,kBAAiB,YAAY;WAGxBC,aAAsB;AAUjC,SAAO,0CALS,MAAM,gBACnB,YAAY,eAAe,CAC3B,WAAW,CACX,OAAO,iBAAiB,EAIzB,gBACA,OACD;UACM,OAAO;AACd,SAAO;GACL,YAAY;GACZ,aAAa,QAAQ,eAAe;GACpC,OAAO,iBAAiB,QAAQ,MAAM,UAAU;GAChD,OAAO;GACP,MAAM,EAAE;GACR,MAAM;GACP;;;;;;ACjGL,MAAa,0CAA0C,OACrD,iBACA,oBACA,OACA,YACkC;AAClC,UAAO,KAAK,kDAAkD;EAC5D;EACA,aAAa,mBAAmB,KAAK,CAAC,UAAU,KAAK;EACtD,CAAC;CAGF,MAAM,uBAAuB,QAAQ,WACjC,QAAQ,SAAS,MAAM,IAAI,CAAC,KAAI,MAAK,EAAE,MAAM,CAAC,GAC9C;CAEJ,MAAM,iBAAiB,mBAAmB,IACxC,OAAO,CAAC,gBAAgB,YAAY;AAClC,MAAI;AA+BF,UA9Be,MAAM,4BACnB,iBACA,gBACA,QACA;IACE;IACA,MAAM,QAAQ;IACd,UAAU,QAAQ;IAClB,GAAI,uBACA,EAAE,cAAc,sBAAsB,UAC/B;AAEL,SAAI,CAAC,OAAQ,QAAO,EAAE;KACtB,IAAIC,SAA6D,EAAE;AACnE,cAAS,OAAO;KAGhB,MAAM,eAAe,OAChB,QAAO,MACJ,EAAE,UAAU,UACX,EAAE,SAAS,YAAY,EAAE,SAAS,YACtC,CACA,KAAI,MAAK,EAAE,KAAK;AACrB,YAAO,aAAa,SAAS,IAAI,EAAE,cAAc,GAAG,EAAE;QACpD;IAER,GAAI,QAAQ,WAAW,EAAE,SAAS,QAAQ,SAAS;IACnD,GAAI,QAAQ,kBAAkB,EAAE,gBAAgB,QAAQ,gBAAgB;IACzE,CACF;WAEM,OAAO;AACd,YAAO,MAAM,8BAA8B,OAAgB;IACzD,YAAY;IACZ;IACD,CAAC;AACF,SAAM;;GAGX;CAGD,MAAM,iBAAiB,0BADP,MAAM,QAAQ,IAAI,eAAe,EACS;EACxD,MAAM,QAAQ;EACd,UAAU,QAAQ;EAClB;EACD,CAAC;AAEF,aAAY,IAAI,OAAO,gBAAgB,aAAa,QAAQ;AAC5D,QAAO;;;;;;;;AC7BT,MAAa,8BACX,oBACA,oBACA,YACyB;CACzB,MAAM,EACJ,WAAW,kBACX,OAAO,cACP,GACA,OACA,WACE;CAyCJ,MAAMC,WAvCa,mBAAmB,SAAS,KAAK,QAAmC,UAA2C;AAChI,MAAI
,CAAC,mBAAmB,OACtB,QAAO;EAET,MAAM,CAAC,gBAAgB,UAAU,mBAAmB;AAEpD,SAAO;GACL,YAAY;GACZ,aAAa,QAAQ,eAAe;GACpC,MAAM;GACN,OAAO,OAAO,SAAS;GACvB,OAAO,OAAO,SAAS;GACvB,MACE,OAAO,MAAM,KAAK,QAAmB;IACnC,MAAM,MAAM,IAAI,YAAY,EAAE;IAC9B,MAAM,OAAO,IAAI,aACb,OAAO,IAAI,WAAW,CAAC,UAAU,GAAG,IAAI,GAAG,QAC3C,IAAI,cACF,OAAO,IAAI,YAAY,CAAC,UAAU,GAAG,IAAI,GAAG,QAC5C,IAAI;AAEV,WAAO;KACL,GAAG;KACH,YAAY;KACZ,aAAa,QAAQ,eAAe;KACpC,MAAM;KACN,UAAU;MACR,GAAG;MACH;MAEA,GAAI,IAAI,aAAa,EAAE,YAAY,IAAI,YAAY,GAAG,EAAE;MACzD;KACD,iBAAiB,IAAI;KACrB,YAAY,IAAI;KACjB;KACD,IAAI,EAAE;GACX;GACD,IAAI,EAAE,EAEuC,QAAQ,MAAsD,MAAM,KAAK;CAGxH,MAAM,eAAe,QAAQ,SAAS,WAAW,OAAO,KAAK;CAC7D,MAAM,aAAa,QAAQ,QACxB,KAAK,WAAW,MAAM,OAAO,OAC9B,EACD;AAGD,cAAa,MAAM,GAAG,MAAM;AAG1B,UAFkB,EAAE,mBAAmB,aACrB,EAAE,mBAAmB;GAEvC;AAuBF,QArB2C;EACzC,aAAa,QAAQ,KAAK,OAAyB;GACjD,YAAY,EAAE;GACd,aAAa,EAAE;GACf,OAAO,EAAE;GACT,OAAO,EAAE,SAAS;GAClB,MAAM,EAAE;GACT,EAAE;EACH,OAAO;EACP,MAAM,aAAa,MAAM,GAAG,SAAS;EACrC;EACA,gBAAgB;GACX;GACH;GACA,OAAO,SAAS;GAChB,QAAQ,SAAS,aAAa;GAC/B;EACD,eAAe;EACf,gBAAgB;EACjB;;;;;;;;AC1HH,MAAa,2BACX,cACA,YAC4B;CAC5B,MAAM,EACJ,OACA,IAAI,WACJ,SAAS,OACT,QAAQ,eACR,OAAO,cACP,WAAW,kBACX,WACA,SACA,iBACE;CAEJ,MAAMC,eAAwC;EAC5C,GAAG;EACH,cAAc,eAAe,aAAa,KAAK,IAAI,CAAC,OAAO,EAAE;EAC7D;EACA;EACA,gBAAgB;EACjB;AAGD,KAAI,UAAU,OAAO;AACnB,eAAa,IAAI;AACjB,eAAa,WAAW,cAAc,KAAK,IAAI,IAAI,sBAAsB,KAAK,IAAI;AAClF,eAAa,eAAe,eAAe,aAAa,KAAK,IAAI,CAAC,OAAO,EAAE,UAAU,MAAM;;AAI7F,KAAI,UACF,cAAa,YAAY;AAI3B,KAAI,QACF,cAAa,UAAU;AAGzB,QAAO;;;;;;;;AC9CT,MAAa,0CACX,cACA,oBACA,YACmC;CACnC,MAAM,EACJ,OACA,GACA,QACA,OACA,MACA,UACA,WACA,YACE;AAEJ,QAAO,mBAAmB,KAAK,CAAC,gBAAgB,YAAY;EAE1D,IAAIC;AACJ,MAAI,QAAQ;GACR,IAAIC,SAA6D,EAAE;AACnE,YAAS,OAAO;GAGhB,MAAM,YAAY,OACb,QAAO,MACJ,EAAE,UAAU,UACX,EAAE,SAAS,YAAY,EAAE,SAAS,YACtC,CACA,KAAI,MAAK,EAAE,KAAK;AACrB,OAAI,UAAU,SAAS,EACnB,gBAAe;;AAqBvB,SAAO;GACL,YAAY;GACZ,GAjB6B,wBAAwB,cAAc;IACnE,GAAI,UAAU,UAAa,EAAE,OAAO;IACpC,GAAI,MAAM,UAAa,EAAE,GAAG;IAC5B,GAAI,WAAW,UAAa,EAAE,QAAQ;IACtC,GAAI,UAAU,UAAa,EAAE,OAAO;IACpC,GAAI,SAAS,UAAa,EAAE,MAAM;IAClC,GAAI,aAAa,UAAa,EAAE,UAAU;IAE1C,GAAI,YAAY,UAAa,EAAE,SAAS;IACxC,GAAI,iBAAiB,UAAa,EAClB,cACf;IACF,CAAC;GAMA,YAAY;GACb;GACD;;;;;;;;AC1DJ,MAAa,sBAAsB,OACjC,OACA,QACA,oBAC6B;AAE7B,KAAI,UAAU,MAAM,QAAQ,OAAO,IAAI,OAAO,SAAS,EACrD,QAAO;AAIT,KAAI,OAAO;EACT,MAAM,eAAe,MAAM,kBAAkB,OAAO,gBAAgB;AACpE,MAAI,CAAC,gBAAgB,aAAa,WAAW,EAC3C,QAAO;AAET,SAAO;;AAGT,QAAO;;;;;ACbT,IAAa,gBAAb,MAA2B;CACzB,YACE,AAAQC,iBACR,AAAQC,eACR;EAFQ;EACA;;CAGV,MAAM,cACJ,OACA,mBACA,SAC+B;EAE/B,MAAM,WAAW,UAAU,MAAM,GAAG,KAAK,UAAU,QAAQ,CAAC,GAAG,kBAAkB,KAAI,MAAK,EAAE,GAAG,CAAC,KAAK,IAAI;EACzG,MAAM,eAAe,YAAY,IAAI,OAAO,UAAU,QAAQ;AAC9D,MAAI,aAAc,QAAO;AAKzB,OAHmB,QAAQ,QAAQ,gBAGhB,SAChB,QAAO,KAAK,yBAAyB,OAAO,mBAAmB,QAAQ;EAI1E,MAAM,eAAe,MAAM,oBACzB,OACA,QACA,KAAK,cAAc,SAAS,UAC7B;AAED,MAAI,CAAC,aAEH,QAAO,KAAK,yBAAyB,OAAO,mBAAmB,QAAQ;AAGzE,MAAI;GAEA,MAAM,UAAU,MAAM,KAAK,oBAAoB,OAAO,cAAc,mBAAmB,QAAQ;AAC/F,eAAY,IAAI,OAAO,SAAS,UAAU,QAAQ;AAClD,UAAO;WACF,OAAO;AACZ,YAAO,MAAM,qDAAqD,MAAe;AACjF,UAAO,KAAK,yBAAyB,OAAO,mBAAmB,QAAQ;;;CAI7E,MAAc,yBACZ,OACA,mBACA,SAC+B;AAC7B,SAAO,wCACL,KAAK,iBACL,mBACA,OACA,QACD;;CAGL,MAAc,oBACZ,OACA,cACA,mBACA,SAC+B;EAC7B,MAAM,WAAW,uCACf,cACA,mBACA;GACE;GACA,GAAG,KAAK,IAAI,IAAI,UAAU;GAC1B,QAAQ;GACR,OAAO;GACP,MAAM,QAAQ;GACd,UAAU,QAAQ;GAClB,GAAI,QAAQ,YAAY,UAAa,EAAE,SAAS,QAAQ,SAAS;GAClE,CACF;AAED,MAAI,SAAS,WAAW,EACpB,QAAO;GACH,aAAa,EAAE;GACf,OAAO;GACP,MAAM,EAAE;GACR,MAAM,QAAQ;GACd,gBAAgB;IACZ,UAAU,QAAQ;IACX;IACV;GACD,eAAe;GACf,gBAAgB;GACnB;AAKL,SAAO,2BAFoB,MAAM,KAAK,gBAAgB,YAAY,QAAQ,EAAE,UAAU,CAAC,EAIrF,mBACA;GACE,UAAU,QAAQ;GAClB,MAAM,QAAQ;GACd,GAAG;GACH
;GACD,CACF;;;;;;;;;AChHP,SAAS,oBAAoB,gBAAgC;AAC3D,KAAI,eAAe,SAAS,UAAU,CAAE,QAAO;AAC/C,KAAI,eAAe,SAAS,OAAO,CAAE,QAAO;AAC5C,QAAO;;;;;AAiBT,SAAgB,wBAAwB,MAAiE;AACvG,KAAI,CAAC,KAAK,KACR,QAAO,EAAE,WAAW,EAAE,EAAE;AAiB1B,QAAO,EAAE,WAdS,KAAK,KAAK,KAAK,QAAmB;EAClD,MAAM,MAAM,IAAI,YAAY,EAAE;EAC9B,MAAM,kBAAkB,IAAI,cAAc,IAAI;EAC9C,MAAM,aAAa,OAAO,oBAAoB,WAAW,kBAAkB;AAE3E,SAAO;GACL,IAAI,OAAO,IAAI,MAAM,GAAG;GACxB,OAAO,OAAO,IAAI,SAAS,aAAa;GACxC,MAAM,OAAO,IAAI,QAAQ,GAAG;GAC5B,MAAM,oBAAoB,WAAW;GACzB;GACb;GACD,EAEkB;;;;;;;;;;;;;;;;AC/BtB,MAAa,8BACX,gBACA,gBACW;AACX,QAAO,YAAY,aAAa;;;;;ACdlC,IAAa,2BAAb,MAAsC;CACpC,AAAQ;CAER,YAAY,AAAQC,eAAoC;EAApC;AAClB,OAAK,oBAAoB,KAAK,qBAAqB,cAAc;;CAGnE,AAAQ,qBACN,eACU;EACV,MAAM,oBAAoB,cAAc,SAAS,QAAQ,UAAU,UAAU,EAAE;EAC/E,MAAMC,oCAAiC,IAAI,KAAK;EAChD,MAAMC,gCAA6B,IAAI,KAAK;AAE5C,OAAK,MAAM,CAAC,gBAAgB,iBAAiB,OAAO,QAClD,cAAc,eAAe,EAAE,CAChC,CACC,KAAI,MAAM,QAAQ,aAAa,CAC7B,MAAK,MAAM,eAAe,cAAc;AACtC,OAAI,CAAC,YAAY,QAAS;GAE1B,MAAM,YAAY,2BAA2B,gBAAgB,YAAY;AACzE,iBAAc,IAAI,UAAU;AAG5B,OAAI,kBAAkB,WAAW,GAAG;AAClC,sBAAkB,IAAI,UAAU;AAChC;;AAKF,OAAI,kBAAkB,SAAS,UAAU,CACvC,mBAAkB,IAAI,UAAU;;AAMxC,SAAO,MAAM,KAAK,kBAAkB;;;;;;;CAStC,oBACE,oBACA,sBACU;AAEV,MAAI,CAAC,oBAAoB;AACvB,OAAI,wBAAwB,qBAAqB,SAAS,EAExD,QAAO,qBAAqB,QAAQ,MAClC,KAAK,kBAAkB,SAAS,EAAE,CACnC;AAGH,UAAO,KAAK;;EAGd,MAAMC,eAAyB,EAAE;EACjC,MAAM,eACJ,KAAK,cAAc,cAAc,uBAAuB,EAAE;AAE5D,MAAI,MAAM,QAAQ,aAAa,EAC7B;QAAK,MAAM,UAAU,aACnB,KAAI,OAAO,SAAS;IAClB,MAAM,YAAY,2BAChB,oBACA,OACD;AACD,QAAI,KAAK,kBAAkB,SAAS,UAAU,CAC5C,cAAa,KAAK,UAAU;;;AAMpC,SAAO;;;;;;ACjFX,IAAa,qBAAb,MAAgC;CAC9B,YAAY,AAAQC,eAAoC;EAApC;;;;;;CAMpB,mBACE,kBAC8B;EAC9B,MAAMC,gBAA8C,EAAE;AAGtD,OAAK,MAAM,CAAC,MAAM,YAAY,OAAO,QACnC,KAAK,cAAc,eAAe,EAAE,CACrC,EAAE;AACD,OAAI,CAAC,MAAM,QAAQ,QAAQ,CAAE;AAE7B,QAAK,MAAM,UAAU,SAAS;AAC5B,QAAI,CAAC,OAAO,QAAS;IAErB,MAAM,YAAY,2BAA2B,MAAM,OAAO;AAG1D,QAAI,iBAAiB,SAAS,UAAU,CACtC,eAAc,KAAK,CAAC,WAAW,OAAO,CAAC;;;AAK7C,SAAO;;;;;;;;;;;;AClBX,SAAgB,oBAAoB,QAA0B;AAC5D,QAAO,OAAO,KAAK,OAAO,UAAU,GAAG,QAAQ,EAAE,IAAI,QAAQ,CAAC,KAAK,KAAK;;;;;AAM1E,MAAM,qBAAqB,EAAE,OAAO;CAClC,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,UAAU;CACtC,SAAS,EAAE,OAAO,EAAE,QAAQ,EAAE,EAAE,KAAK,CAAC,CAAC,UAAU;CACjD,kBAAkB,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,UAAU;CAChD,WAAW,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE;CAC/D,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE;CACnD,UAAU,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,QAAQ,GAAG;CACjE,GAAG,EAAE,QAAQ,CAAC,IAAI,GAAG,oCAAkC;CACvD,mBAAmB,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,QAAQ,GAAG;CAC1E,SAAS,EAAE,QAAQ,CAAC,UAAU;CAC9B,uBAAuB,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE;CACrE,CAAC;;;;AAOF,SAAgB,qBAAqB,QAAmC;AACtE,KAAI;AAEF,SAAO;GACL,MAFsB,mBAAmB,MAAM,OAAO;GAGtD,SAAS;GACV;UACM,OAAO;AACd,MAAI,iBAAiB,EAAE,SAMrB,QAAO;GACL,QANa,MAAM,OAAO,KAAK,QAAQ;AAEvC,WAAO,GADM,IAAI,KAAK,SAAS,IAAI,GAAG,IAAI,KAAK,KAAK,IAAI,CAAC,MAAM,KAC9C,IAAI;KACrB;GAIA,SAAS;GACV;AAGH,SAAO;GACL,QAAQ,CAAC,mCAAmC;GAC5C,SAAS;GACV;;;;;;;;;AC3DL,MAAa,yBACX,YAC0D;CAC1D,IAAIC;CACJ,IAAIC;AAEJ,KAAI,QAAQ,OAAO,OAAO,QAAQ,QAAQ,UAAU;EAElD,MAAM,YADM,IAAI,IAAI,QAAQ,IAAI,CACV,SAAS,MAAM,IAAI;EACzC,MAAM,cAAc,UAAU,QAAQ,SAAS;AAC/C,MAAI,gBAAgB,MAAM,UAAU,cAAc,IAAI;AACpD,oBAAiB,UAAU,cAAc,MAAM;AAC/C,uBAAoB,OAAO,eAAe;SACrC;AACL,oBAAiB;AACjB,uBAAoB;;QAEjB;EAGL,MAAM,sBADS,QAAQ,aACa;AACpC,mBAAiB,OAAO,uBAAuB,GAAG;AAClD,sBAAoB;;AAGtB,QAAO;EAAE;EAAgB;EAAmB;;;;;;;;AC3B9C,MAAa,uBACX,UAYG;CACH,MAAM,IAAI,OAAO,OAAO,KAAK,GAAG;CAChC,MAAM,YAAY,OAAO;CACzB,MAAM,eAAe,OAAO;CAC5B,MAAM,OAAO,YAAY,SAAS,OAAO,UAAU,EAAE,GAAG,
GAAG;CAC3D,MAAM,WAAW,eAAe,SAAS,OAAO,aAAa,EAAE,GAAG,GAAG;CACrE,MAAM,UAAU,OAAO;CACvB,MAAM,OAAO,OAAO;CAGpB,MAAM,kBAAkB,OAAO;CAC/B,MAAMC,cAAoC,kBACtC,MAAM,QAAQ,gBAAgB,GAC5B,gBAAgB,KAAI,MAAK,OAAO,EAAE,CAAC,GACnC,CAAC,OAAO,gBAAgB,CAAC,GAC3B;CAEJ,MAAM,iBAAiB,OAAO;CAC9B,MAAM,WAAW,OAAO;CACxB,MAAM,cAAc,OAAO;CAC3B,MAAM,SAAS,gBAAgB,UAAU,gBAAgB,QAAQ,gBAAgB;CAEjF,MAAMC,SAAmB,EAAE;AAG3B,KAAI,MAAM,KAAK,IAAI,OAAO,EACxB,QAAO,KAAK,yBAAyB;AAEvC,KAAI,MAAM,SAAS,IAAI,WAAW,KAAK,WAAW,IAChD,QAAO,KAAK,6BAA6B;CAG3C,MAAMC,SAWF;EACF;EACA;EACA;EACD;AAED,KAAI,QACF,QAAO,UAAU;AAGnB,KAAI,KACF,QAAO,OAAO;AAGhB,KAAI,eAAe,YAAY,SAAS,EACtC,QAAO,cAAc;AAGvB,KAAI,eACF,QAAO,iBAAiB;AAG1B,KAAI,SACF,QAAO,WAAW;AAGpB,KAAI,OACF,QAAO,SAAS;AAGlB,KAAI,OAAO,SAAS,EAClB,QAAO,SAAS;AAGlB,QAAO;;;;;;;;ACtET,SAAgB,sBAAsB,SAA2C;CAC/E,MAAM,EAAE,UAAU;CAClB,MAAM,EAAE,gBAAgB,sBAAsB,sBAAsB,QAAQ;CAC5E,MAAM,eAAe,oBAAoB,MAAiC;AAG1E,KAAI,aAAa,UAAU,aAAa,OAAO,SAAS,EACtD,QAAO;EACL,SAAS;EACT,OAAO,SAAS,KAAK,EAAE,OAAO,aAAa,OAAO,IAAI,EAAE,EAAE,QAAQ,KAAK,CAAC;EACzE;CAIH,MAAM,aAAa,qBAAqB;EACtC,MAAM,aAAa;EACnB,UAAU,aAAa;EACvB,GAAG,aAAa;EAChB,SAAS,aAAa;EACvB,CAAC;AAEF,KAAI,CAAC,WAAW,QACd,QAAO;EACL,SAAS;EACT,OAAO,SAAS,KACd;GACE,SAAS,oBAAoB,WAAW,UAAU,EAAE,CAAC;GACrD,OAAO;GACR,EACD,EAAE,QAAQ,KAAK,CAChB;EACF;AAGH,QAAO;EAAE,SAAS;EAAM;EAAgB;EAAmB;EAAc;;;;;;;;AC3C3E,MAAa,uBACX,iBACA,kBACmB;CACnB,MAAM,gBAAgB,IAAI,cAAc,iBAAiB,cAAc;CACvE,MAAM,iBAAiB,IAAI,yBAAyB,cAAc;CAClE,MAAM,eAAe,IAAI,mBAAmB,cAAc;AAE1D,QAAO,OAAO,YAA4B;AACxC,MAAI;GAEF,MAAM,YAAY,sBAAsB,QAAQ;AAChD,OAAI,CAAC,UAAU,QAAS,QAAO,UAAU;GAEzC,MAAM,EAAE,gBAAgB,iBAAiB;GAGzC,MAAM,oBAAoB,eAAe,oBACvC,gBACA,aAAa,YACd;AAGD,OAAI,kBAAkB,WAAW,GAAG;AAIlC,QAHsB,CAAC,kBACqB,aAAa,eAAe,aAAa,YAAY,SAAS,EAGrG,QAAO,SAAS,KAAK,EAAE,OAAO,iDAAiD,EAAE,EAAE,QAAQ,KAAK,CAAC;AAEtG,WAAO,SAAS,KAAK,EAAE,OAAO,yCAAyC,EAAE,EAAE,QAAQ,KAAK,CAAC;;AAG3F,OAAI,CAAC,aAAa,KAAK,aAAa,EAAE,MAAM,KAAK,GAC9C,QAAO,SAAS,KAAK,EAAE,OAAO,qCAAmC,EAAE,EAAE,QAAQ,KAAK,CAAC;GAItF,MAAM,gBAAgB,aAAa,mBAAmB,kBAAkB;GAGxE,MAAM,eAAe,MAAM,cAAc,cACvC,aAAa,GACb,eACA;IACI,SAAS,EAAE;IACX,MAAM,aAAa;IACnB,UAAU,aAAa;IACvB,SAAS,aAAa;IACtB,MAAM,aAAa;IACnB,gBAAgB,aAAa;IAC7B,UAAU,aAAa;IAC1B,CACF;AAGD,OAAI,aAAa,OACf,QAAO,SAAS,KAAK,wBAAwB,aAAa,CAAC;AAG7D,UAAO,SAAS,KAAK,aAAa;WAE3B,OAAO;AACd,UAAO,SAAS,KACd;IACE,SAAS,iBAAiB,QAAQ,MAAM,UAAU;IAClD,OAAO;IACR,EACD,EAAE,QAAQ,KAAK,CAChB;;;;;;;AC1EP,MAAa,yBACX,iBACA,kBACG;AACH,QAAO;EACL;GACE,SAAS,yBAAyB,cAAc;GAChD,QAAQ;GACR,MAAM;GACP;EACD;GACE,SAAS,oBAAoB,iBAAiB,cAAc;GAC5D,QAAQ;GACR,MAAM;GACP;EACD;GACE,SAAS,oBAAoB,iBAAiB,cAAc;GAC5D,QAAQ;GACR,MAAM;GACP;EACF;;;;;;;;;;;;ACjBH,MAAaC,iCAA+B;;;;AAS5C,MAAa,8BAA8B;;;;AAK3C,MAAa,uBAAuB;;;;AASpC,MAAa,uBAAuB,MAAS;;;;AAS7C,MAAa,yBAAyB;;;;AAKtC,MAAa,4BAA4B;;;;AAKzC,MAAa,0BAA0B;;;;AAKvC,MAAa,wBAAwB;;;;;;;ACxCrC,MAAM,sBAAsB;CAC1B;EAAE,MAAM;EAAM,MAAM;EAAmB;CACvC;EAAE,MAAM;EAAQ,MAAM;EAAmB;CACzC;EAAE,MAAM;EAAa,MAAM;EAAkB;CAC7C;EAAE,MAAM;EAAa,MAAM;EAAkB;CAC9C;;;;;;AAOD,MAAM,qBACJ,WAAoB,MACpB,aAAqBC,oCACjB;CACJ,MAAM;CACN,MAAM;CACN,SAAS;CACT,GAAI,YAAY,EAAE,UAAU,MAAM;CACnC;;;;AAKD,MAAM,4BAA4B,WAAmD;AACjF,QAAO,OAAO,KAAI,WAAU;EACxB,MAAM,MAAM;EACZ,MAAM,MAAM,SAAS,SAAS,WAAW,MAAM;EAC/C,OAAO,MAAM;EACb,OAAO,MAAM;EACb,UAAU,MAAM;EACnB,EAAE;;;;;AAMP,MAAM,uBAAuB;CAC3B;EAAE,MAAM;EAAiB,MAAM;EAAmB,OAAO;EAAM;CAC/D;EAAE,MAAM;EAAe,MAAM;EAAkB;CAC/C;EAAE,MAAM;EAAc,MAAM;EAAmB;CAC/C;EAAE,MAAM;EAAY,MAAM;EAAiB;CAC3C;EAAE,MAAM;EAAW,MAAM;EAAqB,OAAO;EAAM,UAAU;EAAM;CAC5E;;;;AAKD,MAAa,4BACX,gBACA,aACA,sBAA8BA,mCAC3B;CACH,MAAM,SAAS,YAAY,SAAS,yBAAyB,YAAY,OAAO,GAAG,EAAE;CAGrF,MAAM,iBAAiB,IAAI,IAAI,CAC7B,GAAG,OAAO,KAAI,MAAK,EAAE,KAAK,EAC1B,GAAG,gBAAgB,CAAC,KAAI,MAAK,E
AAE,KAAK,CACrC,CAAC;AAKF,QAAO;EACL,MAAM;EACN,QAAQ;GACN,GALe,eAAe,CAAC,QAAO,MAAK,CAAC,eAAe,IAAI,EAAE,KAAK,CAAC;GAMvE,GAAG,gBAAgB;GACnB,GAAG;GACH,kBAAkB,OAAO,oBAAoB;GAC9C;EACF;;;;;AAMH,MAAa,mCACX,gBACA,aACA,sBAA8BA,mCAC3B;CACH,MAAM,eAAe,yBAAyB,YAAY,OAAO;CAGjE,MAAM,iBAAiB,IAAI,IAAI,aAAa,KAAI,MAAK,EAAE,KAAK,CAAC;AAK7D,QAAO;EACL,MAAM;EACN,QAAQ;GACN,GALe,eAAe,CAAC,QAAO,MAAK,CAAC,eAAe,IAAI,EAAE,KAAK,CAAC;GAMvE,GAAG;GAEH,kBAAkB,MAAM,oBAAoB;GAC7C;EACF;;;;;AC3GH,IAAa,gBAAb,MAA2B;CACzB,YACE,AAAQC,QACR,AAAQC,QACR;EAFQ;EACA;;;;;CAMV,MAAM,kBAAiC;AACrC,MAAI,CAAC,KAAK,OAAO,YAAa;AAE9B,WAAO,KAAK,qCAAqC;EAEjD,MAAM,sBAAsB,KAAK,wBAAwB;AAEzD,OAAK,MAAM,CAAC,gBAAgB,iBAAiB,OAAO,QAAQ,KAAK,OAAO,YAAY,EAAE;AACpF,OAAI,CAAC,aAAc;AAEnB,QAAK,MAAM,eAAe,cAAc;AACtC,QAAI,CAAC,YAAY,QAAS;AAE1B,UAAM,KAAK,UAAU,gBAAgB,aAAa,oBAAoB;;;AAI1E,WAAO,KAAK,oCAAoC;;;;;CAMlD,MAAc,UACZ,gBACA,aACA,qBACe;EACf,MAAM,YAAY,2BAA2B,gBAAgB,YAAY;EAGzE,IAAIC;AAEJ,MAAI,YAAY,WAAW,SACvB,gBAAe,yBAAyB,WAAW,aAAa,oBAAoB;MAEpF,gBAAe,gCAAgC,WAAW,aAAa,oBAAoB;AAG/F,MAAI;GAEF,MAAM,aAAa,MAAM,KAAK,OAAO,YAAY,UAAU,CAAC,UAAU;AAItE,SAAM,KAAK,uBAAuB,WAAW,YAAY,aAAa;WAE/DC,OAAgB;AAEvB,OADwB,OACJ,eAAe,KAAK;AAEtC,aAAO,KAAK,wBAAwB,YAAY;AAChD,UAAM,KAAK,OAAO,aAAa,CAAC,OAAO,aAAa;UAC/C;AACL,aAAO,MAAM,6BAA6B,aAAa,MAAe;AACtE,UAAM;;;;CAKZ,MAAc,uBACZ,WACA,eACA,cACe;AACf,MAAI,CAAC,iBAAiB,CAAC,cAAc,OAAQ;EAE7C,MAAM,gBAAgB,IAAI,IAAI,cAAc,OAAO,KAAK,MAAW,EAAE,KAAK,CAAC;EAE3E,MAAM,YAAY,aAAa,QAAQ,QAAO,MAAK,CAAC,cAAc,IAAI,EAAE,KAAK,IAAI,EAAE,SAAS,KAAK,IAAI,EAAE;AAEvG,MAAI,UAAU,SAAS,GAAG;AACxB,YAAO,KAAK,uBAAuB,UAAU,QAAQ,UAAU,OAAO,cAAc,EAClF,QAAQ,UAAU,KAAI,MAAK,EAAE,KAAK,EACnC,CAAC;AAEF,OAAI;AAEF,UAAM,KAAK,OAAO,YAAY,UAAU,CAAC,OAAO,EAC9C,QAAQ,WACT,CAAC;YACK,OAAO;AACd,aAAO,MAAM,+BAA+B,aAAa,MAAe;;;;CAK9E,AAAQ,yBAAiC;AAGvC,MAFwB,KAAK,OAAO,SAAS,WAExB,YAAY;AAEjC,SAAOC;;;;;;ACnGX,IAAa,eAAb,MAA0B;CACxB,YACE,AAAQC,QACR,AAAQC,QACR;EAFQ;EACA;;;;;CAMV,MAAM,aAA4B;EAEhC,MAAM,SAAS,KAAK,OAAO,UAAU,EAAE;AAEvC,MAAI,OAAO,WAAW,EAAG;AAEzB,WAAO,KAAK,+BAA+B,OAAO,OAAO,gBAAgB;EAGzE,MAAM,qBAAqB,IAAI,IAAI,OAAO,KAAI,MAAK,EAAE,qBAAqB,uBAAuB,CAAC;AAClG,OAAK,MAAM,kBAAkB,mBAC3B,OAAM,6BAA6B,KAAK,QAAQ,eAAe;AAIjE,OAAK,MAAM,SAAS,OAClB,OAAM,KAAK,eAAe,MAAM;AAGlC,WAAO,KAAK,mCAAmC;;CAGjD,MAAc,eAAe,OAAsC;AACjE,MAAI;GAEF,MAAM,cAAc;IAClB,IAAI,MAAM;IACV,YAAY,MAAM;IAClB,eAAe,MAAM;IACrB,SAAS,MAAM;IACf,oBAAoB,MAAM,qBAAqB;IAC/C,WAAW,MAAM,mBAAmB;IACpC,KAAK,MAAM,OAAO;IAClB,WAAW,MAAM,YAAY;IAC9B;AAGD,UAAO,MAAM,KAAK,wBAAwB,YAAY;WAE/C,OAAO;AACd,YAAO,MAAM,wBAAwB,MAAM,QAAQ,MAAe;AAClE,UAAO;;;CAIX,MAAc,wBAAwB,aAAoC;EAExE,MAAM,gBAAgB,KAAK,OAAO;AAElC,MAAI,CAAC,iBAAiB,CAAC,cAAc,SAAS,cAAc,MAAM,WAAW,GAAG;AAC9E,YAAO,MAAM,yCAAyC;AACtD,UAAO;;EAGT,MAAM,OAAO,cAAc,MAAM;EACjC,MAAM,kBAAkB,cAAc;EACtC,MAAM,UAAU,GAAG,KAAK,SAAS,KAAK,KAAK,KAAK,GAAG,KAAK;AAExD,MAAI;GAEF,MAAM,iBAAiB,MAAM,MAAM,GAAG,QAAQ,wBAAwB;IACpE,QAAQ;IACR,SAAS;KACP,gBAAgB;KAChB,uBAAuB,mBAAmB;KAC3C;IACD,MAAM,KAAK,UAAU,YAAY;IAClC,CAAC;AAEF,OAAI,eAAe,IAAI;AACrB,aAAO,KAAK,wBAAwB,YAAY,KAAK;AACrD,WAAO;;AAGT,OAAI,eAAe,WAAW,KAAK;AAEjC,aAAO,MAAM,eAAe,YAAY,GAAG,sBAAsB;IACjE,MAAM,iBAAiB,MAAM,MAAM,GAAG,QAAQ,wBAAwB,YAAY,MAAM;KACtF,QAAQ;KACR,SAAS;MACP,gBAAgB;MAChB,uBAAuB,mBAAmB;MAC3C;KACD,MAAM,KAAK,UAAU,YAAY;KAClC,CAAC;AAEF,QAAI,eAAe,IAAI;AACpB,cAAO,KAAK,wBAAwB,YAAY,KAAK;AACrD,YAAO;WACH;KACH,MAAMC,QAAM,MAAM,eAAe,MAAM;AACvC,cAAO,MAAM,0BAA0B,YAAY,GAAG,IAAIA,QAAM;AAChE,YAAO;;;GAIb,MAAM,MAAM,MAAM,eAAe,MAAM;AACvC,YAAO,MAAM,0BAA0B,YAAY,GAAG,IAAI,MAAM;AAChE,UAAO;WAEA,cAAc;AACrB,YAAO,MAAM,qCAAqC,aAAsB;AACxE,UAAO;;;;;;;;;;;;;;;;;;;AC/Db,SAAgB,yBAAyB,QAAkC;CACzE,MAAMC,WAAS,IAAI,OAAO;EAAE,SAAS;EAAM,QAAQ;EAAuB,CAAC;AAE3E,SAAQ,kBA
AkC;EAExC,MAAM,kBAAkB,sBAAsB,OAAO,UAAU;AAG/D,MAAI,OAAO,QAAQ,SAAS;GAC1B,MAAM,kBAAkB,sBAAsB,iBAAiB;IAC7D,WAAW,OAAO;IAClB,UAAU;KACR,WAAW,OAAO;KAClB,QAAQ,OAAO;KAChB;IACD,aAAa,OAAO,eAAe,EAAE;IACtC,CAAC;AAEF,iBAAc,YAAY,CACxB,GAAI,cAAc,aAAa,EAAE,EACjC,GAAG,gBACJ;AAED,YAAO,MAAM,+BAA+B,EAC1C,gBAAgB,gBAAgB,QACjC,CAAC;;AAIJ,MAAI,OAAO,UAAU,OAAO,OAAO,SAAS,KAAK,OAAO,WAAW;GACjE,MAAM,eAAe,yBAAyB;IAC5C,WAAW,OAAO;IAClB,iBAAiB,OAAO;IACxB,QAAQ,OAAO;IACf,WAAW,OAAO;IAClB,QAAQ,OAAO;IACf,MAAM,OAAO;IACb,UAAU,OAAO;IAClB,CAAC;AAEF,iBAAc,YAAY,CACxB,GAAI,cAAc,aAAa,EAAE,EACjC,GAAG,aACJ;AAED,YAAO,MAAM,4BAA4B;IACvC,gBAAgB,aAAa;IAC7B,aAAa,OAAO,OAAO;IAC5B,CAAC;;EAIJ,MAAM,iBAAiB,cAAc;AACrC,gBAAc,SAAS,OAAO,YAAY;AACxC,OAAI,eACF,OAAM,eAAe,QAAQ;AAG/B,OAAI;AAEF,QAAI,OAAO,eAAe,OAAO,KAAK,OAAO,YAAY,CAAC,SAAS,GAAG;AACpE,cAAO,KAAK,0CAA0C;AAQtD,WAPsB,IAAI,cAAc,iBAAiB;MACvD,WAAW,OAAO;MAClB,UAAU,EACR,WAAW,OAAO,iBACnB;MACD,aAAa,OAAO;MACrB,CAAC,CACkB,iBAAiB;;AAIvC,QAAI,OAAO,UAAU,OAAO,OAAO,SAAS,GAAG;AAC7C,cAAO,KAAK,6BAA6B;AAIzC,WAHqB,IAAI,aAAa,iBAAiB,EACrD,QAAQ,OAAO,QAChB,CAAC,CACiB,YAAY;;YAE1B,OAAO;AAEd,aAAO,MAAM,0CAA0C,MAAe;;;AAI1E,SAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACvGX,IAAa,mBAAb,MAA0G;CACxG,AAAS,OAAO;CAEhB,YAAY,AAAQC,QAAgB;EAAhB;;;;;CAKpB,MAAM,iBAAmC;AACvC,MAAI;AACF,SAAM,KAAK,OAAO,OAAO,UAAU;AACnC,UAAO;WACA,OAAO;AACd,YAAO,MAAM,oCAAoC,MAAM;AACvD,UAAO;;;;;;CAOX,MAAM,iBAAiB,QAAkD;EACvE,MAAM,kBAAkB,KAAK,yBAAyB,OAAO;AAE7D,MAAI;GAEF,MAAM,WAAW,MAAM,KAAK,OAAO,YAAY,OAAO,KAAK,CAAC,UAAU;AAGtE,SAAM,KAAK,yBAAyB,OAAO,MAAM,UAAU,gBAAgB;WACpEC,OAAgB;AAEvB,OADuB,OACH,eAAe,KAAK;AAEtC,aAAO,KAAK,wBAAwB,OAAO,OAAO;AAClD,UAAM,KAAK,OAAO,aAAa,CAAC,OAAO,gBAAgB;SAEvD,OAAM;;;;;;CAQZ,MAAM,iBAAiB,gBAA0C;AAC/D,MAAI;AACF,SAAM,KAAK,OAAO,YAAY,eAAe,CAAC,UAAU;AACxD,UAAO;WACAA,OAAgB;AAEvB,OADuB,OACH,eAAe,IACjC,QAAO;AAET,SAAM;;;;;;CAOV,MAAM,iBAAiB,gBAAuC;AAC5D,MAAI;AACF,SAAM,KAAK,OAAO,YAAY,eAAe,CAAC,QAAQ;AACtD,YAAO,KAAK,uBAAuB,iBAAiB;WAC7CA,OAAgB;AAEvB,OADuB,OACH,eAAe,IACjC,OAAM;;;;;;CAQZ,MAAM,eAAe,gBAAwB,UAAwC;AACnF,MAAI;AACF,SAAM,KAAK,OAAO,YAAY,eAAe,CAAC,WAAW,CAAC,OAAO,SAAS;WACnE,OAAO;AACd,YAAO,MAAM,6BAA6B,SAAS,GAAG,MAAM,kBAAkB,MAAM;AACpF,SAAM;;;;;;CAOV,MAAM,gBAAgB,gBAAwB,WAA2C;AACvF,MAAI,UAAU,WAAW,EAAG;AAE5B,MAAI;AACF,SAAM,KAAK,OAAO,YAAY,eAAe,CAAC,WAAW,CAAC,OAAO,WAAW,EAC1E,QAAQ,UACT,CAAC;WACK,OAAO;AACd,YAAO,MAAM,0BAA0B,UAAU,OAAO,gBAAgB,kBAAkB,MAAM;AAChG,SAAM;;;;;;CAOV,MAAM,eAAe,gBAAwB,YAAmC;AAC9E,MAAI;AACF,SAAM,KAAK,OAAO,YAAY,eAAe,CAAC,UAAU,WAAW,CAAC,QAAQ;WACrEA,OAAgB;AAGvB,OAFuB,OAEH,eAAe,KAAK;AACtC,aAAO,MAAM,6BAA6B,WAAW,QAAQ,kBAAkB,MAAM;AACrF,UAAM;;;;;;;;CASZ,MAAM,wBACJ,gBACA,QACiB;EACjB,MAAM,YAAY,KAAK,kBAAkB,OAAO;AAEhD,MAAI;AAIF,WAHe,MAAM,KAAK,OAAO,YAAY,eAAe,CAAC,WAAW,CAAC,OAAO,EAC9E,WAAW,WACZ,CAAC,EACY,eAAe;WACtB,OAAO;AACd,YAAO,MAAM,6CAA6C,kBAAkB,OAAO,EAAE,QAAQ,CAAC;AAC9F,SAAM;;;;;;;CAQV,MAAM,aACJ,gBACA,QACA,UAA+B,EAAE,EACK;EACtC,MAAM,EAAE,QAAQ,IAAI,QAAQ,eAAe,kBAAkB;AAE7D,MAAI;GACF,MAAMC,eAAwC;IAC5C,GAAG;IACH,cAAc,eAAe,OAAO,KAAK,IAAI,CAAC,OAAO,MAAM;IAC5D;AAED,OAAI,OACF,cAAa,eAAe,KAAK,kBAAkB,OAAO;AAG5D,OAAI,cACF,cAAa,oBAAoB,cAAc,KAAK,IAAI;AAG1D,OAAI,cACF,cAAa,oBAAoB,cAAc,KAAK,IAAI;AAQ1D,YALe,MAAM,KAAK,OACvB,YAAY,eAAe,CAC3B,WAAW,CACX,OAAO,aAAa,EAER,QAAQ,EAAE,EAAE,KAAI,SAAQ;IACrC,IAAI,OAAQ,IAAI,UAAsC,MAAM,GAAG;IAC/D,OAAO,IAAI,mBAAmB;IAC9B,UAAU,IAAI;IACf,EAAE;WACI,OAAO;AACd,YAAO,MAAM,2BAA2B,kBAAkB,MAAM;AAChE,SAAM;;;;;;CASV,AAAQ,yBAAyB,QAA2D;AAC1F,SAAO;GACL,MAAM,OAAO;GACb,QAAQ,OAAO,OAAO,KAAI,UAAS,KAAK,aAAa,MAAM,CAAC;GAC5D,uBAAuB,OAAO;GAC/B;;;;;CAMH,AAAQ,aAAa,OAAoD;EACvE,MAAMC,iBAAwC;GAC5C,MAAM,MAAM;GACZ,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,OAAO,MAAM;GACb,UAAU,MAAM;GACjB;AAGD,M
AAI,MAAM,SAAS,aAAa,MAAM,iBACpC,gBAAe,UAAU,MAAM;AAGjC,SAAO;;;;;CAMT,MAAc,yBACZ,gBACA,eACA,cACe;AACf,MAAI,CAAC,eAAe,OAAQ;EAE5B,MAAM,gBAAgB,IAAI,IAAI,cAAc,OAAO,KAAI,MAAK,EAAE,KAAK,CAAC;EACpE,MAAM,YAAY,aAAa,QAAQ,QACrC,MAAK,CAAC,cAAc,IAAI,EAAE,KAAK,IAAI,EAAE,SAAS,KAC/C,IAAI,EAAE;AAEP,MAAI,UAAU,SAAS,GAAG;AACxB,YAAO,KAAK,uBAAuB,eAAe,QAAQ,UAAU,OAAO,cAAc,EACvF,QAAQ,UAAU,KAAI,MAAK,EAAE,KAAK,EACnC,CAAC;AAEF,OAAI;AACF,UAAM,KAAK,OAAO,YAAY,eAAe,CAAC,OAAO,EACnD,QAAQ,WACT,CAAC;YACK,OAAO;AACd,aAAO,MAAM,+BAA+B,kBAAkB,MAAM;;;;;;;CAQ1E,AAAQ,kBAAkB,QAAyC;EACjE,MAAMC,QAAkB,EAAE;AAE1B,OAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,CAC/C,KAAI,MAAM,QAAQ,MAAM,CAEtB,OAAM,KAAK,GAAG,IAAI,IAAI,MAAM,KAAI,MAAK,OAAO,EAAE,CAAC,CAAC,KAAK,IAAI,CAAC,GAAG;WACpD,OAAO,UAAU,SAC1B,OAAM,KAAK,GAAG,IAAI,IAAI,QAAQ;WACrB,OAAO,UAAU,SAC1B,OAAM,KAAK,GAAG,IAAI,GAAG,QAAQ;WACpB,OAAO,UAAU,UAC1B,OAAM,KAAK,GAAG,IAAI,GAAG,QAAQ;AAIjC,SAAO,MAAM,KAAK,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC1R7B,SAAgB,uBAAuB,QAAqD;AAS1F,QAAO,IAAI,iBARI,IAAI,OAAO;EACxB,QAAQ,OAAO;EACf,OAAO,OAAO;EACd,0BAA0B,OAAO,4BAA4B;EAC7D,sBAAsB,OAAO;EAC7B,YAAY,OAAO;EACpB,CAAC,CAEiC;;;;;;;;;AAUrC,SAAgB,iCAAiC,QAAkC;AACjF,QAAO,IAAI,iBAAiB,OAAO;;;;;;;;;ACvCrC,MAAa,8BAA8B,OACzC,iBACA,gBACA,OACA,gBACG;AACH,KAAI;EAEF,MAAM,YAAY,2BAA2B,gBAAgB,YAAY;AAEzE,WAAO,MAAM,gDAAgD;GAC3D,YAAY;GACZ,YAAY;GACZ;GACD,CAAC;AAGF,MAAI;AACF,SAAM,gBAAgB,YAAY,UAAU,CAAC,UAAU,MAAM,CAAC,QAAQ;AACtE,YAAO,KAAK,mCAAmC;IAC7C,YAAY;IACZ;IACD,CAAC;WACKC,gBAAyB;AAIhC,OAHuB,eAGJ,eAAe,KAAK;AACrC,aAAO,MAAM,mDAAmD;KAC9D,YAAY;KACZ;KACD,CAAC;AAEF,QAAI;AACF,WAAM,gBACH,YAAY,UAAU,CACtB,WAAW,CACX,OAAO,EACN,WAAW,iBAAiB,SAC7B,CAAC;AACJ,cAAO,KAAK,mCAAmC;MAC7C,YAAY;MACZ;MACD,CAAC;aACKC,kBAA2B;AAIlC,SAHmB,iBAGJ,eAAe,IAC5B,UAAO,MAAM,wCAAwC,kBAA2B;MAC9E,YAAY;MACZ;MACD,CAAC;SAEF,UAAO,MAAM,6BAA6B,EAAE,YAAY,OAAO,CAAC;;SAIpE,OAAM;;UAGHC,OAAgB;EAEvB,MAAM,YAAY,2BAA2B,gBAAgB,YAAY;AAEzE,WAAO,MAAM,4CAA4C,OAAgB;GACvE,YAAY;GACZ,YAAY;GACZ;GACD,CAAC"}
+
{"version":3,"file":"index.mjs","names":["openaiClient: OpenAI | null","currentOpenAIApiKey: string | null","geminiClient: GoogleGenerativeAI | null","currentGeminiApiKey: string | null","embeddings: number[][]","request: TypesenseSearchRequest","params: AdvancedSearchParams","event: ConversationEvent","allSources: ChunkSource[]","source: ChunkSource","sources: ChunkSource[]","conversationId: string | null","error: unknown","error: unknown","newUserMessage: ChatMessageWithSources","newAssistantMessage: ChatMessageWithSources","searchConfig: RAGSearchConfig","spendingEntries: SpendingEntry[]","conversationIdCapture: string | null","sourcesCapture: ChunkSource[]","resolveDocumentType","sources: ChunkSource[]","conversationId: string | null","resolveDocumentType","llmSpending: SpendingEntry","conversationId: string | null","resolveDocumentType","llmSpending: SpendingEntry","session","error: unknown","endpoints: Array<{ path: string; method: 'connect' | 'delete' | 'get' | 'head' | 'options' | 'patch' | 'post' | 'put'; handler: PayloadHandler }>","collections: Array<Record<string, unknown>>","fields: { name: string; facet?: boolean; index?: boolean }[]","buildOptions: {\n page: number;\n per_page: number;\n searchFields?: string[];\n sort_by?: string;\n exclude_fields?: string;\n }","fields: { name: string; index?: boolean; type?: string }[]","schemaError: unknown","fields: { name: string; index?: boolean; type?: string }[]","results: CollectionResult[]","searchParams: Record<string, unknown>","searchFields: string[] | undefined","fields: { name: string; index?: boolean; type?: string }[]","typesenseClient: Client","pluginOptions: ModularPluginConfig","pluginOptions: ModularPluginConfig","allowedTableNames: Set<string>","allTableNames: Set<string>","targetTables: string[]","pluginOptions: ModularPluginConfig","searchConfigs: Array<[string, TableConfig]>","collectionName: string","collectionNameStr: string","collections: string[] | undefined","errors: string[]","result: {\n q: string;\n page: number;\n per_page: number;\n sort_by?: string;\n mode?: 'simple' | 'semantic';\n collections?: string[];\n exclude_fields?: string;\n query_by?: string;\n simple?: boolean;\n errors?: string[];\n }","DEFAULT_EMBEDDING_DIMENSIONS","DEFAULT_EMBEDDING_DIMENSIONS","client: Client","config: ModularPluginConfig","targetSchema: CollectionCreateSchema","error: unknown","DEFAULT_EMBEDDING_DIMENSIONS","client: Client","config: AgentManagerConfig","err","logger","client: Client","error: unknown","searchParams: Record<string, unknown>","typesenseField: CollectionFieldSchema","parts: string[]","docDeleteError: unknown","chunkDeleteError: unknown","error: 
unknown"],"sources":["../src/core/client/typesense-client.ts","../src/features/embedding/embeddings.ts","../src/features/rag/query-builder.ts","../src/features/rag/stream-handler.ts","../src/features/rag/setup.ts","../src/features/rag/handlers/rag-search-handler.ts","../src/features/rag/handlers/chunk-fetch-handler.ts","../src/features/rag/handlers/session-handlers.ts","../src/features/rag/utils/sse-utils.ts","../src/features/rag/chat-session-repository.ts","../src/features/rag/endpoints/chat/validators/request-validator.ts","../src/features/rag/endpoints/chat/handlers/embedding-handler.ts","../src/features/rag/endpoints/chat/handlers/session-handler.ts","../src/features/rag/endpoints/chat/handlers/token-limit-handler.ts","../src/features/rag/endpoints/chat/handlers/usage-stats-handler.ts","../src/features/rag/endpoints/chat/route.ts","../src/features/rag/stream-handlers/utils.ts","../src/features/rag/stream-handlers/streaming-handler.ts","../src/features/rag/stream-handlers/non-streaming-handler.ts","../src/features/rag/endpoints/chat/session/route.ts","../src/features/rag/endpoints/chunks/[id]/route.ts","../src/features/rag/endpoints/chat/agents/route.ts","../src/features/rag/endpoints.ts","../src/features/search/endpoints/handlers/collections-handler.ts","../src/shared/cache/cache.ts","../src/features/search/constants.ts","../src/features/search/results/process-traditional-results.ts","../src/features/search/traditional/build-params.ts","../src/features/search/traditional/search-collection.ts","../src/features/search/endpoints/handlers/executors/traditional-multi-collection-search.ts","../src/features/search/results/process-vector-results.ts","../src/features/search/vector/build-params.ts","../src/features/search/vector/build-multi-collection-params.ts","../src/features/search/vector/generate-vector.ts","../src/features/search/services/search-service.ts","../src/features/search/endpoints/handlers/utils/document-transformer.ts","../src/core/utils/naming.ts","../src/features/search/endpoints/handlers/utils/target-resolver.ts","../src/features/search/endpoints/handlers/utils/config-mapper.ts","../src/core/config/config-validation.ts","../src/features/search/utils/extract-collection-name.ts","../src/features/search/utils/extract-search-params.ts","../src/features/search/endpoints/handlers/validators/search-request-validator.ts","../src/features/search/endpoints/handlers/search-handler.ts","../src/features/search/endpoints.ts","../src/core/config/constants.ts","../src/shared/schema/collection-schemas.ts","../src/features/sync/services/schema-manager.ts","../src/features/rag/services/agent-manager.ts","../src/plugin/create-rag-plugin.ts","../src/adapter/typesense-adapter.ts","../src/adapter/create-adapter.ts","../src/features/sync/services/document-delete.ts"],"sourcesContent":["import Typesense from 'typesense'\nimport type { Client } from 'typesense'\nimport type { TypesenseConnectionConfig } from '../../shared/types/plugin-types.js'\n\nexport const createTypesenseClient = (typesenseConfig: TypesenseConnectionConfig) => {\n return new Typesense.Client({\n apiKey: typesenseConfig.apiKey,\n connectionTimeoutSeconds: typesenseConfig.connectionTimeoutSeconds || 2,\n nodes: typesenseConfig.nodes,\n })\n}\n\nexport const testTypesenseConnection = async (client: Client): Promise<boolean> => {\n try {\n await client.health.retrieve()\n return true\n } catch (_error) {\n // Handle Typesense connection error\n return false\n }\n}\n","import OpenAI from \"openai\";\nimport { GoogleGenerativeAI, 
TaskType } from \"@google/generative-ai\";\nimport type {\n EmbeddingProviderConfig,\n EmbeddingWithUsage,\n BatchEmbeddingWithUsage,\n} from \"../../shared/types/plugin-types.js\";\nimport {\n logger,\n DEFAULT_EMBEDDING_DIMENSIONS,\n DEFAULT_EMBEDDING_MODEL,\n DEFAULT_GEMINI_EMBEDDING_MODEL,\n MIN_EMBEDDING_TEXT_LENGTH,\n} from \"@nexo-labs/payload-indexer\";\n\nlet openaiClient: OpenAI | null = null;\nlet currentOpenAIApiKey: string | null = null;\n\nlet geminiClient: GoogleGenerativeAI | null = null;\nlet currentGeminiApiKey: string | null = null;\n\nconst getOpenAIClient = (apiKey?: string): OpenAI | null => {\n const key = apiKey || process.env.OPENAI_API_KEY;\n\n if (!key) {\n return null;\n }\n\n // Recreate client if API key changed\n if (!openaiClient || currentOpenAIApiKey !== key) {\n openaiClient = new OpenAI({\n apiKey: key,\n });\n currentOpenAIApiKey = key;\n }\n\n return openaiClient;\n};\n\nconst getGeminiClient = (apiKey?: string): GoogleGenerativeAI | null => {\n const key = apiKey || process.env.GOOGLE_API_KEY;\n\n if (!key) {\n return null;\n }\n\n // Recreate client if API key changed\n if (!geminiClient || currentGeminiApiKey !== key) {\n geminiClient = new GoogleGenerativeAI(key);\n currentGeminiApiKey = key;\n }\n\n return geminiClient;\n};\n\n/**\n * Generates an embedding for the given text using OpenAI or Gemini API\n * @param text - The text to generate an embedding for\n * @param config - Optional embedding configuration (provider, model, dimensions, apiKey)\n * @returns The embedding vector as an array of numbers, or null if generation fails\n */\nexport const generateEmbedding = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<number[] | null> => {\n if (!text || text.trim().length < MIN_EMBEDDING_TEXT_LENGTH) {\n logger.debug('Skipping embedding generation for empty or invalid text');\n return null;\n }\n\n const provider = config?.type || 'openai';\n\n if (provider === 'gemini') {\n return generateGeminiEmbedding(text, config);\n } else {\n return generateOpenAIEmbedding(text, config);\n }\n};\n\n/**\n * Generates an embedding using OpenAI API\n */\nconst generateOpenAIEmbedding = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<number[] | null> => {\n const client = getOpenAIClient(config?.apiKey);\n\n if (!client) {\n logger.debug('OpenAI API key not configured, skipping embedding generation');\n return null;\n }\n\n try {\n const model = config?.model || process.env.OPENAI_EMBEDDING_MODEL || DEFAULT_EMBEDDING_MODEL;\n const dimensions = config?.dimensions || DEFAULT_EMBEDDING_DIMENSIONS;\n\n logger.debug('Generating OpenAI embedding', { model, dimensions, textLength: text.length });\n\n const response = await client.embeddings.create({\n model,\n input: text.trim(),\n dimensions,\n });\n\n const embedding = response.data[0]?.embedding;\n\n logger.debug('OpenAI embedding generated', { embeddingLength: embedding?.length });\n\n if (\n !embedding ||\n !Array.isArray(embedding) ||\n embedding.length !== dimensions\n ) {\n logger.warn('Generated embedding has invalid dimensions', {\n expected: dimensions,\n received: embedding?.length,\n });\n return null;\n }\n\n return embedding;\n } catch (error) {\n logger.error('Failed to generate OpenAI embedding', error, {\n textLength: text.length,\n model: config?.model,\n });\n return null;\n }\n};\n\n/**\n * Generates an embedding using Google Gemini API\n */\nconst generateGeminiEmbedding = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<number[] 
| null> => {\n const client = getGeminiClient(config?.apiKey);\n\n if (!client) {\n logger.debug('Google API key not configured, skipping embedding generation');\n return null;\n }\n\n try {\n const model = config?.model || DEFAULT_GEMINI_EMBEDDING_MODEL;\n const dimensions = config?.dimensions || DEFAULT_EMBEDDING_DIMENSIONS;\n\n logger.debug('Generating Gemini embedding', { model, dimensions, textLength: text.length });\n\n const embeddingModel = client.getGenerativeModel({ model });\n const result = await embeddingModel.embedContent({\n content: { role: \"user\", parts: [{ text: text.trim() }] },\n taskType: TaskType.RETRIEVAL_DOCUMENT,\n });\n\n const embedding = result.embedding.values;\n\n logger.debug('Gemini embedding generated', { embeddingLength: embedding?.length });\n\n if (\n !embedding ||\n !Array.isArray(embedding) ||\n embedding.length !== dimensions\n ) {\n logger.warn('Generated embedding has invalid dimensions', {\n expected: dimensions,\n received: embedding?.length,\n });\n return null;\n }\n\n return embedding;\n } catch (error) {\n logger.error('Failed to generate Gemini embedding', error, {\n textLength: text.length,\n model: config?.model,\n });\n return null;\n }\n};\n\n/**\n * Generate embedding with usage tracking\n *\n * This function returns both the embedding and usage information (tokens used)\n *\n * @param text - The text to generate an embedding for\n * @param config - Optional embedding configuration\n * @returns Embedding with usage information, or null if generation fails\n */\nexport const generateEmbeddingWithUsage = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<EmbeddingWithUsage | null> => {\n if (!text || text.trim().length < MIN_EMBEDDING_TEXT_LENGTH) {\n logger.debug('Skipping embedding generation for empty or invalid text');\n return null;\n }\n\n const provider = config?.type || 'openai';\n\n if (provider === 'gemini') {\n return generateGeminiEmbeddingWithUsage(text, config);\n } else {\n return generateOpenAIEmbeddingWithUsage(text, config);\n }\n};\n\n/**\n * Generate OpenAI embedding with usage tracking\n */\nconst generateOpenAIEmbeddingWithUsage = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<EmbeddingWithUsage | null> => {\n const client = getOpenAIClient(config?.apiKey);\n\n if (!client) {\n logger.debug('OpenAI API key not configured, skipping embedding generation');\n return null;\n }\n\n try {\n const model = config?.model || process.env.OPENAI_EMBEDDING_MODEL || DEFAULT_EMBEDDING_MODEL;\n const dimensions = config?.dimensions || DEFAULT_EMBEDDING_DIMENSIONS;\n\n logger.debug('Generating OpenAI embedding with usage tracking', { model, dimensions });\n\n const response = await client.embeddings.create({\n model,\n input: text.trim(),\n dimensions,\n });\n\n const embedding = response.data[0]?.embedding;\n\n if (\n !embedding ||\n !Array.isArray(embedding) ||\n embedding.length !== dimensions\n ) {\n logger.warn('Generated embedding has invalid dimensions', {\n expected: dimensions,\n received: embedding?.length,\n });\n return null;\n }\n\n return {\n embedding,\n usage: {\n promptTokens: response.usage?.prompt_tokens || 0,\n totalTokens: response.usage?.total_tokens || 0,\n },\n };\n } catch (error) {\n logger.error('Failed to generate OpenAI embedding with usage', error, {\n textLength: text.length,\n model: config?.model,\n });\n return null;\n }\n};\n\n/**\n * Generate Gemini embedding with usage tracking\n * Note: Gemini doesn't provide token usage, so we estimate it\n */\nconst 
generateGeminiEmbeddingWithUsage = async (\n text: string,\n config?: EmbeddingProviderConfig\n): Promise<EmbeddingWithUsage | null> => {\n const embeddingResult = await generateGeminiEmbedding(text, config);\n\n if (!embeddingResult) {\n return null;\n }\n\n // Estimate tokens (rough approximation: 1 token ≈ 4 characters)\n const estimatedTokens = Math.ceil(text.length / 4);\n\n return {\n embedding: embeddingResult,\n usage: {\n promptTokens: estimatedTokens,\n totalTokens: estimatedTokens,\n },\n };\n};\n\n/**\n * Generate embeddings for multiple texts with usage tracking (batch)\n *\n * @param texts - Array of texts to generate embeddings for\n * @param config - Optional embedding configuration\n * @returns Embeddings with total usage information, or null if generation fails\n */\nexport const generateEmbeddingsBatchWithUsage = async (\n texts: string[],\n config?: EmbeddingProviderConfig\n): Promise<BatchEmbeddingWithUsage | null> => {\n if (!texts || texts.length === 0) {\n logger.debug('No texts provided for batch embedding generation');\n return null;\n }\n\n // Filter out empty texts\n const validTexts = texts.filter(t => t && t.trim().length >= MIN_EMBEDDING_TEXT_LENGTH);\n\n if (validTexts.length === 0) {\n logger.debug('No valid texts after filtering for batch embedding generation');\n return null;\n }\n\n const provider = config?.type || 'openai';\n\n if (provider === 'gemini') {\n return generateGeminiBatchEmbeddingsWithUsage(validTexts, config);\n } else {\n return generateOpenAIBatchEmbeddingsWithUsage(validTexts, config);\n }\n};\n\n/**\n * Generate OpenAI batch embeddings with usage tracking\n */\nconst generateOpenAIBatchEmbeddingsWithUsage = async (\n validTexts: string[],\n config?: EmbeddingProviderConfig\n): Promise<BatchEmbeddingWithUsage | null> => {\n const client = getOpenAIClient(config?.apiKey);\n\n if (!client) {\n logger.debug('OpenAI API key not configured, skipping batch embedding generation');\n return null;\n }\n\n try {\n const model = config?.model || process.env.OPENAI_EMBEDDING_MODEL || DEFAULT_EMBEDDING_MODEL;\n const dimensions = config?.dimensions || DEFAULT_EMBEDDING_DIMENSIONS;\n\n logger.debug('Generating OpenAI batch embeddings with usage tracking', {\n model,\n dimensions,\n batchSize: validTexts.length,\n });\n\n const response = await client.embeddings.create({\n model,\n input: validTexts.map(t => t.trim()),\n dimensions,\n });\n\n const embeddings = response.data.map(item => item.embedding);\n\n // Validate all embeddings\n const allValid = embeddings.every(\n emb => Array.isArray(emb) && emb.length === dimensions\n );\n\n if (!allValid) {\n logger.warn('Some generated embeddings have invalid dimensions', {\n expected: dimensions,\n batchSize: embeddings.length,\n });\n return null;\n }\n\n logger.info('OpenAI batch embeddings generated successfully', {\n count: embeddings.length,\n totalTokens: response.usage?.total_tokens || 0,\n });\n\n return {\n embeddings,\n usage: {\n promptTokens: response.usage?.prompt_tokens || 0,\n totalTokens: response.usage?.total_tokens || 0,\n },\n };\n } catch (error) {\n logger.error('Failed to generate OpenAI batch embeddings with usage', error, {\n batchSize: validTexts.length,\n model: config?.model,\n });\n return null;\n }\n};\n\n/**\n * Generate Gemini batch embeddings with usage tracking\n * Note: Gemini API handles one text at a time, so we batch them sequentially\n */\nconst generateGeminiBatchEmbeddingsWithUsage = async (\n validTexts: string[],\n config?: EmbeddingProviderConfig\n): 
Promise<BatchEmbeddingWithUsage | null> => {\n const client = getGeminiClient(config?.apiKey);\n\n if (!client) {\n logger.debug('Google API key not configured, skipping batch embedding generation');\n return null;\n }\n\n try {\n const model = config?.model || DEFAULT_GEMINI_EMBEDDING_MODEL;\n const dimensions = config?.dimensions || DEFAULT_EMBEDDING_DIMENSIONS;\n\n logger.debug('Generating Gemini batch embeddings with usage tracking', {\n model,\n dimensions,\n batchSize: validTexts.length,\n });\n\n const embeddingModel = client.getGenerativeModel({ model });\n const embeddings: number[][] = [];\n let totalEstimatedTokens = 0;\n\n // Process each text sequentially\n for (const text of validTexts) {\n const result = await embeddingModel.embedContent({\n content: { role: \"user\", parts: [{ text: text.trim() }] },\n taskType: TaskType.RETRIEVAL_DOCUMENT,\n });\n\n embeddings.push(result.embedding.values);\n totalEstimatedTokens += Math.ceil(text.length / 4);\n }\n\n // Validate all embeddings\n const allValid = embeddings.every(\n emb => Array.isArray(emb) && emb.length === dimensions\n );\n\n if (!allValid) {\n logger.warn('Some generated embeddings have invalid dimensions', {\n expected: dimensions,\n batchSize: embeddings.length,\n });\n return null;\n }\n\n logger.info('Gemini batch embeddings generated successfully', {\n count: embeddings.length,\n estimatedTokens: totalEstimatedTokens,\n });\n\n return {\n embeddings,\n usage: {\n promptTokens: totalEstimatedTokens,\n totalTokens: totalEstimatedTokens,\n },\n };\n } catch (error) {\n logger.error('Failed to generate Gemini batch embeddings with usage', error, {\n batchSize: validTexts.length,\n model: config?.model,\n });\n return null;\n }\n};\n","/**\n * Query builder utilities for Typesense Conversational RAG\n */\n\nimport type { TypesenseConnectionConfig } from '../../index.js'\nimport { TypesenseQueryConfig, AdvancedSearchConfig } from '../../shared/index.js'\n\n/**\n * Typesense search request object\n */\ninterface TypesenseSearchRequest {\n collection: string\n query_by: string\n vector_query: string\n exclude_fields: string\n filter_by?: string\n typo_tokens_threshold?: number\n num_typos?: number\n prefix?: boolean\n drop_tokens_threshold?: number\n enable_stemming?: boolean\n}\n\n/**\n * Advanced search parameters object\n */\ninterface AdvancedSearchParams {\n typo_tokens_threshold?: number\n num_typos?: number\n prefix?: boolean\n drop_tokens_threshold?: number\n enable_stemming?: boolean\n}\n\n/**\n * Build the Typesense conversational search URL with all necessary parameters\n *\n * @param config - Query configuration\n * @param config.userMessage - The user's message/query\n * @param config.chatId - Optional conversation ID for follow-up questions\n * @param conversationModelId - The conversation model ID in Typesense\n * @param typesenseConfig - Typesense connection config\n * @returns URL for the Typesense multi_search endpoint with conversation parameters\n */\nexport function buildConversationalUrl(\n config: { userMessage: string; chatId?: string },\n conversationModelId: string,\n typesenseConfig: TypesenseConnectionConfig\n): URL {\n const protocol = typesenseConfig.nodes[0].protocol || 'http'\n const typesenseUrl = new URL(\n `${protocol}://${typesenseConfig.nodes[0].host}:${typesenseConfig.nodes[0].port}/multi_search`\n )\n\n // Add conversation parameters to URL\n typesenseUrl.searchParams.set('q', config.userMessage)\n typesenseUrl.searchParams.set('conversation', 'true')\n 
typesenseUrl.searchParams.set('conversation_model_id', conversationModelId)\n\n if (config.chatId) {\n typesenseUrl.searchParams.set('conversation_id', config.chatId)\n }\n\n typesenseUrl.searchParams.set('conversation_stream', 'true')\n\n return typesenseUrl\n}\n\n/**\n * Build multi-search requests for Typesense with hybrid search configuration\n *\n * @param config - Query configuration including embedding, collections, and filters\n * @returns Array of search requests for Typesense multi_search\n */\nexport function buildMultiSearchRequests(config: TypesenseQueryConfig) {\n const {\n searchCollections,\n queryEmbedding,\n selectedDocuments,\n kResults = 10,\n advancedConfig = {}\n } = config\n\n return searchCollections.map((collection: string) => {\n const request: TypesenseSearchRequest = {\n collection,\n query_by: 'chunk_text,title,headers',\n vector_query: `embedding:([${queryEmbedding.join(',')}], k:${kResults})`,\n exclude_fields: 'embedding',\n ...buildAdvancedSearchParams(advancedConfig),\n }\n\n // Add document filter if documents are selected\n if (selectedDocuments && selectedDocuments.length > 0) {\n const documentIds = selectedDocuments.map((id: string) => `\"${id}\"`).join(',')\n request.filter_by = `parent_doc_id:[${documentIds}]`\n }\n\n return request\n })\n}\n\n/**\n * Build advanced search parameters from config\n *\n * @param config - Advanced search configuration\n * @returns Object with advanced search parameters\n */\nfunction buildAdvancedSearchParams(config: AdvancedSearchConfig): AdvancedSearchParams {\n const params: AdvancedSearchParams = {}\n\n if (config.typoTokensThreshold !== undefined) {\n params.typo_tokens_threshold = config.typoTokensThreshold\n }\n\n if (config.numTypos !== undefined) {\n params.num_typos = config.numTypos\n }\n\n if (config.prefix !== undefined) {\n params.prefix = config.prefix\n }\n\n if (config.dropTokensThreshold !== undefined) {\n params.drop_tokens_threshold = config.dropTokensThreshold\n }\n\n if (config.enableStemming !== undefined) {\n params.enable_stemming = config.enableStemming\n }\n\n return params\n}\n\n/**\n * Build the complete Typesense request body for multi-search\n *\n * @param config - Query configuration\n * @returns Request body for Typesense multi_search endpoint\n */\nexport function buildMultiSearchRequestBody(config: TypesenseQueryConfig) {\n return {\n searches: buildMultiSearchRequests(config),\n }\n}\n\n/**\n * Build hybrid search parameters for combining semantic and keyword search\n *\n * @param alpha - Weight between semantic (1.0) and keyword (0.0) search\n * @param rerankMatches - Whether to rerank hybrid search results\n * @param queryFields - Fields to use for keyword search\n * @returns Object with hybrid search parameters\n */\nexport function buildHybridSearchParams(\n alpha = 0.9,\n rerankMatches = true,\n queryFields = 'chunk_text,title'\n) {\n return {\n alpha,\n rerank_hybrid_matches: rerankMatches,\n query_fields: queryFields,\n }\n}\n","/**\n * Stream handler utilities for Typesense Conversational RAG SSE events\n */\n\nimport { ChunkSource, TypesenseRAGChunkDocument, TypesenseRAGSearchResult } from '../../shared/index.js'\nimport { logger } from '../../core/logging/logger.js'\n\n/**\n * Parsed conversation event from Typesense SSE stream\n */\nexport interface ConversationEvent {\n /** Conversation ID */\n conversationId?: string\n /** Message token/chunk */\n message?: string\n /** Search results (only in first event) */\n results?: TypesenseRAGSearchResult[]\n /** Raw parsed data 
*/\n raw?: unknown\n}\n\n/**\n * Stream processing result\n */\nexport interface StreamProcessingResult {\n /** Full assistant message */\n fullMessage: string\n /** Conversation ID */\n conversationId: string | null\n /** Extracted sources */\n sources: ChunkSource[]\n /** Context text (for token estimation) */\n contextText: string\n}\n\n/**\n * Parse a single SSE event from Typesense conversation stream\n *\n * @param line - Raw SSE event line\n * @returns Parsed conversation event or null if not parseable\n */\nexport function parseConversationEvent(line: string): ConversationEvent | null {\n if (!line.startsWith('data: ')) {\n return null\n }\n\n const data = line.slice(6)\n\n if (data === '[DONE]') {\n return { raw: '[DONE]' }\n }\n\n try {\n const parsed = JSON.parse(data)\n const event: ConversationEvent = { raw: parsed }\n\n // Extract conversation ID\n if (parsed.conversation_id) {\n event.conversationId = parsed.conversation_id\n } else if (parsed.conversation?.conversation_id) {\n event.conversationId = parsed.conversation.conversation_id\n }\n\n // Extract message/token\n if (parsed.message !== undefined) {\n event.message = parsed.message\n } else if (parsed.conversation?.answer) {\n event.message = parsed.conversation.answer\n }\n\n // Extract results (usually in first event)\n if (parsed.results) {\n event.results = parsed.results\n }\n\n return event\n } catch (e) {\n logger.error('Error parsing SSE data from conversation stream', e as Error)\n return null\n }\n}\n\n/**\n * Extract sources from Typesense search results\n *\n * @param results - Typesense multi-search results array\n * @param documentTypeResolver - Optional function to resolve document type from collection name\n * @returns Array of chunk sources with metadata\n */\nexport function extractSourcesFromResults(\n results: TypesenseRAGSearchResult[],\n documentTypeResolver?: (collectionName: string) => string\n): ChunkSource[] {\n const allSources: ChunkSource[] = []\n\n for (const result of results) {\n if (result.hits) {\n for (const hit of result.hits) {\n const doc = hit.document as TypesenseRAGChunkDocument\n const score = hit.vector_distance || hit.text_match || 0\n const collectionName = result.request_params?.collection_name || ''\n\n const type = documentTypeResolver\n ? documentTypeResolver(collectionName)\n : getDefaultDocumentType(collectionName)\n\n const fullContent = doc.chunk_text || ''\n\n const source: ChunkSource = {\n id: doc.id || '',\n title: doc.title || 'Sin título',\n slug: doc.slug || '',\n type,\n chunkIndex: doc.chunk_index ?? 0,\n relevanceScore: score,\n content: '', // Empty by default - can be loaded separately\n excerpt: fullContent.substring(0, 200) + (fullContent.length > 200 ? '...' 
: ''),\n }\n\n allSources.push(source)\n }\n }\n }\n\n return allSources\n}\n\n/**\n * Build context text from results (useful for token estimation)\n *\n * @param results - Typesense multi-search results array\n * @returns Combined context text from all chunks\n */\nexport function buildContextText(results: TypesenseRAGSearchResult[]): string {\n let contextText = ''\n\n for (const result of results) {\n if (result.hits) {\n for (const hit of result.hits) {\n const doc = hit.document as TypesenseRAGChunkDocument\n contextText += (doc.chunk_text || '') + '\\n'\n }\n }\n }\n\n return contextText\n}\n\n/**\n * Process a Typesense conversation stream\n *\n * @param response - Fetch Response with SSE stream\n * @param onEvent - Callback for each parsed event\n * @param documentTypeResolver - Optional function to resolve document type\n * @returns Processing result with full message, ID, and sources\n */\nexport async function processConversationStream(\n response: Response,\n onEvent?: (event: ConversationEvent) => void,\n documentTypeResolver?: (collectionName: string) => string\n): Promise<StreamProcessingResult> {\n const reader = response.body!.getReader()\n const decoder = new TextDecoder()\n\n let buffer = ''\n let sources: ChunkSource[] = []\n let hasCollectedSources = false\n let conversationId: string | null = null\n let contextText = ''\n let fullMessage = ''\n\n while (true) {\n const { done, value } = await reader.read()\n if (done) break\n\n buffer += decoder.decode(value, { stream: true })\n const lines = buffer.split('\\n')\n buffer = lines.pop() || ''\n\n for (const line of lines) {\n const event = parseConversationEvent(line)\n if (!event) continue\n\n // Notify callback\n if (onEvent) {\n onEvent(event)\n }\n\n // Capture conversation ID\n if (!conversationId && event.conversationId) {\n conversationId = event.conversationId\n }\n\n // Extract sources from first results\n if (!hasCollectedSources && event.results) {\n sources = extractSourcesFromResults(event.results, documentTypeResolver)\n contextText = buildContextText(event.results)\n hasCollectedSources = true\n }\n\n // Accumulate message\n if (event.message) {\n fullMessage += event.message\n }\n }\n }\n\n return {\n fullMessage,\n conversationId,\n sources,\n contextText,\n }\n}\n\n/**\n * Create a ReadableStream that forwards SSE events\n *\n * @param response - Fetch Response with SSE stream\n * @param onData - Callback for processing each event before forwarding\n * @returns ReadableStream for SSE events\n */\nexport function createSSEForwardStream(\n response: Response,\n onData?: (event: ConversationEvent) => void\n): ReadableStream<Uint8Array> {\n const reader = response.body!.getReader()\n const decoder = new TextDecoder()\n const encoder = new TextEncoder()\n\n let buffer = ''\n\n return new ReadableStream({\n async start(controller) {\n while (true) {\n const { done, value } = await reader.read()\n if (done) {\n controller.close()\n break\n }\n\n buffer += decoder.decode(value, { stream: true })\n const lines = buffer.split('\\n')\n buffer = lines.pop() || ''\n\n for (const line of lines) {\n const event = parseConversationEvent(line)\n\n if (event && onData) {\n onData(event)\n }\n\n // Forward original line\n if (line) {\n controller.enqueue(encoder.encode(line + '\\n'))\n }\n }\n }\n },\n cancel() {\n reader.cancel()\n }\n })\n}\n\n/**\n * Default document type resolver based on collection name\n *\n * @param collectionName - Name of the Typesense collection\n * @returns Document type string\n */\nfunction 
getDefaultDocumentType(collectionName: string): string {\n if (collectionName.includes('article')) {\n return 'article'\n }\n if (collectionName.includes('book')) {\n return 'book'\n }\n if (collectionName.includes('post')) {\n return 'post'\n }\n if (collectionName.includes('page')) {\n return 'page'\n }\n return 'document'\n}\n","/**\n * Setup utilities for Typesense Conversational RAG\n */\n\nimport type { Client } from 'typesense'\nimport { logger } from '../../core/logging/logger.js'\nimport { RAGConfig } from '../../shared/index.js'\n/**\n * Ensure conversation history collection exists\n *\n * @param client - Typesense client\n * @param collectionName - Name of the conversation history collection\n * @returns true if collection exists or was created successfully\n */\nexport async function ensureConversationCollection(\n client: Client,\n collectionName: string = 'conversation_history'\n): Promise<boolean> {\n try {\n // Check if collection exists\n await client.collections(collectionName).retrieve()\n logger.info('Conversation collection already exists', { collection: collectionName })\n return true\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number }\n if (typesenseError?.httpStatus === 404) {\n logger.info('Creating conversation collection', { collection: collectionName })\n\n try {\n // Create conversation collection\n // Note: Typesense manages conversation schema automatically\n // We just need to ensure the collection can be created\n await client.collections().create({\n name: collectionName,\n fields: [\n { name: 'conversation_id', type: 'string' },\n { name: 'model_id', type: 'string' },\n { name: 'timestamp', type: 'int32' },\n { name: 'role', type: 'string' },\n { name: 'message', type: 'string' }\n ],\n })\n\n logger.info('Conversation collection created successfully', { collection: collectionName })\n return true\n } catch (createError) {\n logger.error('Failed to create conversation collection', createError as Error, {\n collection: collectionName,\n })\n return false\n }\n }\n\n logger.error('Error checking conversation collection', error as Error, {\n collection: collectionName,\n })\n return false\n }\n}\n\n/**\n * Get default RAG configuration values\n *\n * @returns Default RAG configuration\n */\nexport function getDefaultRAGConfig(): Required<Omit<RAGConfig, 'agents'>> {\n return {\n hybrid: {\n alpha: 0.9,\n rerankMatches: true,\n queryFields: 'chunk_text,title',\n },\n hnsw: {\n efConstruction: 200,\n M: 16,\n ef: 100,\n maxConnections: 64,\n distanceMetric: 'cosine',\n },\n advanced: {\n typoTokensThreshold: 1,\n numTypos: 2,\n prefix: true,\n dropTokensThreshold: 1,\n enableStemming: true,\n },\n }\n}\n\n/**\n * Merge user RAG config with defaults\n *\n * @param userConfig - User-provided RAG configuration\n * @returns Merged configuration with defaults\n */\nexport function mergeRAGConfigWithDefaults(userConfig?: RAGConfig): RAGConfig {\n const defaults = getDefaultRAGConfig()\n\n if (!userConfig) {\n return defaults\n }\n\n return {\n hybrid: { ...defaults.hybrid, ...userConfig.hybrid },\n hnsw: { ...defaults.hnsw, ...userConfig.hnsw },\n advanced: { ...defaults.advanced, ...userConfig.advanced },\n }\n}\n","/**\n * RAG search handler\n *\n * Handles the execution of RAG conversational search against Typesense\n */\n\nimport type { TypesenseConnectionConfig } from '../../../index.js'\nimport { ChunkSource } from '../../../shared/index.js'\nimport {\n buildConversationalUrl,\n buildMultiSearchRequestBody,\n} from 
'../query-builder.js'\n\n/**\n * Configuration for RAG search\n */\nexport type RAGSearchConfig = {\n /** Collections to search in */\n searchCollections: string[]\n /** Conversation model ID */\n modelId: string\n /** Number of results to retrieve */\n kResults?: number\n /** Advanced search configuration */\n advancedConfig?: {\n typoTokensThreshold?: number\n numTypos?: number\n prefix?: boolean\n dropTokensThreshold?: number\n }\n}\n\n/**\n * Request parameters for RAG chat\n */\nexport type RAGChatRequest = {\n /** User's message */\n userMessage: string\n /** Query embedding vector */\n queryEmbedding: number[]\n /** Optional chat/conversation ID for follow-up messages */\n chatId?: string\n /** Optional selected document IDs to filter search */\n selectedDocuments?: string[]\n}\n\n/**\n * Result of a RAG search operation\n */\nexport type RAGSearchResult = {\n /** Full assistant message (for non-streaming responses) */\n fullAssistantMessage?: string\n /** Conversation ID from Typesense */\n conversationId?: string\n /** Sources/chunks used in the response */\n sources: ChunkSource[]\n /** Raw response from Typesense */\n response: Response\n /** Whether the response is streaming */\n isStreaming: boolean\n}\n\n/**\n * Execute a RAG conversational search\n *\n * This function handles the complete flow of executing a RAG search against Typesense:\n * 1. Builds the conversational URL\n * 2. Builds the multi-search request body\n * 3. Executes the request\n * 4. Returns the response with metadata\n *\n * @param typesenseConfig - Typesense connection configuration\n * @param searchConfig - RAG search configuration\n * @param request - Chat request parameters\n * @returns Promise with search results\n */\nexport async function executeRAGSearch(\n typesenseConfig: TypesenseConnectionConfig,\n searchConfig: RAGSearchConfig,\n request: RAGChatRequest,\n): Promise<RAGSearchResult> {\n // Build the Typesense conversational search URL\n const typesenseUrl = buildConversationalUrl(\n request,\n searchConfig.modelId,\n typesenseConfig\n )\n\n // Build the multi-search request body\n const requestBody = buildMultiSearchRequestBody({\n userMessage: request.userMessage,\n queryEmbedding: request.queryEmbedding,\n selectedDocuments: request.selectedDocuments,\n chatId: request.chatId,\n searchCollections: searchConfig.searchCollections,\n kResults: searchConfig.kResults || 10,\n advancedConfig: searchConfig.advancedConfig,\n })\n\n // Execute the search\n const response = await fetch(typesenseUrl.toString(), {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-TYPESENSE-API-KEY': typesenseConfig.apiKey,\n },\n body: JSON.stringify(requestBody),\n })\n\n if (!response.ok) {\n const errorText = await response.text()\n throw new Error(`Typesense search failed: ${errorText}`)\n }\n\n // Check if response is streaming\n const contentType = response.headers.get('content-type')\n const isStreaming = contentType?.includes('text/event-stream') || false\n\n return {\n response,\n isStreaming,\n sources: [], // Will be populated by stream/response handlers\n }\n}\n","/**\n * Chunk fetch handler\n *\n * Handles fetching individual chunk documents by ID from Typesense\n */\n\nimport type { Client } from 'typesense'\nimport { TypesenseRAGChunkDocument } from '../../../shared/index.js'\n\n/**\n * Configuration for fetching a chunk by ID\n */\nexport type ChunkFetchConfig = {\n /** Chunk document ID */\n chunkId: string\n /** Collection name */\n collectionName: string\n /** Valid collection 
names for validation */\n validCollections?: string[]\n}\n\n/**\n * Result of fetching a chunk\n */\nexport type ChunkFetchResult = {\n id: string\n chunk_text: string\n title?: string\n slug?: string\n chunk_index?: number\n collection: string\n}\n\n/**\n * Fetch a chunk document by ID from Typesense\n *\n * @param client - Typesense client instance\n * @param config - Chunk fetch configuration\n * @returns Promise with chunk data\n * @throws Error if chunk not found or collection is invalid\n */\nexport async function fetchChunkById(\n client: Client,\n config: ChunkFetchConfig,\n): Promise<ChunkFetchResult> {\n const { chunkId, collectionName, validCollections } = config\n\n // Validate collection if validCollections is provided\n if (validCollections && !validCollections.includes(collectionName)) {\n throw new Error(\n `Invalid collection: ${collectionName}. Must be one of: ${validCollections.join(', ')}`,\n )\n }\n\n try {\n // Retrieve the document from Typesense\n const document = (await client\n .collections(collectionName)\n .documents(chunkId)\n .retrieve()) as TypesenseRAGChunkDocument\n\n // Extract chunk data\n const chunkText = document.chunk_text || ''\n\n if (!chunkText) {\n throw new Error('Chunk contains no text')\n }\n\n return {\n id: document.id,\n chunk_text: chunkText,\n title: document.title,\n slug: document.slug,\n chunk_index: document.chunk_index,\n collection: collectionName,\n }\n } catch (error: unknown) {\n // Handle Typesense 404 errors\n if (error && typeof error === 'object' && 'httpStatus' in error && error.httpStatus === 404) {\n throw new Error(`Chunk not found: ${chunkId}`)\n }\n throw error\n }\n}\n","/**\n * Session management handlers\n *\n * Handles all chat session operations including getting, saving, and closing sessions\n */\n\nimport type { CollectionSlug, Payload } from 'payload'\n\n/**\n * Session data structure\n */\nexport type ChatSessionData = {\n conversation_id: string\n messages: Array<Record<string, unknown>>\n status: string\n total_tokens?: number\n total_cost?: number\n last_activity?: string\n}\n\n/**\n * Configuration for session operations\n */\nexport type SessionConfig = {\n /** Collection name for sessions */\n collectionName?: CollectionSlug\n /** Time window for active sessions in milliseconds */\n activeSessionWindow?: number\n}\n\n/**\n * Get active chat session for a user\n *\n * @param payload - Payload CMS instance\n * @param userId - User ID\n * @param config - Session configuration\n * @returns Promise with session data or null\n */\nexport async function getActiveSession(\n payload: Payload,\n userId: string | number,\n config: SessionConfig = {},\n): Promise<ChatSessionData | null> {\n const collectionName = config.collectionName || 'chat-sessions'\n const windowMs = config.activeSessionWindow || 24 * 60 * 60 * 1000 // 24 hours default\n\n const cutoffTime = new Date(Date.now() - windowMs)\n\n const chatSessions = await payload.find({\n collection: collectionName,\n where: {\n and: [\n {\n user: {\n equals: userId,\n },\n },\n {\n status: {\n equals: 'active',\n },\n },\n {\n last_activity: {\n greater_than: cutoffTime.toISOString(),\n },\n },\n ],\n },\n sort: '-last_activity',\n limit: 1,\n })\n\n if (!chatSessions.docs.length) {\n return null\n }\n\n return chatSessions.docs[0] as unknown as ChatSessionData\n}\n\n/**\n * Get session by conversation ID\n *\n * @param payload - Payload CMS instance\n * @param userId - User ID\n * @param conversationId - Conversation ID\n * @param config - Session configuration\n 
* @returns Promise with session data or null\n */\nexport async function getSessionByConversationId(\n payload: Payload,\n userId: string | number,\n conversationId: string,\n config: SessionConfig = {},\n): Promise<ChatSessionData | null> {\n const collectionName = config.collectionName || 'chat-sessions'\n\n const chatSessions = await payload.find({\n collection: collectionName,\n where: {\n and: [\n {\n conversation_id: {\n equals: conversationId,\n },\n },\n {\n user: {\n equals: userId,\n },\n },\n ],\n },\n limit: 1,\n })\n\n if (!chatSessions.docs.length) {\n return null\n }\n\n return chatSessions.docs[0] as unknown as ChatSessionData\n}\n\n/**\n * Close a chat session\n *\n * @param payload - Payload CMS instance\n * @param userId - User ID\n * @param conversationId - Conversation ID\n * @param config - Session configuration\n * @returns Promise with updated session data or null if not found\n */\nexport async function closeSession(\n payload: Payload,\n userId: string | number,\n conversationId: string,\n config: SessionConfig = {},\n): Promise<ChatSessionData | null> {\n const collectionName = config.collectionName || 'chat-sessions'\n\n const chatSessions = await payload.find({\n collection: collectionName,\n where: {\n and: [\n {\n conversation_id: {\n equals: conversationId,\n },\n },\n {\n user: {\n equals: userId,\n },\n },\n ],\n },\n limit: 1,\n })\n\n if (!chatSessions.docs.length) {\n return null\n }\n\n const session = chatSessions.docs[0] as unknown as ChatSessionData\n if (!session) {\n return null\n }\n await payload.update({\n collection: collectionName,\n where: {\n conversation_id: {\n equals: conversationId,\n },\n },\n data: {\n status: 'closed',\n closed_at: new Date().toISOString(),\n },\n })\n\n return {\n conversation_id: session.conversation_id,\n messages: session.messages || [],\n status: 'closed',\n total_tokens: session.total_tokens,\n total_cost: session.total_cost,\n last_activity: session.last_activity,\n }\n}\n","/**\n * Server-Sent Events (SSE) utilities\n *\n * Provides utilities for formatting and sending SSE events\n */\n\nimport { SSEEvent } from \"../../../shared/index.js\"\n\n\n/**\n * Helper to create an SSE event string\n *\n * @param event - SSE event object\n * @returns Formatted SSE event string\n */\nexport function formatSSEEvent(event: SSEEvent): string {\n return `data: ${JSON.stringify(event)}\\n\\n`\n}\n\n/**\n * Helper to send an SSE event through a controller\n *\n * @param controller - ReadableStreamDefaultController\n * @param encoder - TextEncoder instance\n * @param event - SSE event to send\n */\nexport function sendSSEEvent(\n controller: ReadableStreamDefaultController<Uint8Array>,\n encoder: TextEncoder,\n event: SSEEvent,\n): void {\n const data = formatSSEEvent(event)\n controller.enqueue(encoder.encode(data))\n}\n","/**\n * Chat Session Repository\n * Functions for managing chat sessions in PayloadCMS\n */\n\nimport type { CollectionSlug, Payload } from 'payload'\nimport { logger } from '../../core/logging/logger.js'\nimport { ChunkSource, SpendingEntry } from '../../shared/index.js'\n/**\n * Chat message format with optional sources\n */\nexport interface ChatMessageWithSources {\n role: 'user' | 'assistant'\n content: string\n timestamp: string\n sources?: Array<{\n id: string\n title: string\n type: string\n chunk_index: number\n slug?: string\n }>\n}\n\n/**\n * Chat session document structure\n */\ninterface ChatSessionDocument {\n id: string | number\n messages?: unknown\n spending?: unknown\n total_tokens?: 
number\n total_cost?: number\n conversation_id?: string\n status?: string\n last_activity?: Date | string\n}\n\n/**\n * Save or update chat session in PayloadCMS\n *\n * @param payload - Payload CMS instance\n * @param userId - User ID\n * @param conversationId - Conversation ID from Typesense\n * @param userMessage - User's message\n * @param assistantMessage - Assistant's response\n * @param sources - Source chunks used for the response\n * @param spending - Token spending entries\n * @param collectionName - Collection name for sessions (default: 'chat-sessions')\n */\nexport async function saveChatSession(\n payload: Payload,\n userId: string | number,\n conversationId: string,\n userMessage: string,\n assistantMessage: string,\n sources: ChunkSource[],\n spending: SpendingEntry[],\n collectionName: CollectionSlug = 'chat-sessions'\n): Promise<void> {\n try {\n // Check if session already exists\n const existing = await payload.find({\n collection: collectionName,\n where: {\n conversation_id: {\n equals: conversationId,\n },\n },\n limit: 1,\n })\n\n const newUserMessage: ChatMessageWithSources = {\n role: 'user',\n content: userMessage,\n timestamp: new Date().toISOString(),\n }\n\n const newAssistantMessage: ChatMessageWithSources = {\n role: 'assistant',\n content: assistantMessage,\n timestamp: new Date().toISOString(),\n sources: sources.map((s) => ({\n id: s.id,\n title: s.title,\n type: s.type,\n chunk_index: s.chunkIndex,\n slug: s.slug,\n })),\n }\n\n if (existing.docs.length > 0 && existing.docs[0]) {\n // Update existing session\n await updateExistingSession(\n payload,\n existing.docs[0] as ChatSessionDocument,\n newUserMessage,\n newAssistantMessage,\n spending,\n collectionName,\n )\n } else {\n // Create new session\n await createNewSession(\n payload,\n userId,\n conversationId,\n newUserMessage,\n newAssistantMessage,\n spending,\n collectionName,\n )\n }\n } catch (error) {\n logger.error('Error saving chat session', error as Error, {\n conversationId,\n userId,\n })\n // Don't fail the request if saving fails\n }\n}\n\n/**\n * Update an existing chat session\n */\nasync function updateExistingSession(\n payload: Payload,\n session: ChatSessionDocument,\n newUserMessage: ChatMessageWithSources,\n newAssistantMessage: ChatMessageWithSources,\n spending: SpendingEntry[],\n collectionName: CollectionSlug,\n): Promise<void> {\n const existingMessages = (session.messages as ChatMessageWithSources[]) || []\n const existingSpending = (session.spending as SpendingEntry[]) || []\n\n const messages = [...existingMessages, newUserMessage, newAssistantMessage]\n const allSpending = [...existingSpending, ...spending]\n const totalTokens =\n (session.total_tokens || 0) + spending.reduce((sum, e) => sum + e.tokens.total, 0)\n const totalCost =\n (session.total_cost || 0) + spending.reduce((sum, e) => sum + (e.cost_usd || 0), 0)\n\n await payload.update({\n collection: collectionName,\n id: session.id,\n data: {\n messages,\n spending: allSpending,\n total_tokens: totalTokens,\n total_cost: totalCost,\n last_activity: new Date().toISOString(),\n status: 'active',\n },\n })\n\n logger.info('Chat session updated successfully', {\n sessionId: session.id,\n conversationId: session.conversation_id,\n totalTokens,\n totalCost,\n })\n}\n\n/**\n * Create a new chat session\n */\nasync function createNewSession(\n payload: Payload,\n userId: string | number,\n conversationId: string,\n newUserMessage: ChatMessageWithSources,\n newAssistantMessage: ChatMessageWithSources,\n spending: 
SpendingEntry[],\n collectionName: CollectionSlug,\n): Promise<void> {\n const totalTokens = spending.reduce((sum, e) => sum + e.tokens.total, 0)\n const totalCost = spending.reduce((sum, e) => sum + (e.cost_usd || 0), 0)\n\n await payload.create({\n collection: collectionName,\n data: {\n user: userId as string,\n conversation_id: conversationId,\n status: 'active',\n messages: [newUserMessage, newAssistantMessage],\n spending,\n total_tokens: totalTokens,\n total_cost: totalCost,\n last_activity: new Date().toISOString(),\n },\n })\n\n logger.info('New chat session created successfully', {\n conversationId,\n userId,\n totalTokens,\n totalCost,\n })\n}\n","import type { Payload, PayloadRequest } from 'payload';\nimport type { ChatEndpointConfig } from '../route.js';\n\n/**\n * JSON Response helper\n */\nexport const jsonResponse = (data: any, options?: ResponseInit) => {\n return new Response(JSON.stringify(data), {\n headers: { 'Content-Type': 'application/json' },\n ...options,\n });\n };\n\n/**\n * Validates chat request and extracts required data\n */\nexport async function validateChatRequest(\n request: PayloadRequest,\n config: ChatEndpointConfig\n): Promise<\n | { success: false; error: Response }\n | {\n success: true;\n userId: string | number;\n userEmail: string;\n payload: Payload;\n userMessage: string;\n body: { message: string; chatId?: string; selectedDocuments?: string[]; agentSlug?: string };\n }\n> {\n // Check permissions\n if (!await config.checkPermissions(request)) {\n return {\n success: false,\n error: jsonResponse({ error: 'No tienes permisos para acceder a esta sesión.' }, { status: 403 }),\n };\n }\n\n // Validate request structure\n if (!request.url || !request.user) {\n return {\n success: false,\n error: jsonResponse({ error: 'URL not found' }, { status: 400 }),\n };\n }\n\n const { id: userId, email } = request.user;\n const userEmail = email || '';\n const payload = await config.getPayload();\n const body = await request.json?.();\n\n // Validate body exists\n if (!body) {\n return {\n success: false,\n error: jsonResponse({ error: 'Body not found' }, { status: 400 }),\n };\n }\n\n // Validate message\n if (!body.message || typeof body.message !== 'string' || body.message.trim() === '') {\n return {\n success: false,\n error: jsonResponse({ error: 'Se requiere un mensaje.' 
}, { status: 400 }),\n };\n }\n\n const userMessage = body.message.trim();\n\n return {\n success: true,\n userId,\n userEmail,\n payload,\n userMessage,\n body,\n };\n}\n","import type { ChatEndpointConfig } from '../route.js';\nimport {\n Logger,\n logger,\n DEFAULT_EMBEDDING_MODEL,\n EmbeddingServiceImpl,\n OpenAIEmbeddingProvider,\n GeminiEmbeddingProvider,\n type OpenAIProviderConfig,\n type GeminiProviderConfig,\n} from '@nexo-labs/payload-indexer';\nimport { SpendingEntry } from '../../../../../shared/index.js';\n\n/**\n * Generates embedding and tracks usage\n */\nexport async function generateEmbeddingWithTracking(\n userMessage: string,\n config: ChatEndpointConfig,\n spendingEntries: SpendingEntry[]\n): Promise<number[]> {\n logger.debug('Generating embeddings for semantic search');\n\n const embeddingConfig = config.embeddingConfig;\n \n if (!embeddingConfig) {\n throw new Error('Embedding configuration missing');\n }\n\n let provider;\n \n // Use the strongly typed nested provider configuration\n const providerType = embeddingConfig.type;\n const apiKey = embeddingConfig.apiKey;\n const model = embeddingConfig.model;\n const dimensions = embeddingConfig.dimensions;\n\n const serviceLogger = new Logger({ enabled: true, prefix: '[rag-embedding]' });\n\n if (providerType === 'gemini') {\n provider = new GeminiEmbeddingProvider({\n type: 'gemini',\n apiKey: apiKey,\n model: model,\n dimensions: dimensions\n } as GeminiProviderConfig, serviceLogger);\n } else {\n provider = new OpenAIEmbeddingProvider({\n type: 'openai',\n apiKey: apiKey,\n model: model,\n dimensions: dimensions\n } as OpenAIProviderConfig, serviceLogger);\n }\n\n const service = new EmbeddingServiceImpl(provider, serviceLogger, embeddingConfig);\n \n // We need usage info. 
The EmbeddingServiceImpl interface exposes only the embedding values, so we call the provider directly; its EmbeddingResult includes token usage alongside the vector.\n \n const resultWithUsage = await provider.generateEmbedding(userMessage);\n\n if (!resultWithUsage) {\n throw new Error('Failed to generate embedding');\n }\n\n // Track embedding spending if function provided\n // We use model from config or default\n const modelUsed = model || DEFAULT_EMBEDDING_MODEL;\n\n if (config.createEmbeddingSpending) {\n const embeddingSpending = config.createEmbeddingSpending(\n modelUsed,\n resultWithUsage.usage.totalTokens\n );\n spendingEntries.push(embeddingSpending);\n\n logger.info('Embedding generated successfully', {\n model: modelUsed,\n totalTokens: resultWithUsage.usage.totalTokens,\n costUsd: embeddingSpending.cost_usd,\n });\n }\n\n return resultWithUsage.embedding;\n}\n","import type { Payload } from 'payload';\nimport type { ChatEndpointConfig } from '../route.js';\nimport { logger } from '../../../../../core/logging/logger.js';\nimport { ChunkSource, SpendingEntry } from '../../../../../shared/index.js';\n\n/**\n * Saves chat session if function is provided\n */\nexport async function saveChatSessionIfNeeded(\n config: ChatEndpointConfig,\n payload: Payload,\n userId: string | number,\n conversationId: string | null,\n userMessage: string,\n assistantMessage: string,\n sources: ChunkSource[],\n spendingEntries: SpendingEntry[]\n): Promise<void> {\n if (!conversationId || !config.saveChatSession) {\n return;\n }\n\n await config.saveChatSession(\n payload,\n userId,\n conversationId,\n userMessage,\n assistantMessage,\n sources,\n spendingEntries,\n config.collectionName\n );\n\n logger.info('Chat session saved to PayloadCMS', {\n conversationId,\n });\n}\n","import type { Payload } from 'payload';\nimport type { ChatEndpointConfig } from '../route.js';\nimport { jsonResponse } from '../validators/index.js';\nimport { logger } from '../../../../../core/logging/logger.js';\n\n/**\n * Checks token limits before processing request\n */\nexport async function checkTokenLimitsIfNeeded(\n config: ChatEndpointConfig,\n payload: Payload,\n userId: string | number,\n userEmail: string,\n userMessage: string\n): Promise<Response | null> {\n if (!config.estimateTokensFromText || !config.checkTokenLimit) {\n return null; // No token limit check needed\n }\n\n const estimatedEmbeddingTokens = config.estimateTokensFromText(userMessage);\n const estimatedLLMTokens = config.estimateTokensFromText(userMessage) * 10; // rough heuristic: LLM context + response assumed ~10x the message tokens\n const estimatedTotalTokens = estimatedEmbeddingTokens + estimatedLLMTokens;\n\n const limitCheck = await config.checkTokenLimit(payload, userId, estimatedTotalTokens);\n\n if (!limitCheck.allowed) {\n logger.warn('Token limit exceeded for user', {\n userId,\n limit: limitCheck.limit,\n used: limitCheck.used,\n remaining: limitCheck.remaining,\n });\n return jsonResponse(\n {\n error: 'Has alcanzado tu límite diario de tokens.',\n limit_info: {\n limit: limitCheck.limit,\n used: limitCheck.used,\n remaining: limitCheck.remaining,\n reset_at: limitCheck.reset_at,\n },\n },\n { status: 429 }\n );\n }\n\n logger.info('Chat request started with token limit check passed', {\n userId,\n userEmail,\n limit: limitCheck.limit,\n used: limitCheck.used,\n remaining: limitCheck.remaining,\n });\n\n return null; // Token limit passed\n}\n","import type { Payload } from 'payload';\nimport type { 
ChatEndpointConfig } from '../route.js';\nimport { logger } from '../../../../../core/logging/logger.js';\nimport { SpendingEntry, SSEEvent } from '../../../../../shared/index.js';\n\n/**\n * Calculates total usage from spending entries\n */\nexport function calculateTotalUsage(spendingEntries: SpendingEntry[]): {\n totalTokens: number;\n totalCostUSD: number;\n} {\n const totalTokensUsed = spendingEntries.reduce(\n (sum, entry) => sum + entry.tokens.total,\n 0\n );\n const totalCostUSD = spendingEntries.reduce(\n (sum, entry) => sum + (entry.cost_usd || 0),\n 0\n );\n\n logger.info('Total token usage calculated', {\n totalTokens: totalTokensUsed,\n totalCostUsd: totalCostUSD,\n });\n\n return { totalTokens: totalTokensUsed, totalCostUSD };\n}\n\n/**\n * Sends usage statistics event to client\n */\nexport async function sendUsageStatsIfNeeded(\n config: ChatEndpointConfig,\n payload: Payload,\n userId: string | number,\n totalTokens: number,\n totalCostUSD: number,\n sendEvent: (event: SSEEvent) => void\n): Promise<void> {\n if (!config.getUserUsageStats) {\n return;\n }\n\n const usageStats = await config.getUserUsageStats(payload, userId);\n\n sendEvent({\n type: 'usage',\n data: {\n tokens_used: totalTokens,\n cost_usd: totalCostUSD,\n daily_limit: usageStats.limit,\n daily_used: usageStats.used,\n daily_remaining: usageStats.remaining,\n reset_at: usageStats.reset_at,\n },\n });\n}\n","import { CollectionSlug, Payload, PayloadRequest } from 'payload'\nimport { logger } from '../../../../core/logging/logger.js'\nimport type { ChunkSource, EmbeddingProviderConfig, RAGFeatureConfig, SpendingEntry, SSEEvent } from '../../../../shared/types/plugin-types.js'\nimport {\n executeRAGSearch,\n sendSSEEvent,\n type RAGSearchConfig,\n type TypesenseConnectionConfig,\n} from '../../index.js'\n\n// Import atomized handlers\nimport { generateEmbeddingWithTracking } from './handlers/embedding-handler.js'\nimport { saveChatSessionIfNeeded } from './handlers/session-handler.js'\nimport { checkTokenLimitsIfNeeded } from './handlers/token-limit-handler.js'\nimport { calculateTotalUsage, sendUsageStatsIfNeeded } from './handlers/usage-stats-handler.js'\nimport { validateChatRequest } from './validators/index.js'\n\n/**\n * Configuration for chat endpoint\n */\nexport type ChatEndpointConfig = {\n /** Collection name for chat sessions */\n collectionName: CollectionSlug;\n /** Check permissions function */\n checkPermissions: (request: PayloadRequest) => Promise<boolean>;\n /** Typesense connection config */\n typesense: TypesenseConnectionConfig\n /** RAG search configuration (full config for multi-agent resolution) */\n rag: RAGFeatureConfig\n /** Get Payload instance */\n getPayload: () => Promise<Payload>\n /** Embedding configuration */\n embeddingConfig?: EmbeddingProviderConfig\n /** Check token limit function */\n checkTokenLimit?: (\n payload: Payload,\n userId: string | number,\n tokens: number,\n ) => Promise<{\n allowed: boolean\n limit: number\n used: number\n remaining: number\n reset_at?: string\n }>\n /** Get user usage stats function */\n getUserUsageStats?: (payload: Payload, userId: string | number) => Promise<{\n limit: number\n used: number\n remaining: number\n reset_at?: string\n }>\n /** Save chat session function */\n saveChatSession?: (\n payload: Payload,\n userId: string | number,\n conversationId: string,\n userMessage: string,\n assistantMessage: string,\n sources: ChunkSource[],\n spendingEntries: SpendingEntry[],\n collectionName: CollectionSlug,\n ) => Promise<void>\n /** 
Handle streaming response function */\n handleStreamingResponse: (\n response: Response,\n controller: ReadableStreamDefaultController<Uint8Array>,\n encoder: TextEncoder,\n ) => Promise<{\n fullAssistantMessage: string\n conversationId: string | null\n sources: ChunkSource[]\n llmSpending: SpendingEntry\n }>\n /** Handle non-streaming response function */\n handleNonStreamingResponse: (\n data: Record<string, unknown>,\n controller: ReadableStreamDefaultController<Uint8Array>,\n encoder: TextEncoder,\n ) => Promise<{\n fullAssistantMessage: string\n conversationId: string | null\n sources: ChunkSource[]\n llmSpending: SpendingEntry\n }>\n /** Create embedding spending function */\n createEmbeddingSpending?: (model: string, tokens: number) => SpendingEntry\n /** Estimate tokens from text function */\n estimateTokensFromText?: (text: string) => number\n}\n\n/**\n * Create a parameterizable POST handler for chat endpoint\n */\nexport function createChatPOSTHandler(config: ChatEndpointConfig) {\n return async function POST(request: PayloadRequest) {\n try {\n // Validate request\n const validated = await validateChatRequest(request, config);\n if (!validated.success) {\n return validated.error;\n }\n\n const { userId, userEmail, payload, userMessage, body } = validated;\n\n // Resolve Agent Configuration\n let searchConfig: RAGSearchConfig;\n const agentSlug = body.agentSlug;\n \n if (agentSlug && config.rag?.agents) {\n const agent = config.rag.agents.find(a => a.slug === agentSlug);\n if (!agent) {\n return new Response(JSON.stringify({ error: `Agent not found: ${agentSlug}` }), { status: 404 });\n }\n searchConfig = {\n modelId: agent.slug,\n searchCollections: agent.searchCollections,\n kResults: agent.kResults,\n advancedConfig: config.rag.advanced\n };\n } else if (config.rag?.agents && config.rag.agents.length > 0) {\n // Use first agent as default\n const agent = config.rag.agents[0];\n if (!agent) throw new Error(\"Default agent not found\");\n searchConfig = {\n modelId: agent.slug,\n searchCollections: agent.searchCollections,\n kResults: agent.kResults,\n advancedConfig: config.rag.advanced\n };\n } else {\n return new Response(JSON.stringify({ error: 'No RAG configuration available' }), { status: 500 });\n }\n\n // Check token limits if configured\n const tokenLimitError = await checkTokenLimitsIfNeeded(\n config,\n payload,\n userId,\n userEmail,\n userMessage\n );\n if (tokenLimitError) {\n return tokenLimitError;\n }\n\n logger.info('Processing chat message', {\n userId,\n chatId: body.chatId || 'new',\n agentSlug: agentSlug || 'default',\n modelId: searchConfig.modelId,\n isFollowUp: !!body.chatId,\n hasSelectedDocuments: !!body.selectedDocuments,\n messageLength: userMessage.length,\n })\n\n // Create a streaming response\n const encoder = new TextEncoder()\n const stream = new ReadableStream({\n async start(controller) {\n const spendingEntries: SpendingEntry[] = []\n let fullAssistantMessage = ''\n let conversationIdCapture: string | null = null\n let sourcesCapture: ChunkSource[] = []\n\n try {\n const sendEvent = (event: SSEEvent) => sendSSEEvent(controller, encoder, event);\n\n // Generate embedding with tracking\n const queryEmbedding = await generateEmbeddingWithTracking(\n userMessage,\n config,\n spendingEntries\n );\n\n // Execute RAG search\n const searchResult = await executeRAGSearch(\n config.typesense,\n searchConfig,\n {\n userMessage,\n queryEmbedding,\n chatId: body.chatId,\n selectedDocuments: body.selectedDocuments,\n }\n );\n\n // Handle streaming or 
non-streaming response\n const streamResult = searchResult.isStreaming && searchResult.response.body\n ? await config.handleStreamingResponse(searchResult.response, controller, encoder)\n : await config.handleNonStreamingResponse(\n await searchResult.response.json(),\n controller,\n encoder\n );\n\n // Extract results\n fullAssistantMessage = streamResult.fullAssistantMessage;\n conversationIdCapture = streamResult.conversationId;\n sourcesCapture = streamResult.sources;\n spendingEntries.push(streamResult.llmSpending);\n\n // Calculate total usage\n const { totalTokens: totalTokensUsed, totalCostUSD } =\n calculateTotalUsage(spendingEntries);\n\n // Send usage stats\n await sendUsageStatsIfNeeded(\n config,\n payload,\n userId,\n totalTokensUsed,\n totalCostUSD,\n sendEvent\n );\n\n // Save session\n await saveChatSessionIfNeeded(\n config,\n payload,\n userId,\n conversationIdCapture,\n userMessage,\n fullAssistantMessage,\n sourcesCapture,\n spendingEntries\n );\n\n logger.info('Chat request completed successfully', {\n userId,\n conversationId: conversationIdCapture,\n totalTokens: totalTokensUsed,\n });\n controller.close();\n } catch (error) {\n logger.error('Fatal error in chat stream', error as Error, {\n userId,\n chatId: body.chatId,\n });\n sendSSEEvent(controller, encoder, {\n type: 'error',\n data: {\n error: error instanceof Error ? error.message : 'Error desconocido',\n },\n });\n controller.close();\n }\n },\n })\n\n return new Response(stream, {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n Connection: 'keep-alive',\n },\n })\n } catch (error) {\n logger.error('Error in chat API endpoint', error as Error, {\n userId: request.user?.id,\n })\n\n return new Response(\n JSON.stringify({\n error: 'Error al procesar tu mensaje. Por favor, inténtalo de nuevo.',\n details: error instanceof Error ? 
error.message : 'Error desconocido',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n },\n )\n }\n }\n}\n\n/**\n * Default export for Next.js App Router\n * Users should call createChatPOSTHandler with their config\n */\nexport { createChatPOSTHandler as POST }\n","/**\n * Stream handler utilities\n *\n * Shared utility functions for stream handlers\n */\n\n/**\n * Resolve document type from collection name\n */\nexport function resolveDocumentType(collectionName: string): string {\n if (collectionName === 'article_web_chunk') return 'article';\n if (collectionName === 'book_chunk') return 'book';\n return 'document';\n}\n\n/**\n * Estimate tokens from text (simple word-based estimation)\n * More accurate implementations can be provided via callbacks\n */\nexport function estimateTokensFromText(text: string): number {\n // Simple estimation: ~1.3 tokens per word\n const words = text.trim().split(/\\s+/).length;\n return Math.ceil(words * 1.3);\n}\n","/**\n * Streaming response handler\n *\n * Handles streaming responses from Typesense conversational search\n */\n\nimport { parseConversationEvent, extractSourcesFromResults, buildContextText } from '../stream-handler.js'\nimport { sendSSEEvent } from '../utils/sse-utils.js'\nimport { logger } from '../../../core/logging/logger.js'\nimport { resolveDocumentType, estimateTokensFromText } from './utils.js'\nimport { ChunkSource, SpendingEntry } from '../../../shared/index.js';\n\n/**\n * Default implementation for handling streaming responses\n */\nexport async function defaultHandleStreamingResponse(\n response: Response,\n controller: ReadableStreamDefaultController<Uint8Array>,\n encoder: TextEncoder,\n): Promise<{\n fullAssistantMessage: string;\n conversationId: string | null;\n sources: ChunkSource[];\n llmSpending: SpendingEntry;\n}> {\n logger.debug('Starting streaming response handling')\n\n if (!response.body) {\n throw new Error('Response body is null');\n }\n\n const reader = response.body.getReader();\n const decoder = new TextDecoder();\n let buffer = '';\n let sources: ChunkSource[] = [];\n let hasCollectedSources = false;\n let conversationId: string | null = null;\n let contextText = ''; // To estimate LLM tokens\n let fullAssistantMessage = '';\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n logger.debug('Streaming response completed');\n break;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n const event = parseConversationEvent(line);\n if (!event) continue;\n\n // Handle [DONE] event\n if (event.raw === '[DONE]') {\n sendSSEEvent(controller, encoder, { type: 'done', data: '' });\n continue;\n }\n\n // Capture conversation_id\n if (!conversationId && event.conversationId) {\n conversationId = event.conversationId;\n logger.debug('Conversation ID captured', { conversationId });\n sendSSEEvent(controller, encoder, { type: 'conversation_id', data: conversationId });\n }\n\n // Extract sources\n if (!hasCollectedSources && event.results) {\n sources = extractSourcesFromResults(event.results, resolveDocumentType);\n contextText = buildContextText(event.results);\n\n if (sources.length > 0) {\n sendSSEEvent(controller, encoder, { type: 'sources', data: sources });\n }\n\n hasCollectedSources = true;\n }\n\n // Stream conversation tokens\n if (event.message) {\n fullAssistantMessage += event.message;\n sendSSEEvent(controller, encoder, { type: 'token', 
data: event.message });\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n\n // Estimate LLM tokens (context + user message + response)\n const llmInputTokens = estimateTokensFromText(contextText);\n const llmOutputTokens = estimateTokensFromText(fullAssistantMessage);\n\n // Track LLM spending (defaults to a simple model)\n const llmSpending: SpendingEntry = {\n service: 'openai_llm',\n model: 'gpt-4o-mini',\n tokens: {\n input: llmInputTokens,\n output: llmOutputTokens,\n total: llmInputTokens + llmOutputTokens,\n },\n cost_usd: (llmInputTokens * 0.00000015) + (llmOutputTokens * 0.0000006), // gpt-4o-mini pricing\n timestamp: new Date().toISOString(),\n };\n\n logger.info('LLM cost calculated', {\n inputTokens: llmInputTokens,\n outputTokens: llmOutputTokens,\n totalTokens: llmSpending.tokens.total,\n costUsd: llmSpending.cost_usd,\n })\n\n return {\n fullAssistantMessage,\n conversationId,\n sources,\n llmSpending,\n };\n}\n","/**\n * Non-streaming response handler\n *\n * Handles non-streaming (regular JSON) responses from Typesense conversational search\n */\n\nimport { extractSourcesFromResults, buildContextText } from '../stream-handler.js'\nimport { sendSSEEvent } from '../utils/sse-utils.js'\nimport { logger } from '../../../core/logging/logger.js'\nimport { resolveDocumentType, estimateTokensFromText } from './utils.js'\nimport { ChunkSource, SpendingEntry, TypesenseRAGSearchResult } from '../../../shared/index.js';\n\n/**\n * Default implementation for handling non-streaming responses\n */\nexport async function defaultHandleNonStreamingResponse(\n data: Record<string, unknown>,\n controller: ReadableStreamDefaultController<Uint8Array>,\n encoder: TextEncoder,\n): Promise<{\n fullAssistantMessage: string;\n conversationId: string | null;\n sources: ChunkSource[];\n llmSpending: SpendingEntry;\n}> {\n logger.debug('Using non-streaming fallback for response handling');\n\n // Type assertion for accessing known properties from the Typesense response\n const typedData = data as {\n conversation?: { conversation_id?: string; answer?: string };\n conversation_id?: string;\n response?: string;\n message?: string;\n results?: unknown[];\n };\n\n let conversationId: string | null = null;\n if (typedData.conversation?.conversation_id) {\n conversationId = typedData.conversation.conversation_id;\n } else if (typedData.conversation_id) {\n conversationId = typedData.conversation_id;\n }\n\n let fullAnswer = '';\n if (typedData.conversation?.answer) {\n fullAnswer = typedData.conversation.answer;\n } else if (typedData.response || typedData.message) {\n fullAnswer = typedData.response || typedData.message || '';\n }\n\n const sources = extractSourcesFromResults((typedData.results || []) as TypesenseRAGSearchResult[], resolveDocumentType);\n const contextText = buildContextText((typedData.results || []) as TypesenseRAGSearchResult[]);\n\n // Simulate streaming by sending tokens word by word\n if (fullAnswer) {\n const words = fullAnswer.split(' ');\n for (let i = 0; i < words.length; i++) {\n const token = i === 0 ? 
words[i] : ' ' + words[i];\n if (token) {\n sendSSEEvent(controller, encoder, { type: 'token', data: token });\n }\n }\n }\n\n if (conversationId) {\n sendSSEEvent(controller, encoder, { type: 'conversation_id', data: conversationId });\n }\n\n if (sources.length > 0) {\n sendSSEEvent(controller, encoder, { type: 'sources', data: sources });\n }\n\n sendSSEEvent(controller, encoder, { type: 'done', data: '' });\n\n // Estimate LLM tokens\n const llmInputTokens = estimateTokensFromText(contextText);\n const llmOutputTokens = estimateTokensFromText(fullAnswer);\n\n const llmSpending: SpendingEntry = {\n service: 'openai_llm',\n model: 'gpt-4o-mini',\n tokens: {\n input: llmInputTokens,\n output: llmOutputTokens,\n total: llmInputTokens + llmOutputTokens,\n },\n cost_usd: (llmInputTokens * 0.00000015) + (llmOutputTokens * 0.0000006),\n timestamp: new Date().toISOString(),\n };\n\n return {\n fullAssistantMessage: fullAnswer,\n conversationId,\n sources,\n llmSpending,\n };\n}\n","import { Payload, PayloadRequest } from 'payload';\nimport { jsonResponse } from '../validators/index.js'\nimport { logger } from '../../../../../core/logging/logger.js'\nimport { SessionConfig, getActiveSession, getSessionByConversationId, closeSession } from '../../../handlers/session-handlers.js';\n\n/**\n * Configuration for session endpoints\n */\nexport type SessionEndpointConfig = {\n /** Get Payload instance */\n getPayload: () => Promise<Payload>\n checkPermissions: (request: PayloadRequest) => Promise<boolean>;\n /** Session configuration */\n sessionConfig?: SessionConfig\n}\n\n/**\n * Create a parameterizable GET handler for session endpoint\n *\n * Query params:\n * - ?active=true → Get the most recent active session\n * - ?conversationId=xxx → Get a specific session by conversation ID\n */\nexport function createSessionGETHandler(config: SessionEndpointConfig) {\n return async function GET(request: PayloadRequest) {\n try {\n if (!await config.checkPermissions(request)) {\n return jsonResponse({ error: 'No tienes permisos para acceder a esta sesión.' }, { status: 403 })\n }\n const userId = request.user?.id\n\n if (!request.url || !userId) {\n return jsonResponse({ error: 'URL not found' }, { status: 400 })\n }\n\n const { searchParams } = new URL(request.url)\n const isActive = searchParams.get('active') === 'true'\n const conversationId = searchParams.get('conversationId')\n\n // Get Payload instance\n const payload = await config.getPayload()\n\n // Handle active session request\n if (isActive) {\n const session = await getActiveSession(payload, userId, config.sessionConfig)\n\n if (!session) {\n return jsonResponse({ error: 'No hay sesión activa.' }, { status: 404 })\n }\n\n return jsonResponse(session)\n }\n\n // Handle specific session request\n if (!conversationId) {\n return jsonResponse(\n { error: 'Se requiere conversationId o active=true.' },\n { status: 400 },\n )\n }\n\n const session = await getSessionByConversationId(\n payload,\n userId,\n conversationId,\n config.sessionConfig,\n )\n\n if (!session) {\n return jsonResponse({ error: 'Sesión de chat no encontrada.' }, { status: 404 })\n }\n\n return jsonResponse(session)\n } catch (error) {\n logger.error('Error retrieving chat session', error as Error, {\n userId: request.user?.id,\n })\n\n return jsonResponse(\n {\n error: 'Error al recuperar la sesión.',\n details: error instanceof Error ? 
error.message : 'Error desconocido',\n },\n { status: 500 },\n )\n }\n }\n}\n\n/**\n * Create a parameterizable DELETE handler for session endpoint\n *\n * DELETE /api/chat/session?conversationId=xxx\n * Close a chat session\n */\nexport function createSessionDELETEHandler(config: SessionEndpointConfig) {\n return async function DELETE(request: PayloadRequest) {\n try {\n if (!await config.checkPermissions(request)) {\n return jsonResponse({ error: 'No tienes permisos para acceder a esta sesión.' }, { status: 403 })\n }\n const userId = request.user?.id\n if (!request.url || !userId) {\n return jsonResponse({ error: 'URL not found' }, { status: 400 })\n }\n\n const { searchParams } = new URL(request.url)\n const conversationId = searchParams.get('conversationId')\n\n if (!conversationId) {\n return jsonResponse(\n { error: 'Se requiere un conversationId válido.' },\n { status: 400 },\n )\n }\n\n // Get Payload instance\n const payload = await config.getPayload()\n\n logger.info('Closing chat session', { conversationId, userId })\n\n const session = await closeSession(payload, userId, conversationId, config.sessionConfig)\n\n if (!session) {\n return jsonResponse(\n { error: 'Sesión de chat no encontrada o no tienes permisos.' },\n { status: 404 },\n )\n }\n\n logger.info('Chat session closed successfully', {\n conversationId,\n totalTokens: session.total_tokens,\n totalCost: session.total_cost,\n })\n\n return jsonResponse({\n success: true,\n message: 'Sesión cerrada correctamente',\n session: {\n conversation_id: conversationId,\n status: 'closed',\n total_tokens: session.total_tokens,\n total_cost: session.total_cost,\n },\n })\n } catch (error) {\n logger.error('Error closing chat session', error as Error, {\n conversationId: request.url ? new URL(request.url).searchParams.get('conversationId') : undefined,\n userId: request.user?.id,\n })\n\n return jsonResponse(\n {\n error: 'Error al cerrar la sesión. Por favor, inténtalo de nuevo.',\n details: error instanceof Error ? error.message : 'Error desconocido',\n },\n { status: 500 },\n )\n }\n }\n}\n\n/**\n * Default exports for Next.js App Router\n */\nexport { createSessionGETHandler as GET, createSessionDELETEHandler as DELETE }\n","import { PayloadRequest } from 'payload'\nimport { createTypesenseClient } from '../../../../../core/client/typesense-client.js'\nimport { fetchChunkById, type TypesenseConnectionConfig } from '../../../index.js'\nimport { jsonResponse } from '../../chat/validators/index.js'\nimport { logger } from '../../../../../core/logging/logger.js'\n\n/**\n * Configuration for chunks endpoint\n */\nexport type ChunksEndpointConfig = {\n /** Typesense connection config */\n typesense: TypesenseConnectionConfig\n /** Check permissions function */\n checkPermissions: (request: PayloadRequest) => Promise<boolean>;\n /** Valid collections for chunks */\n validCollections: string[]\n}\n\n/**\n * Create a parameterizable GET handler for chunks endpoint\n *\n * GET /api/chat/chunks/[id]?collection=article_web_chunk\n * Fetch the full chunk text from Typesense by document ID\n */\nexport function createChunksGETHandler(config: ChunksEndpointConfig) {\n return async function GET(\n request: PayloadRequest\n ) {\n try {\n if (!await config.checkPermissions(request)) {\n return jsonResponse({ error: 'No tienes permisos para acceder a este chunk.' 
}, { status: 403 })\n }\n if (!request.url || !request.user) {\n return jsonResponse({ error: 'URL not found' }, { status: 400 })\n }\n const id = request.routeParams?.id\n const url = new URL(request.url)\n const collectionName = url.searchParams.get('collection')\n\n // Validate chunk ID\n if (!id) {\n return jsonResponse({ error: 'Se requiere el ID del chunk' }, { status: 400 })\n }\n\n // Validate collection name\n if (!collectionName) {\n return jsonResponse(\n {\n error: 'Se requiere el parámetro collection',\n collections: config.validCollections,\n },\n { status: 400 },\n )\n }\n\n // Get Typesense client\n const client = createTypesenseClient(config.typesense)\n\n // Use the parameterizable function from the package\n const chunkData = await fetchChunkById(client, {\n chunkId: id as string,\n collectionName,\n validCollections: config.validCollections,\n })\n\n // Return the chunk data\n return jsonResponse(chunkData)\n } catch (error: unknown) {\n logger.error('Error fetching chunk', error as Error, {\n chunkId: request.routeParams?.id,\n collection: request.url ? new URL(request.url).searchParams.get('collection') : undefined,\n })\n\n // Handle known errors\n if (error instanceof Error) {\n if (error.message.includes('Invalid collection')) {\n return jsonResponse(\n {\n error: error.message,\n collections: config.validCollections,\n },\n { status: 400 },\n )\n }\n if (error.message.includes('not found')) {\n return jsonResponse({ error: 'Chunk no encontrado' }, { status: 404 })\n }\n }\n\n return jsonResponse(\n {\n error: 'Error al obtener el chunk',\n details: error instanceof Error ? error.message : 'Error desconocido',\n },\n { status: 500 },\n )\n }\n }\n}\n\n/**\n * Default export for Next.js App Router\n */\nexport { createChunksGETHandler as GET }\n","import type { PayloadRequest } from 'payload';\nimport { RAGFeatureConfig } from '../../../../../shared/types/plugin-types.js';\nimport { jsonResponse } from '../validators/index.js';\n\nexport type AgentsEndpointConfig = {\n ragConfig: RAGFeatureConfig;\n checkPermissions: (req: PayloadRequest) => Promise<boolean>;\n};\n\nexport function createAgentsGETHandler(config: AgentsEndpointConfig) {\n return async function GET() {\n try {\n const agents = config.ragConfig?.agents || [];\n \n // Map to PublicAgentInfo\n const publicAgents = agents.map(agent => ({\n slug: agent.slug,\n name: agent.name || agent.slug\n }));\n\n return jsonResponse({ agents: publicAgents }, { status: 200 });\n } catch (error) {\n return jsonResponse({ error: 'Internal Server Error' }, { status: 500 });\n }\n };\n}\n","/**\n * Payload CMS adapters for RAG endpoints\n *\n * These adapters convert the RAG API handlers (designed for standard Request/Response)\n * into Payload CMS handlers that work with Payload's endpoint system.\n */\n\nimport type { PayloadHandler } from \"payload\";\nimport type { TypesenseRAGPluginConfig } from \"../../plugin/rag-types.js\";\nimport { createChatPOSTHandler } from \"./endpoints/chat/route.js\";\nimport { defaultHandleNonStreamingResponse, defaultHandleStreamingResponse } from \"./stream-handlers/index.js\";\nimport { createSessionDELETEHandler, createSessionGETHandler } from \"./endpoints/chat/session/route.js\";\nimport { createChunksGETHandler } from \"./endpoints/chunks/[id]/route.js\";\nimport { createAgentsGETHandler } from \"./endpoints/chat/agents/route.js\";\n\n/**\n * Creates Payload handlers for RAG endpoints\n *\n * @param config - RAG plugin configuration (composable, doesn't depend on 
ModularPluginConfig)\n */\nexport function createRAGPayloadHandlers(\n config: TypesenseRAGPluginConfig\n): Array<{ path: string; method: 'connect' | 'delete' | 'get' | 'head' | 'options' | 'patch' | 'post' | 'put'; handler: PayloadHandler }> {\n const endpoints: Array<{ path: string; method: 'connect' | 'delete' | 'get' | 'head' | 'options' | 'patch' | 'post' | 'put'; handler: PayloadHandler }> = [];\n\n // Validate required config\n if (!config.agents || config.agents.length === 0 || !config.callbacks) {\n return endpoints;\n }\n\n const { agents, callbacks, typesense } = config;\n\n // Get valid collections from agents configuration\n const agentCollections = agents.flatMap(agent => agent.searchCollections) || [];\n const validCollections = Array.from(new Set(agentCollections));\n\n // Build RAG feature config for handlers that still need it\n const ragFeatureConfig = {\n enabled: true,\n agents,\n callbacks,\n hybrid: config.hybrid,\n hnsw: config.hnsw,\n advanced: config.advanced,\n };\n\n // Add endpoints\n endpoints.push({\n path: \"/chat\",\n method: \"post\" as const,\n handler: createChatPOSTHandler({\n collectionName: 'chat-sessions', // Default fallback\n checkPermissions: callbacks.checkPermissions,\n typesense,\n rag: ragFeatureConfig,\n getPayload: callbacks.getPayload,\n checkTokenLimit: callbacks.checkTokenLimit,\n getUserUsageStats: callbacks.getUserUsageStats,\n saveChatSession: callbacks.saveChatSession,\n handleStreamingResponse: defaultHandleStreamingResponse,\n handleNonStreamingResponse: defaultHandleNonStreamingResponse,\n createEmbeddingSpending: callbacks.createEmbeddingSpending,\n estimateTokensFromText: callbacks.estimateTokensFromText,\n embeddingConfig: config.embeddingConfig,\n }),\n });\n\n endpoints.push({\n path: \"/chat/session\",\n method: \"get\" as const,\n handler: createSessionGETHandler({\n getPayload: callbacks.getPayload,\n checkPermissions: callbacks.checkPermissions,\n }),\n });\n\n endpoints.push({\n path: \"/chat/session\",\n method: \"delete\" as const,\n handler: createSessionDELETEHandler({\n getPayload: callbacks.getPayload,\n checkPermissions: callbacks.checkPermissions,\n }),\n });\n\n endpoints.push({\n path: \"/chat/chunks/:id\",\n method: \"get\" as const,\n handler: createChunksGETHandler({\n typesense,\n checkPermissions: callbacks.checkPermissions,\n validCollections,\n }),\n });\n\n endpoints.push({\n path: \"/chat/agents\",\n method: \"get\" as const,\n handler: createAgentsGETHandler({\n ragConfig: ragFeatureConfig,\n checkPermissions: callbacks.checkPermissions,\n }),\n });\n\n return endpoints;\n}\n","import type { PayloadHandler } from \"payload\";\nimport { ModularPluginConfig } from \"../../../../index.js\";\n\n/**\n * Creates a handler for listing available search collections\n */\nexport const createCollectionsHandler = (\n pluginOptions: ModularPluginConfig\n): PayloadHandler => {\n return () => {\n try {\n // Flatten table configs to collections list\n const collections: Array<Record<string, unknown>> = [];\n for (const [slug, tableConfigs] of Object.entries(\n pluginOptions.collections || {}\n )) {\n if (Array.isArray(tableConfigs)) {\n // Get first enabled config for collection metadata\n const firstEnabledConfig = tableConfigs.find((config) => config.enabled);\n if (firstEnabledConfig) {\n // Extract fields based on mode\n let fields: { name: string; facet?: boolean; index?: boolean }[] = [];\n fields = firstEnabledConfig.fields;\n const facetFields = fields.filter(f => f.facet).map(f => f.name);\n const 
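`createRAGPayloadHandlers` returns entries already shaped like Payload endpoints, so they can be spread straight into a config. A sketch, assuming the function is reachable from your import path (the plugin factory further down calls it internally, so manual wiring is only needed if you bypass the plugin); note the function returns an empty array when agents or callbacks are missing, so the spread is safe either way:

```typescript
import type { Config } from 'payload';
// Import path is an assumption; the factory lives in features/rag/endpoints.ts.
import { createRAGPayloadHandlers } from '@nexo-labs/payload-typesense';

const addRAGEndpoints = (
  config: Config,
  ragConfig: Parameters<typeof createRAGPayloadHandlers>[0],
): Config => ({
  ...config,
  endpoints: [...(config.endpoints ?? []), ...createRAGPayloadHandlers(ragConfig)],
});
```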
searchFields = fields.filter(f => f.index !== false).map(f => f.name); // A field is searchable unless index is explicitly false; the UI consumes this list to know which fields are queried.\n \n collections.push({\n slug,\n displayName:\n firstEnabledConfig.displayName ||\n slug.charAt(0).toUpperCase() + slug.slice(1),\n facetFields,\n searchFields,\n });\n }\n }\n }\n\n return Response.json({\n categorized: false, // Categorized setting moved or removed\n collections,\n });\n } catch (_error) {\n // Handle collections error\n return Response.json(\n { error: \"Failed to get collections\" },\n { status: 500 }\n );\n }\n };\n};\n\n\n\n","/**\n * Simple in-memory cache for search results\n * In production, consider using Redis or similar\n */\n\nimport type { CacheEntry, CacheOptions } from '../types/types.js'\n\nclass SearchCache<T = unknown> {\n private cache = new Map<string, CacheEntry<T>>()\n private readonly defaultTTL: number\n private readonly maxSize: number\n\n constructor(options: CacheOptions = {}) {\n this.defaultTTL = options.ttl || 5 * 60 * 1000 // 5 minutes default\n this.maxSize = options.maxSize || 1000 // 1000 entries default\n }\n\n /**\n * Generate cache key from search parameters\n */\n private generateKey(query: string, collection?: string, params?: Record<string, any>): string {\n const baseKey = `${collection || 'universal'}:${query}`\n if (params) {\n const sortedParams = Object.keys(params)\n .sort()\n .map(key => `${key}=${params[key]}`)\n .join('&')\n return `${baseKey}:${sortedParams}`\n }\n return baseKey\n }\n\n /**\n * Clear expired entries\n */\n cleanup(): void {\n const now = Date.now()\n for (const [key, entry] of this.cache.entries()) {\n if (now - entry.timestamp > entry.ttl) {\n this.cache.delete(key)\n }\n }\n }\n\n /**\n * Clear cache entries matching pattern\n */\n clear(pattern?: string): void {\n if (!pattern) {\n this.cache.clear()\n return\n }\n\n for (const key of this.cache.keys()) {\n if (key.includes(pattern)) {\n this.cache.delete(key)\n }\n }\n }\n\n /**\n * Get cached search result\n */\n get(query: string, collection?: string, params?: Record<string, any>): null | T {\n const key = this.generateKey(query, collection || '', params)\n const entry = this.cache.get(key)\n\n if (!entry) {\n return null\n }\n\n // Check if entry has expired\n if (Date.now() - entry.timestamp > entry.ttl) {\n this.cache.delete(key)\n return null\n }\n\n return entry.data\n }\n\n /**\n * Get cache statistics\n */\n getStats(): { hitRate?: number; maxSize: number; size: number } {\n return {\n maxSize: this.maxSize,\n size: this.cache.size\n }\n }\n\n /**\n * Check if cache has valid entry\n */\n has(query: string, collection?: string, params?: Record<string, any>): boolean {\n return this.get(query, collection, params) !== null\n }\n\n /**\n * Set cached search result\n */\n set(\n query: string, \n data: T, \n collection?: string, \n params?: Record<string, any>,\n ttl?: number\n ): void {\n const key = this.generateKey(query, collection || '', params)\n \n // Enforce max size by removing oldest entries\n if (this.cache.size >= this.maxSize) {\n const oldestKey = this.cache.keys().next().value\n if (oldestKey) {\n this.cache.delete(oldestKey)\n }\n }\n\n this.cache.set(key, {\n 
data,\n timestamp: Date.now(),\n ttl: ttl || this.defaultTTL\n })\n }\n}\n\n// Global cache instance\nexport const searchCache = new SearchCache({\n maxSize: 1000,\n ttl: 5 * 60 * 1000 // 5 minutes\n})\n\n// Cleanup expired entries every 10 minutes\nsetInterval(() => {\n searchCache.cleanup()\n}, 10 * 60 * 1000)\n","/**\n * Default values for vector search parameters\n *\n * K is set high because:\n * - Documents are split into chunks (avg 5-10 chunks per doc)\n * - To get 20 unique documents, we need K = 20 docs × 7 chunks/doc = 140\n * - Higher K = better coverage but slightly slower (still fast with good indexing)\n */\nexport const DEFAULT_K = 150; // High K for good chunk coverage\nexport const DEFAULT_PAGE = 1;\nexport const DEFAULT_PER_PAGE = 20; // Show more results per page (was 10)\nexport const DEFAULT_ALPHA = 0.7;\n\n/**\n * Default search field names when not specified\n */\nexport const DEFAULT_SEARCH_FIELDS = [\"title\", \"content\"];\n\n/**\n * Default snippet threshold for search results\n */\nexport const DEFAULT_SNIPPET_THRESHOLD = 30;\n\n/**\n * Default typo tokens threshold\n */\nexport const DEFAULT_TYPO_TOKENS_THRESHOLD = 1;\n\n/**\n * Default number of typos allowed\n */\nexport const DEFAULT_NUM_TYPOS = 0;\n","import { SearchResponse } from \"typesense/lib/Typesense/Documents.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport type { CombinedSearchResult, CollectionSearchResult, SearchHit } from \"../types.js\";\n\n/**\n * Processes traditional search results from a single collection\n */\nexport const processSingleCollectionTraditionalResults = (\n results: SearchResponse<object>,\n collectionName: string,\n config: TableConfig\n): CollectionSearchResult => {\n return {\n collection: collectionName,\n displayName: config?.displayName || collectionName,\n icon: \"📄\",\n found: results.found,\n hits: results.hits?.map((hit): SearchHit => ({\n ...hit,\n collection: collectionName,\n displayName: config?.displayName || collectionName,\n icon: \"📄\",\n document: (hit.document || {}) as Record<string, unknown>,\n })) || [],\n };\n};\n\n/**\n * Combines traditional search results from multiple collections\n */\nexport const combineTraditionalResults = (\n results: CollectionSearchResult[],\n options: {\n page: number;\n per_page: number;\n query: string;\n }\n): CombinedSearchResult => {\n const { page, per_page, query } = options;\n\n const combinedHits = results.flatMap((result) => result.hits || []);\n const totalFound = results.reduce(\n (sum, result) => sum + (result.found || 0),\n 0\n );\n\n // Sort by text match score\n combinedHits.sort((a, b) => (b.text_match || 0) - (a.text_match || 0));\n\n const searchResult: CombinedSearchResult = {\n collections: results.map((r) => ({\n collection: r.collection,\n displayName: r.displayName,\n error: r.error,\n found: r.found || 0,\n icon: r.icon,\n })),\n found: totalFound,\n hits: combinedHits.slice(0, per_page),\n page,\n request_params: { per_page, query },\n search_cutoff: false,\n search_time_ms: 0,\n };\n\n return searchResult;\n};\n\n","import {\n DEFAULT_NUM_TYPOS,\n DEFAULT_SEARCH_FIELDS,\n DEFAULT_SNIPPET_THRESHOLD,\n DEFAULT_TYPO_TOKENS_THRESHOLD,\n} from \"../constants.js\";\nimport type { TraditionalSearchParams } from \"../types.js\";\n\n/**\n * Builds traditional search parameters for a single collection\n */\nexport const buildTraditionalSearchParams = (\n query: string,\n options: {\n page: number;\n per_page: number;\n searchFields?: string[];\n sort_by?: string;\n 
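The cache above keys entries by `collection:query:sortedParams`, evicts the oldest entry once `maxSize` is reached, and lazily drops expired entries on `get`. A small behavioral sketch using the exported singleton (the import path is internal and assumed here):

```typescript
// import { searchCache } from '.../shared/cache/cache.js' (internal path, assumed)

searchCache.set('typescript', { hits: [] }, 'posts', { page: 1 }, 1_000); // 1 s TTL
searchCache.get('typescript', 'posts', { page: 1 });   // -> { hits: [] }

// Param order is irrelevant: generateKey sorts param names before joining, so
// { per_page: 10, page: 1 } and { page: 1, per_page: 10 } map to the same entry.
setTimeout(() => {
  searchCache.get('typescript', 'posts', { page: 1 }); // -> null (entry expired)
}, 1_500);
```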
exclude_fields?: string;\n }\n): TraditionalSearchParams => {\n const {\n page,\n per_page,\n searchFields = DEFAULT_SEARCH_FIELDS,\n sort_by,\n exclude_fields,\n } = options;\n\n const params: TraditionalSearchParams = {\n highlight_full_fields: searchFields.join(\",\"),\n num_typos: DEFAULT_NUM_TYPOS,\n page,\n per_page,\n q: query,\n query_by: searchFields.join(\",\"),\n snippet_threshold: DEFAULT_SNIPPET_THRESHOLD,\n typo_tokens_threshold: DEFAULT_TYPO_TOKENS_THRESHOLD,\n exclude_fields: exclude_fields,\n sort_by: sort_by,\n };\n\n return params;\n};\n","import type { Client } from \"typesense\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { processSingleCollectionTraditionalResults } from \"../results/process-traditional-results.js\";\nimport type { CollectionSearchResult } from \"../types.js\";\nimport { buildTraditionalSearchParams } from \"./build-params.js\";\n\n/**\n * Performs a traditional search on a single collection\n */\nexport const searchTraditionalCollection = async (\n typesenseClient: Client,\n collectionName: string,\n config: TableConfig,\n options: {\n query: string;\n page: number;\n per_page: number;\n searchFields?: string[];\n sort_by?: string;\n exclude_fields?: string;\n skipChunkFilter?: boolean; // Skip the !is_chunk filter for simple searches\n }\n): Promise<CollectionSearchResult> => {\n try {\n const buildOptions: {\n page: number;\n per_page: number;\n searchFields?: string[];\n sort_by?: string;\n exclude_fields?: string;\n } = {\n page: options.page,\n per_page: options.per_page,\n };\n\n // Extract search fields from config if not provided in options\n if (options.searchFields) {\n buildOptions.searchFields = options.searchFields;\n } else if (config) {\n let fields: { name: string; index?: boolean; type?: string }[] = [];\n fields = config.fields;\n // Filter for indexed fields that are searchable (string or string[] types only)\n // Typesense only accepts string/string[] fields in query_by parameter\n const searchFields = fields\n .filter(f =>\n f.index !== false &&\n (f.type === 'string' || f.type === 'string[]')\n )\n .map(f => f.name);\n if (searchFields.length > 0) {\n buildOptions.searchFields = searchFields;\n }\n }\n\n if (options.sort_by) {\n buildOptions.sort_by = options.sort_by;\n }\n\n if (options.exclude_fields) {\n buildOptions.exclude_fields = options.exclude_fields;\n }\n\n const searchParameters = buildTraditionalSearchParams(\n options.query,\n buildOptions\n );\n\n // Try to add chunk filter, but handle gracefully if schema doesn't support it\n // Skip chunk filter for simple searches since we're already searching main collections only\n if (!options.skipChunkFilter) {\n try {\n // First check if schema supports is_chunk field\n const collectionSchema = await typesenseClient\n .collections(collectionName)\n .retrieve();\n\n const fieldNames = collectionSchema.fields?.map(f => f.name) || [];\n if (fieldNames.includes(\"is_chunk\")) {\n // Schema supports chunking, add filter\n searchParameters.filter_by = \"!is_chunk:true\";\n }\n // If schema doesn't support is_chunk, don't add filter (backward compatibility)\n } catch (schemaError: unknown) {\n // If we can't retrieve schema, don't add filter (will work for old collections)\n }\n }\n\n const results = await typesenseClient\n .collections(collectionName)\n .documents()\n .search(searchParameters);\n\n return processSingleCollectionTraditionalResults(\n results,\n collectionName,\n config\n );\n } catch (error) {\n return {\n collection: 
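A worked example of `buildTraditionalSearchParams`: with no explicit `searchFields`, the `DEFAULT_SEARCH_FIELDS` (`title`, `content`) drive both `query_by` and highlighting, and the typo and snippet values come from the constants above:

```typescript
const params = buildTraditionalSearchParams('payload cms', { page: 1, per_page: 20 });
// params = {
//   highlight_full_fields: 'title,content',
//   num_typos: 0,                 // DEFAULT_NUM_TYPOS
//   page: 1,
//   per_page: 20,
//   q: 'payload cms',
//   query_by: 'title,content',
//   snippet_threshold: 30,        // DEFAULT_SNIPPET_THRESHOLD
//   typo_tokens_threshold: 1,     // DEFAULT_TYPO_TOKENS_THRESHOLD
//   exclude_fields: undefined,
//   sort_by: undefined,
// }
```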
collectionName,\n displayName: config?.displayName || collectionName,\n error: error instanceof Error ? error.message : \"Unknown error\",\n found: 0,\n hits: [],\n icon: \"📄\",\n };\n }\n};\n","import type { Client } from \"typesense\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { searchTraditionalCollection } from \"../../../traditional/search-collection.js\";\nimport type { UniversalSearchOptions, CombinedSearchResult } from \"../../../types.js\";\nimport { combineTraditionalResults } from \"../../../results/process-traditional-results.js\";\nimport { searchCache } from \"../../../../../shared/cache/cache.js\";\nimport { logger } from \"../../../../../core/logging/logger.js\";\n\nexport const performTraditionalMultiCollectionSearch = async (\n typesenseClient: Client,\n enabledCollections: Array<[string, TableConfig]>,\n query: string,\n options: UniversalSearchOptions\n): Promise<CombinedSearchResult> => {\n logger.info('Performing traditional multi-collection search', {\n query,\n collections: enabledCollections.map(([name]) => name),\n });\n\n // Determine search fields (override if query_by is provided)\n const searchFieldsOverride = options.query_by\n ? options.query_by.split(',').map(f => f.trim())\n : undefined;\n\n const searchPromises = enabledCollections.map(\n async ([collectionName, config]) => {\n try {\n const result = await searchTraditionalCollection(\n typesenseClient,\n collectionName,\n config,\n {\n query,\n page: options.page,\n per_page: options.per_page,\n ...(searchFieldsOverride\n ? { searchFields: searchFieldsOverride }\n : (() => {\n // Extract default search fields from config\n if (!config) return {};\n let fields: { name: string; index?: boolean; type?: string }[] = [];\n fields = config.fields;\n // Filter for indexed fields that are searchable (string or string[] types only)\n // Typesense only accepts string/string[] fields in query_by parameter\n const searchFields = fields\n .filter(f =>\n f.index !== false &&\n (f.type === 'string' || f.type === 'string[]')\n )\n .map(f => f.name);\n return searchFields.length > 0 ? 
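The backward-compatibility probe inside `searchTraditionalCollection` can be read in isolation: the `!is_chunk:true` filter is only added when the live schema actually declares an `is_chunk` field, so collections created before chunking keep working. A standalone sketch of that decision:

```typescript
import type { Client } from 'typesense';

// Returns the filter to apply, or undefined when the schema predates chunking
// (or cannot be read, which is treated the same way).
async function chunkFilterFor(client: Client, collection: string): Promise<string | undefined> {
  try {
    const schema = await client.collections(collection).retrieve();
    const fieldNames = schema.fields?.map((f) => f.name) ?? [];
    return fieldNames.includes('is_chunk') ? '!is_chunk:true' : undefined;
  } catch {
    return undefined;
  }
}
```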
{ searchFields } : {};\n })()\n ),\n ...(options.sort_by && { sort_by: options.sort_by }),\n ...(options.exclude_fields && { exclude_fields: options.exclude_fields }),\n }\n );\n return result;\n } catch (error) {\n logger.error('Error searching collection', error as Error, {\n collection: collectionName,\n query,\n });\n throw error;\n }\n }\n );\n\n const results = await Promise.all(searchPromises);\n const fallbackResult = combineTraditionalResults(results, {\n page: options.page,\n per_page: options.per_page,\n query,\n });\n\n searchCache.set(query, fallbackResult, \"universal\", options);\n return fallbackResult;\n};\n","import type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport {\n DEFAULT_PAGE,\n DEFAULT_PER_PAGE,\n} from \"../constants.js\";\nimport type {\n ProcessVectorSearchResultsOptions,\n SearchHit,\n CombinedSearchResult,\n} from \"../types.js\";\n\n/**\n * Typesense search result from a single collection\n */\ninterface TypesenseCollectionResult {\n found?: number;\n error?: string;\n hits?: Array<{\n document?: Record<string, unknown>;\n vector_distance?: number;\n text_match?: number;\n [key: string]: unknown;\n }>;\n}\n\n/**\n * Typesense multi-search response\n */\ninterface TypesenseMultiSearchResponse {\n results?: TypesenseCollectionResult[];\n}\n\n/**\n * Internal result type with collection metadata\n */\ninterface CollectionResult {\n collection: string;\n displayName: string;\n icon: string;\n found: number;\n error: string | undefined;\n hits: SearchHit[];\n}\n\n/**\n * Processes and combines vector search results from multiple collections\n */\nexport const processVectorSearchResults = (\n multiSearchResults: TypesenseMultiSearchResponse,\n enabledCollections: Array<[string, TableConfig]>,\n options: ProcessVectorSearchResultsOptions\n): CombinedSearchResult => {\n const {\n per_page = DEFAULT_PER_PAGE,\n page = DEFAULT_PAGE,\n k,\n query,\n vector,\n } = options;\n\n const rawResults = multiSearchResults.results?.map((result: TypesenseCollectionResult, index: number): CollectionResult | null => {\n if (!enabledCollections[index]) {\n return null;\n }\n const [collectionName, config] = enabledCollections[index];\n\n return {\n collection: collectionName,\n displayName: config?.displayName || collectionName,\n icon: \"📄\",\n found: result.found || 0,\n error: result.error || undefined,\n hits:\n result.hits?.map((hit): SearchHit => {\n const doc = hit.document || {};\n const hint = doc.chunk_text\n ? String(doc.chunk_text).substring(0, 300) + '...'\n : doc.description\n ? String(doc.description).substring(0, 300) + '...'\n : doc.hint;\n\n return {\n ...hit,\n collection: collectionName,\n displayName: config?.displayName || collectionName,\n icon: \"📄\",\n document: {\n ...doc,\n hint,\n // Keep chunk_text as a separate field for chunks\n ...(doc.chunk_text ? { chunk_text: doc.chunk_text } : {}),\n },\n vector_distance: hit.vector_distance,\n text_match: hit.text_match,\n };\n }) || [],\n };\n }) || [];\n\n const results: CollectionResult[] = rawResults.filter((r: CollectionResult | null): r is CollectionResult => r !== null);\n\n // Combine results\n const combinedHits = results.flatMap((result) => result.hits);\n const totalFound = results.reduce(\n (sum, result) => sum + result.found,\n 0\n );\n\n // Sort by vector distance (if available) or relevance\n combinedHits.sort((a, b) => {\n const aDistance = a.vector_distance ?? Infinity;\n const bDistance = b.vector_distance ?? 
Infinity;\n return aDistance - bDistance;\n });\n\n const searchResult: CombinedSearchResult = {\n collections: results.map((r: CollectionResult) => ({\n collection: r.collection,\n displayName: r.displayName,\n error: r.error,\n found: r.found || 0,\n icon: r.icon,\n })),\n found: totalFound,\n hits: combinedHits.slice(0, per_page),\n page,\n request_params: {\n k: k,\n per_page,\n query: query || null,\n vector: vector ? \"provided\" : null,\n },\n search_cutoff: false,\n search_time_ms: 0,\n };\n\n return searchResult;\n};\n","import {\n DEFAULT_ALPHA,\n DEFAULT_K,\n DEFAULT_PAGE,\n DEFAULT_PER_PAGE,\n DEFAULT_SEARCH_FIELDS,\n} from \"../constants.js\";\nimport type { BuildVectorSearchParamsOptions } from \"../types.js\";\n\n/**\n * Builds vector search parameters for a single collection\n */\nexport const buildVectorSearchParams = (\n searchVector: number[],\n options: BuildVectorSearchParamsOptions\n): Record<string, unknown> => {\n const {\n query,\n k = DEFAULT_K,\n hybrid = false,\n alpha = DEFAULT_ALPHA,\n page = DEFAULT_PAGE,\n per_page = DEFAULT_PER_PAGE,\n filter_by,\n sort_by,\n searchFields,\n } = options;\n\n const searchParams: Record<string, unknown> = {\n q: \"*\", // Required by Typesense, use wildcard for pure vector search\n vector_query: `embedding:([${searchVector.join(\",\")}], k:${k})`,\n per_page,\n page,\n exclude_fields: 'embedding',\n };\n\n // Add keyword search if hybrid mode\n if (hybrid && query) {\n searchParams.q = query;\n searchParams.query_by = searchFields?.join(\",\") || DEFAULT_SEARCH_FIELDS.join(\",\");\n searchParams.vector_query = `embedding:([${searchVector.join(\",\")}], k:${k}, alpha:${alpha})`;\n }\n\n // Add filters if provided\n if (filter_by) {\n searchParams.filter_by = filter_by;\n }\n\n // Add sorting if provided\n if (sort_by) {\n searchParams.sort_by = sort_by;\n }\n\n return searchParams;\n};\n\n\n\n","import type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport type { BuildMultiCollectionVectorSearchParamsOptions } from \"../types.js\";\nimport { buildVectorSearchParams } from \"./build-params.js\";\n\n/**\n * Builds multi-collection vector search parameters\n */\nexport const buildMultiCollectionVectorSearchParams = (\n searchVector: number[],\n enabledCollections: Array<[string, TableConfig]>,\n options: BuildMultiCollectionVectorSearchParamsOptions\n): Array<Record<string, unknown>> => {\n const {\n query,\n k,\n hybrid,\n alpha,\n page,\n per_page,\n filter_by,\n sort_by,\n } = options;\n\n return enabledCollections.map(([collectionName, config]) => {\n // Extract search fields\n let searchFields: string[] | undefined;\n if (config) {\n let fields: { name: string; index?: boolean; type?: string }[] = [];\n fields = config.fields;\n // Filter for indexed fields that are searchable (string or string[] types only)\n // Typesense only accepts string/string[] fields in query_by parameter\n const extracted = fields\n .filter(f =>\n f.index !== false &&\n (f.type === 'string' || f.type === 'string[]')\n )\n .map(f => f.name);\n if (extracted.length > 0) {\n searchFields = extracted;\n }\n }\n\n // Build search params - don't add filter_by here\n // The filter will be added conditionally in the handler after schema check\n const collectionSearchParams = buildVectorSearchParams(searchVector, {\n ...(query !== undefined && { query }),\n ...(k !== undefined && { k }),\n ...(hybrid !== undefined && { hybrid }),\n ...(alpha !== undefined && { alpha }),\n ...(page !== undefined && { page }),\n ...(per_page !== undefined && { 
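The two `vector_query` shapes produced by `buildVectorSearchParams` are worth seeing side by side: pure vector search keeps `q: "*"`, while hybrid mode sets the real query plus an `alpha` weight (toy 3-dimensional vector for illustration):

```typescript
const vec = [0.12, -0.03, 0.5];

buildVectorSearchParams(vec, {});
// -> { q: '*', vector_query: 'embedding:([0.12,-0.03,0.5], k:150)',
//      per_page: 20, page: 1, exclude_fields: 'embedding' }

buildVectorSearchParams(vec, { query: 'payload', hybrid: true });
// -> { q: 'payload', query_by: 'title,content',
//      vector_query: 'embedding:([0.12,-0.03,0.5], k:150, alpha:0.7)', ... }
```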
per_page }),\n // Don't add filter_by here - will be handled in handler after schema check\n ...(sort_by !== undefined && { sort_by }),\n ...(searchFields !== undefined && {\n searchFields: searchFields,\n }),\n });\n\n // Store filter_by separately - handler will add it conditionally\n return {\n collection: collectionName,\n ...collectionSearchParams,\n _filter_by: filter_by, // Internal flag for handler to check schema and add filter\n };\n });\n};\n\n\n\n","import type { EmbeddingProviderConfig } from \"../../../shared/types/plugin-types.js\";\nimport { generateEmbedding } from \"../../embedding/embeddings.js\";\n\n/**\n * Generates or retrieves a search vector from query text or provided vector\n */\nexport const generateOrGetVector = async (\n query?: string,\n vector?: number[],\n embeddingConfig?: EmbeddingProviderConfig\n): Promise<number[] | null> => {\n // Use provided vector if available\n if (vector && Array.isArray(vector) && vector.length > 0) {\n return vector;\n }\n\n // Generate embedding from query if vector not provided\n if (query) {\n const searchVector = await generateEmbedding(query, embeddingConfig);\n if (!searchVector || searchVector.length === 0) {\n return null;\n }\n return searchVector;\n }\n\n return null;\n};\n\n\n\n","import type { Client } from \"typesense\";\nimport type { ModularPluginConfig } from \"../../../core/config/types.js\";\nimport { logger } from \"../../../core/logging/logger.js\";\nimport { searchCache } from \"../../../shared/cache/cache.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { DEFAULT_ALPHA, DEFAULT_K } from \"../constants.js\";\nimport { performTraditionalMultiCollectionSearch } from \"../endpoints/handlers/executors/traditional-multi-collection-search.js\";\nimport { processVectorSearchResults } from \"../results/process-vector-results.js\";\nimport type { CombinedSearchResult, UniversalSearchOptions } from \"../types.js\";\nimport { buildMultiCollectionVectorSearchParams } from \"../vector/build-multi-collection-params.js\";\nimport { generateOrGetVector } from \"../vector/generate-vector.js\";\n\nexport class SearchService {\n constructor(\n private typesenseClient: Client,\n private pluginOptions: ModularPluginConfig\n ) {}\n\n async performSearch(\n query: string,\n targetCollections: Array<[string, TableConfig]>,\n options: UniversalSearchOptions\n ): Promise<CombinedSearchResult> {\n // Cache key generation\n const cacheKey = `search:${query}:${JSON.stringify(options)}:${targetCollections.map(c => c[0]).join(',')}`;\n const cachedResult = searchCache.get(query, cacheKey, options) as CombinedSearchResult | null;\n if (cachedResult) return cachedResult;\n\n const searchMode = options.mode || \"semantic\";\n\n // 1. Simple / Traditional Search\n if (searchMode === \"simple\") {\n return this.performTraditionalSearch(query, targetCollections, options);\n }\n\n // 2. 
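`generateOrGetVector` resolves its inputs in a fixed order, which is easy to restate as a small truth table (the embedding call itself is whatever `generateEmbedding` is configured with):

```typescript
async function demo() {
  await generateOrGetVector(undefined, [0.1, 0.2]); // vector wins: returned as-is, no embedding call
  await generateOrGetVector('payload cms');         // query only: generateEmbedding(...) or null on failure
  await generateOrGetVector();                      // neither: null
  await generateOrGetVector('payload cms', []);     // empty vector is ignored; falls through to the query
}
```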
Semantic / Hybrid Search\n const searchVector = await generateOrGetVector(\n query,\n undefined,\n this.pluginOptions.features.embedding\n );\n\n if (!searchVector) {\n // Fallback to traditional if vector generation fails\n return this.performTraditionalSearch(query, targetCollections, options);\n }\n\n try {\n // Execute Vector Search\n const results = await this.executeVectorSearch(query, searchVector, targetCollections, options);\n searchCache.set(query, results, cacheKey, options);\n return results;\n } catch (error) {\n logger.error(\"Vector search failed, falling back to traditional\", error as Error);\n return this.performTraditionalSearch(query, targetCollections, options);\n }\n }\n\n private async performTraditionalSearch(\n query: string,\n targetCollections: Array<[string, TableConfig]>,\n options: UniversalSearchOptions\n ): Promise<CombinedSearchResult> {\n return performTraditionalMultiCollectionSearch(\n this.typesenseClient,\n targetCollections,\n query,\n options\n );\n }\n\n private async executeVectorSearch(\n query: string,\n searchVector: number[],\n targetCollections: Array<[string, TableConfig]>,\n options: UniversalSearchOptions\n ): Promise<CombinedSearchResult> {\n const searches = buildMultiCollectionVectorSearchParams(\n searchVector,\n targetCollections,\n {\n query,\n k: Math.min(30, DEFAULT_K),\n hybrid: true,\n alpha: DEFAULT_ALPHA,\n page: options.page,\n per_page: options.per_page,\n ...(options.sort_by !== undefined && { sort_by: options.sort_by }),\n }\n );\n\n if (searches.length === 0) {\n return {\n collections: [],\n found: 0,\n hits: [],\n page: options.page,\n request_params: {\n per_page: options.per_page,\n query: query,\n },\n search_cutoff: false,\n search_time_ms: 0\n };\n }\n\n const multiSearchResults = await this.typesenseClient.multiSearch.perform({ searches });\n \n return processVectorSearchResults(\n multiSearchResults,\n targetCollections,\n {\n per_page: options.per_page,\n page: options.page,\n k: DEFAULT_K,\n query,\n }\n );\n }\n}\n","import type { CombinedSearchResult, SearchHit } from \"../../../types.js\";\n\n/**\n * Helper to resolve document type from collection name\n */\nfunction resolveDocumentType(collectionName: string): string {\n if (collectionName.includes('article')) return 'article';\n if (collectionName.includes('book')) return 'book';\n return 'document';\n}\n\n/**\n * Simplified document format for API responses\n */\ntype SimplifiedDocument = {\n id: string;\n title: string;\n slug: string;\n type: string;\n collection: string;\n};\n\n/**\n * Transform search response to simplified format\n */\nexport function transformToSimpleFormat(data: CombinedSearchResult): { documents: SimplifiedDocument[] } {\n if (!data.hits) {\n return { documents: [] };\n }\n\n const documents = data.hits.map((hit: SearchHit) => {\n const doc = hit.document || {};\n const collectionValue = hit.collection || doc.collection;\n const collection = typeof collectionValue === 'string' ? collectionValue : '';\n\n return {\n id: String(doc.id || ''),\n title: String(doc.title || 'Sin título'),\n slug: String(doc.slug || ''),\n type: resolveDocumentType(collection),\n collection: collection,\n };\n });\n\n return { documents };\n}\n","import type { TableConfig } from \"@nexo-labs/payload-indexer\";\n\n/**\n * Generates the Typesense collection name based on the configuration.\n *\n * Priority:\n * 1. Explicit `tableName` if provided.\n * 2. 
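`SearchService.performSearch` degrades gracefully: cache hit first, then `mode: 'simple'` short-circuits to keyword search, then hybrid vector search, and any embedding failure or multi-search error falls back to the traditional path. A hedged usage sketch; the client, config objects, and table tuple are placeholders, and the `SearchService` import path is internal:

```typescript
declare const typesenseClient: import('typesense').Client; // assumed in scope
declare const pluginOptions: any; // ModularPluginConfig in the real code
declare const postsConfig: any;   // TableConfig in the real code

async function demo() {
  const service = new SearchService(typesenseClient, pluginOptions);
  const result = await service.performSearch(
    'payload cms',
    [['posts', postsConfig]], // [typesense collection name, TableConfig]
    { page: 1, per_page: 10, mode: 'semantic', filters: {} },
  );
  console.log(result.found, result.hits.length);
}
```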
`collectionSlug` (fallback).\n *\n * @param collectionSlug The slug of the Payload collection\n * @param tableConfig The configuration for the specific table\n * @returns The generated Typesense collection name\n */\nexport const getTypesenseCollectionName = (\n collectionSlug: string,\n tableConfig: TableConfig\n): string => {\n return tableConfig.tableName ?? collectionSlug;\n};\n","import type { ModularPluginConfig } from \"../../../../../core/config/types.js\";\nimport { getTypesenseCollectionName } from \"../../../../../core/utils/naming.js\";\n\nexport class TargetCollectionResolver {\n private allowedTableNames: string[];\n\n constructor(private pluginOptions: ModularPluginConfig) {\n this.allowedTableNames = this.getAllowedTableNames(pluginOptions);\n }\n\n private getAllowedTableNames(\n pluginOptions: ModularPluginConfig\n ): string[] {\n const configuredAllowed = pluginOptions.features.search?.defaults?.tables || [];\n const allowedTableNames: Set<string> = new Set();\n const allTableNames: Set<string> = new Set();\n \n for (const [collectionSlug, tableConfigs] of Object.entries(\n pluginOptions.collections || {}\n )) {\n if (Array.isArray(tableConfigs)) {\n for (const tableConfig of tableConfigs) {\n if (!tableConfig.enabled) continue;\n \n const tableName = getTypesenseCollectionName(collectionSlug, tableConfig);\n allTableNames.add(tableName);\n \n // If no restrictions are configured, everything is allowed\n if (configuredAllowed.length === 0) {\n allowedTableNames.add(tableName);\n continue;\n }\n \n // STRICT MODE: Only allow if the exact table name is in the allowed list.\n // Do NOT allow by collection slug.\n if (configuredAllowed.includes(tableName)) {\n allowedTableNames.add(tableName);\n }\n }\n }\n }\n \n return Array.from(allowedTableNames);\n };\n \n \n /**\n * Resolves target table names based on request parameters.\n * Handles both multi-collection (array) and single-collection (slug) requests.\n * Enforces strict validation against allowed tables.\n */\n resolveTargetTables(\n collectionNameSlug: string | null,\n requestedCollections: string[] | undefined\n ): string[] {\n // Case 1: Multi-collection search (no path param)\n if (!collectionNameSlug) {\n if (requestedCollections && requestedCollections.length > 0) {\n // Strict filtering: Only keep requested tables that are explicitly allowed\n return requestedCollections.filter((c) =>\n this.allowedTableNames.includes(c)\n );\n }\n // Default: Return all allowed tables\n return this.allowedTableNames;\n }\n\n const targetTables: string[] = [];\n const tableConfigs =\n this.pluginOptions.collections?.[collectionNameSlug] || [];\n\n if (Array.isArray(tableConfigs)) {\n for (const config of tableConfigs) {\n if (config.enabled) {\n const tableName = getTypesenseCollectionName(\n collectionNameSlug,\n config\n );\n if (this.allowedTableNames.includes(tableName)) {\n targetTables.push(tableName);\n }\n }\n }\n }\n\n return targetTables;\n }\n}\n","import type { ModularPluginConfig } from \"../../../../../core/config/types.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { getTypesenseCollectionName } from \"../../../../../core/utils/naming.js\";\n\nexport class SearchConfigMapper {\n constructor(private pluginOptions: ModularPluginConfig) {}\n\n /**\n * Maps a list of table names to their full configuration objects.\n * Essential for the search service which needs config details (fields, weights, etc.)\n */\n mapTablesToConfigs(\n targetTableNames: string[]\n ): Array<[string, 
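Worked examples of the naming rule (`tableName` wins, the Payload slug is the fallback); the minimal `TableConfig` objects are illustrative:

```typescript
getTypesenseCollectionName('posts', { enabled: true, fields: [], tableName: 'posts_v2' }); // 'posts_v2'
getTypesenseCollectionName('posts', { enabled: true, fields: [] });                        // 'posts'
```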
TableConfig]> {\n const searchConfigs: Array<[string, TableConfig]> = [];\n\n // Iterate through all collections in global config\n for (const [slug, configs] of Object.entries(\n this.pluginOptions.collections || {}\n )) {\n if (!Array.isArray(configs)) continue;\n\n for (const config of configs) {\n if (!config.enabled) continue;\n\n const tableName = getTypesenseCollectionName(slug, config);\n \n // If this table is in our target list, add it to the result\n if (targetTableNames.includes(tableName)) {\n searchConfigs.push([tableName, config]);\n }\n }\n }\n\n return searchConfigs;\n }\n}\n","/**\n * Configuration validation using Zod schemas\n */\n\nimport { z } from 'zod'\n\n// Validation result type\nexport interface ValidationResult {\n data?: ValidatedSearchParams\n errors?: string[]\n success: boolean\n}\n\n/**\n * Get configuration validation errors in a user-friendly format\n */\nexport function getValidationErrors(errors: string[]): string {\n return errors.map((error, index) => `${index + 1}. ${error}`).join('\\n')\n}\n\n/**\n * Validate search parameters\n */\nconst SearchParamsSchema = z.object({\n facets: z.array(z.string()).optional(),\n filters: z.record(z.string(), z.any()).optional(),\n highlight_fields: z.array(z.string()).optional(),\n num_typos: z.number().int().min(0).max(4).optional().default(0),\n page: z.number().int().min(1).optional().default(1),\n per_page: z.number().int().min(1).max(250).optional().default(10),\n q: z.string().min(1, 'Query parameter \"q\" is required'),\n snippet_threshold: z.number().int().min(0).max(100).optional().default(30),\n sort_by: z.string().optional(),\n typo_tokens_threshold: z.number().int().min(1).optional().default(1)\n})\n\nexport type ValidatedSearchParams = z.infer<typeof SearchParamsSchema>\n\n/**\n * Validate search parameters\n */\nexport function validateSearchParams(params: unknown): ValidationResult {\n try {\n const validatedParams = SearchParamsSchema.parse(params)\n return {\n data: validatedParams,\n success: true\n }\n } catch (error) {\n if (error instanceof z.ZodError) {\n const errors = error.issues.map((err) => {\n const path = err.path.length > 0 ? 
`${err.path.join('.')}: ` : ''\n return `${path}${err.message}`\n })\n \n return {\n errors,\n success: false\n }\n }\n \n return {\n errors: ['Invalid search parameters format'],\n success: false\n }\n }\n}\n","import { PayloadRequest } from \"payload\";\n\n/**\n * Extracts collection name from request URL or params\n */\nexport const extractCollectionName = (\n request: PayloadRequest\n): { collectionName: string; collectionNameStr: string } => {\n let collectionName: string;\n let collectionNameStr: string;\n\n if (request.url && typeof request.url === \"string\") {\n const url = new URL(request.url);\n const pathParts = url.pathname.split(\"/\");\n const searchIndex = pathParts.indexOf(\"search\");\n if (searchIndex !== -1 && pathParts[searchIndex + 1]) {\n collectionName = pathParts[searchIndex + 1] || \"\";\n collectionNameStr = String(collectionName);\n } else {\n collectionName = \"\";\n collectionNameStr = \"\";\n }\n } else {\n // Fallback to params extraction\n const params = request.routeParams;\n const paramCollectionName = params?.collectionName;\n collectionName = String(paramCollectionName || \"\");\n collectionNameStr = collectionName;\n }\n\n return { collectionName, collectionNameStr };\n};\n\n\n\n","/**\n * Extracts and validates search parameters from request query\n */\nexport const extractSearchParams = (\n query: Record<string, unknown>\n): {\n q: string;\n page: number;\n per_page: number;\n sort_by?: string;\n mode?: 'simple' | 'semantic';\n collections?: string[];\n exclude_fields?: string;\n query_by?: string;\n simple?: boolean;\n errors?: string[];\n} => {\n const q = String(query?.q || \"\");\n const pageParam = query?.page;\n const perPageParam = query?.per_page;\n const page = pageParam ? parseInt(String(pageParam), 10) : 1;\n const per_page = perPageParam ? parseInt(String(perPageParam), 10) : 10;\n const sort_by = query?.sort_by as string | undefined;\n const mode = query?.mode as 'simple' | 'semantic' | undefined;\n\n // New parameters for collection filtering and simplified response\n const collectionParam = query?.collection;\n const collections: string[] | undefined = collectionParam\n ? Array.isArray(collectionParam)\n ? 
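Illustrative outcomes of `validateSearchParams`: defaults are filled in on success, and Zod issues are flattened into `path: message` strings on failure (exact message text depends on the Zod version):

```typescript
validateSearchParams({ q: 'hello' });
// -> { success: true, data: { q: 'hello', page: 1, per_page: 10, num_typos: 0,
//      snippet_threshold: 30, typo_tokens_threshold: 1 } }

validateSearchParams({ q: '', per_page: 999 });
// -> { success: false, errors: [
//      'q: Query parameter "q" is required',
//      'per_page: <number-too-big message from Zod>',
//    ] }
```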
collectionParam.map(c => String(c))\n : [String(collectionParam)]\n : undefined;\n\n const exclude_fields = query?.exclude_fields as string | undefined;\n const query_by = query?.query_by as string | undefined;\n const simpleParam = query?.simple;\n const simple = simpleParam === 'true' || simpleParam === true || simpleParam === '1';\n\n const errors: string[] = [];\n\n // Validate parsed numbers\n if (isNaN(page) || page < 1) {\n errors.push(\"Invalid page parameter\");\n }\n if (isNaN(per_page) || per_page < 1 || per_page > 250) {\n errors.push(\"Invalid per_page parameter\");\n }\n\n const result: {\n q: string;\n page: number;\n per_page: number;\n sort_by?: string;\n mode?: 'simple' | 'semantic';\n collections?: string[];\n exclude_fields?: string;\n query_by?: string;\n simple?: boolean;\n errors?: string[];\n } = {\n q,\n page,\n per_page,\n };\n\n if (sort_by) {\n result.sort_by = sort_by;\n }\n\n if (mode) {\n result.mode = mode;\n }\n\n if (collections && collections.length > 0) {\n result.collections = collections;\n }\n\n if (exclude_fields) {\n result.exclude_fields = exclude_fields;\n }\n\n if (query_by) {\n result.query_by = query_by;\n }\n\n if (simple) {\n result.simple = simple;\n }\n\n if (errors.length > 0) {\n result.errors = errors;\n }\n\n return result;\n};\n","import { PayloadRequest } from \"payload\";\nimport {\n getValidationErrors,\n validateSearchParams,\n} from \"../../../../../core/config/config-validation.js\";\nimport { extractCollectionName } from \"../../../utils/extract-collection-name.js\";\nimport { extractSearchParams } from \"../../../utils/extract-search-params.js\";\n\n/**\n * Result type for request validation\n */\nexport type ValidationResult =\n | { success: false; error: Response }\n | {\n success: true;\n collectionName: string | null;\n collectionNameStr: string;\n searchParams: ReturnType<typeof extractSearchParams>;\n };\n\n/**\n * Validates search request and returns parsed parameters\n */\nexport function validateSearchRequest(request: PayloadRequest): ValidationResult {\n const { query } = request;\n const { collectionName, collectionNameStr } = extractCollectionName(request);\n const searchParams = extractSearchParams(query as Record<string, unknown>);\n\n // Check for parsing errors\n if (searchParams.errors && searchParams.errors.length > 0) {\n return {\n success: false,\n error: Response.json({ error: searchParams.errors[0] }, { status: 400 }),\n };\n }\n\n // Validate search parameters\n const validation = validateSearchParams({\n page: searchParams.page,\n per_page: searchParams.per_page,\n q: searchParams.q,\n sort_by: searchParams.sort_by,\n });\n\n if (!validation.success) {\n return {\n success: false,\n error: Response.json(\n {\n details: getValidationErrors(validation.errors || []),\n error: \"Invalid search parameters\",\n },\n { status: 400 }\n ),\n };\n }\n\n return { success: true, collectionName, collectionNameStr, searchParams };\n}\n","import type { PayloadHandler, PayloadRequest } from \"payload\";\nimport type { Client } from \"typesense\";\nimport type { ModularPluginConfig } from \"../../../../core/config/types.js\";\nimport { SearchService } from \"../../services/search-service.js\";\nimport {\n SearchConfigMapper,\n TargetCollectionResolver,\n transformToSimpleFormat,\n} from \"./utils/index.js\";\nimport { validateSearchRequest } from \"./validators/index.js\";\n\n/**\n * Creates a handler for standard search requests\n */\nexport const createSearchHandler = (\n typesenseClient: Client,\n pluginOptions: 
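A worked example of the extractor above, as if parsing `GET /api/search?q=payload&page=2&collection=posts&collection=docs&simple=true`; Payload hands the handler a parsed query object, so a repeated param arrives as an array:

```typescript
extractSearchParams({
  q: 'payload',
  page: '2',
  collection: ['posts', 'docs'],
  simple: 'true',
});
// -> { q: 'payload', page: 2, per_page: 10,
//      collections: ['posts', 'docs'], simple: true }
```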
ModularPluginConfig\n): PayloadHandler => {\n const searchService = new SearchService(typesenseClient, pluginOptions);\n const targetResolver = new TargetCollectionResolver(pluginOptions);\n const configMapper = new SearchConfigMapper(pluginOptions);\n\n return async (request: PayloadRequest) => {\n try {\n // 1. Validate Request\n const validated = validateSearchRequest(request);\n if (!validated.success) return validated.error;\n\n const { collectionName, searchParams } = validated;\n\n // 2. Resolve Target Tables (Atomized Logic)\n const targetCollections = targetResolver.resolveTargetTables(\n collectionName, // Pass null if multi-search, or slug if single\n searchParams.collections\n );\n\n // Validation: Check if we have valid targets\n if (targetCollections.length === 0) {\n const isMultiSearch = !collectionName;\n const hasExplicitRequest = isMultiSearch && searchParams.collections && searchParams.collections.length > 0;\n \n if (hasExplicitRequest) {\n return Response.json({ error: \"None of the requested collections are allowed\" }, { status: 403 });\n }\n return Response.json({ error: \"Collection not allowed or not enabled\" }, { status: 403 });\n }\n\n if (!searchParams.q || searchParams.q.trim() === \"\") {\n return Response.json({ error: 'Query parameter \"q\" is required' }, { status: 400 });\n }\n\n // 3. Prepare Search Configuration (Atomized Logic)\n const searchConfigs = configMapper.mapTablesToConfigs(targetCollections);\n\n // 4. Execute Search via Service\n const searchResult = await searchService.performSearch(\n searchParams.q,\n searchConfigs,\n {\n filters: {},\n page: searchParams.page,\n per_page: searchParams.per_page,\n sort_by: searchParams.sort_by,\n mode: searchParams.mode,\n exclude_fields: searchParams.exclude_fields,\n query_by: searchParams.query_by,\n }\n );\n\n // 5. Format Response\n if (searchParams.simple) {\n return Response.json(transformToSimpleFormat(searchResult));\n }\n\n return Response.json(searchResult);\n\n } catch (error) {\n return Response.json(\n {\n details: error instanceof Error ? 
error.message : \"Unknown error\",\n error: \"Search handler failed\",\n },\n { status: 500 }\n );\n }\n };\n};\n","import type { Client } from \"typesense\";\n\nimport type { ModularPluginConfig } from \"../../core/config/types.js\";\nimport {\n createCollectionsHandler,\n createSearchHandler,\n} from \"./endpoints/handlers/index.js\";\n\n\nexport const createSearchEndpoints = (\n typesenseClient: Client,\n pluginOptions: ModularPluginConfig\n) => {\n return [\n {\n handler: createCollectionsHandler(pluginOptions),\n method: \"get\" as const,\n path: \"/search/collections\",\n },\n {\n handler: createSearchHandler(typesenseClient, pluginOptions),\n method: \"get\" as const,\n path: \"/search/:collectionName\",\n },\n {\n handler: createSearchHandler(typesenseClient, pluginOptions),\n method: \"get\" as const,\n path: \"/search\",\n },\n ];\n};\n","/**\n * Constants for payload-typesense plugin\n * Centralizes all magic numbers and configuration defaults\n */\n\n// ============================================================================\n// EMBEDDING CONSTANTS\n// ============================================================================\n\n/**\n * Default dimensions for OpenAI text-embedding-3-large model\n */\nexport const DEFAULT_EMBEDDING_DIMENSIONS = 3072;\n\n// ============================================================================\n// SEARCH CONSTANTS\n// ============================================================================\n\n/**\n * Default alpha value for hybrid search (0 = pure semantic, 1 = pure keyword)\n */\nexport const DEFAULT_HYBRID_SEARCH_ALPHA = 0.5;\n\n/**\n * Default number of search results to return\n */\nexport const DEFAULT_SEARCH_LIMIT = 10;\n\n// ============================================================================\n// CACHE CONSTANTS\n// ============================================================================\n\n/**\n * Default TTL for cache entries (in milliseconds) - 5 minutes\n */\nexport const DEFAULT_CACHE_TTL_MS = 5 * 60 * 1000;\n\n// ============================================================================\n// RAG CONSTANTS\n// ============================================================================\n\n/**\n * Default maximum tokens for RAG responses\n */\nexport const DEFAULT_RAG_MAX_TOKENS = 1000;\n\n/**\n * Default number of search results to use for RAG context\n */\nexport const DEFAULT_RAG_CONTEXT_LIMIT = 5;\n\n/**\n * Default session TTL (in seconds) - 30 minutes\n */\nexport const DEFAULT_SESSION_TTL_SEC = 30 * 60;\n\n/**\n * Default OpenAI model for RAG chat\n */\nexport const DEFAULT_RAG_LLM_MODEL = 'gpt-4o-mini';\n","import type { CollectionCreateSchema } from \"typesense/lib/Typesense/Collections.js\";\nimport { DEFAULT_EMBEDDING_DIMENSIONS } from \"../../core/config/constants.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport type { TypesenseFieldMapping } from \"../../adapter/types.js\";\n\n/**\n * Field schema definitions for Typesense collections\n */\n\n/**\n * Type for Typesense collection field schema\n * Extracted from CollectionCreateSchema to ensure type compatibility\n */\ntype TypesenseFieldSchema = NonNullable<\n CollectionCreateSchema[\"fields\"]\n>[number];\n\n/**\n * Base fields that every collection should have\n */\nconst getBaseFields = () => [\n { name: \"id\", type: \"string\" as const },\n { name: \"slug\", type: \"string\" as const },\n { name: \"createdAt\", type: \"int64\" as const },\n { name: \"updatedAt\", type: \"int64\" as const },\n];\n\n/**\n * 
Creates embedding field definition\n * @param optional - Whether the embedding field is optional\n * @param dimensions - Number of dimensions for the embedding vector (default: DEFAULT_EMBEDDING_DIMENSIONS = 3072)\n */\nconst getEmbeddingField = (\n optional: boolean = true,\n dimensions: number = DEFAULT_EMBEDDING_DIMENSIONS\n) => ({\n name: \"embedding\",\n type: \"float[]\" as const,\n num_dim: dimensions,\n ...(optional && { optional: true }),\n});\n\n/**\n * Maps TypesenseFieldMapping to TypesenseFieldSchema\n */\nconst mapFieldMappingsToSchema = (fields: TypesenseFieldMapping[]): TypesenseFieldSchema[] => {\n return fields.map(field => ({\n name: field.name,\n type: field.type === 'auto' ? 'string' : field.type,\n facet: field.facet,\n index: field.index,\n optional: field.optional\n }));\n};\n\n/**\n * Gets chunk-specific fields for chunk collections\n */\nconst getChunkFields = () => [\n { name: \"parent_doc_id\", type: \"string\" as const, facet: true }, // Required for chunks\n { name: \"chunk_index\", type: \"int32\" as const },\n { name: \"chunk_text\", type: \"string\" as const }, // The chunk content\n { name: \"is_chunk\", type: \"bool\" as const }, // Always true for chunks\n { name: \"headers\", type: \"string[]\" as const, facet: true, optional: true }, // Hierarchical header metadata\n];\n\n/**\n * Creates a complete schema for a chunk collection\n */\nexport const getChunkCollectionSchema = (\n collectionSlug: string,\n tableConfig: TableConfig<TypesenseFieldMapping>,\n embeddingDimensions: number = DEFAULT_EMBEDDING_DIMENSIONS\n) => {\n const fields = tableConfig.fields ? mapFieldMappingsToSchema(tableConfig.fields) : [];\n \n // Get user-defined field names to avoid duplicates\n const userFieldNames = new Set([\n ...fields.map(f => f.name),\n ...getChunkFields().map(f => f.name)\n ]);\n \n // Filter base fields to exclude any that are already defined by user or chunk fields\n const baseFields = getBaseFields().filter(f => !userFieldNames.has(f.name));\n\n return {\n name: collectionSlug,\n fields: [\n ...baseFields,\n ...getChunkFields(),\n ...fields,\n getEmbeddingField(false, embeddingDimensions), // Embeddings are required for chunks\n ],\n };\n};\n\n/**\n * Creates a complete schema for a full document collection\n */\nexport const getFullDocumentCollectionSchema = (\n collectionSlug: string,\n tableConfig: TableConfig<TypesenseFieldMapping>,\n embeddingDimensions: number = DEFAULT_EMBEDDING_DIMENSIONS\n) => {\n const mappedFields = mapFieldMappingsToSchema(tableConfig.fields);\n \n // Get user-defined field names to avoid duplicates\n const userFieldNames = new Set(mappedFields.map(f => f.name));\n \n // Filter base fields to exclude any that are already defined by user\n const baseFields = getBaseFields().filter(f => !userFieldNames.has(f.name));\n\n return {\n name: collectionSlug,\n fields: [\n ...baseFields,\n ...mappedFields,\n // Optional embedding for full documents\n getEmbeddingField(true, embeddingDimensions) \n ],\n };\n};\n","import type { Client } from \"typesense\";\nimport type { CollectionCreateSchema } from \"typesense/lib/Typesense/Collections.js\";\nimport { logger } from \"../../../core/logging/logger.js\";\nimport { getTypesenseCollectionName } from \"../../../core/utils/naming.js\";\nimport type { ModularPluginConfig } from \"../../../core/config/types.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport type { TypesenseFieldMapping } from \"../../../adapter/types.js\";\nimport {\n getChunkCollectionSchema,\n getFullDocumentCollectionSchema,\n} 
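Illustrative output of `getChunkCollectionSchema` for a minimal config: deduplicated base fields, then chunk fields, then user fields, then the required embedding (undefined flags omitted for brevity):

```typescript
getChunkCollectionSchema(
  'posts_chunks',
  { enabled: true, fields: [{ name: 'title', type: 'string' }] },
  3072,
);
// -> { name: 'posts_chunks', fields: [
//      { name: 'id', type: 'string' }, { name: 'slug', type: 'string' },
//      { name: 'createdAt', type: 'int64' }, { name: 'updatedAt', type: 'int64' },
//      { name: 'parent_doc_id', type: 'string', facet: true },
//      { name: 'chunk_index', type: 'int32' },
//      { name: 'chunk_text', type: 'string' },
//      { name: 'is_chunk', type: 'bool' },
//      { name: 'headers', type: 'string[]', facet: true, optional: true },
//      { name: 'title', type: 'string' },
//      { name: 'embedding', type: 'float[]', num_dim: 3072 }, // required for chunks
//    ] }
```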
from \"../../../shared/schema/collection-schemas.js\";\nimport { DEFAULT_EMBEDDING_DIMENSIONS } from \"../../../core/config/constants.js\";\n\nexport class SchemaManager {\n constructor(\n private client: Client,\n private config: ModularPluginConfig\n ) {}\n\n /**\n * Synchronizes all configured collections with Typesense\n */\n async syncCollections(): Promise<void> {\n if (!this.config.collections) return;\n\n logger.info('Starting schema synchronization...');\n\n const embeddingDimensions = this.getEmbeddingDimensions();\n\n for (const [collectionSlug, tableConfigs] of Object.entries(this.config.collections)) {\n if (!tableConfigs) continue;\n\n for (const tableConfig of tableConfigs as TableConfig<TypesenseFieldMapping>[]) {\n if (!tableConfig.enabled) continue;\n\n await this.syncTable(collectionSlug, tableConfig, embeddingDimensions);\n }\n }\n\n logger.info('Schema synchronization completed.');\n }\n\n /**\n * Syncs a single table configuration\n */\n private async syncTable(\n collectionSlug: string,\n tableConfig: TableConfig<TypesenseFieldMapping>,\n embeddingDimensions: number\n ): Promise<void> {\n const tableName = getTypesenseCollectionName(collectionSlug, tableConfig);\n\n // Generate target schema\n let targetSchema: CollectionCreateSchema;\n\n if (tableConfig.embedding?.chunking) {\n targetSchema = getChunkCollectionSchema(tableName, tableConfig, embeddingDimensions);\n } else {\n targetSchema = getFullDocumentCollectionSchema(tableName, tableConfig, embeddingDimensions);\n }\n\n try {\n // Check if collection exists\n const collection = await this.client.collections(tableName).retrieve();\n \n // Collection exists, check for updates (new fields)\n // Typesense only allows adding fields, not modifying/deleting (requires reindex)\n await this.updateCollectionSchema(tableName, collection, targetSchema);\n\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number };\n if (typesenseError?.httpStatus === 404) {\n // Collection doesn't exist, create it\n logger.info(`Creating collection: ${tableName}`);\n await this.client.collections().create(targetSchema);\n } else {\n logger.error(`Error checking collection ${tableName}`, error as Error);\n throw error;\n }\n }\n }\n\n private async updateCollectionSchema(\n tableName: string,\n currentSchema: any, // Typesense retrieval response\n targetSchema: CollectionCreateSchema\n ): Promise<void> {\n if (!currentSchema || !currentSchema.fields) return;\n \n const currentFields = new Set(currentSchema.fields.map((f: any) => f.name));\n // Filter out fields that already exist OR are 'id' (which is immutable)\n const newFields = targetSchema.fields?.filter(f => !currentFields.has(f.name) && f.name !== 'id') || [];\n\n if (newFields.length > 0) {\n logger.info(`Updating collection ${tableName} with ${newFields.length} new fields`, {\n fields: newFields.map(f => f.name)\n });\n\n try {\n // Update collection with new fields\n await this.client.collections(tableName).update({\n fields: newFields\n });\n } catch (error) {\n logger.error(`Failed to update collection ${tableName}`, error as Error);\n }\n }\n }\n\n private getEmbeddingDimensions(): number {\n const embeddingConfig = this.config.features.embedding;\n \n if (embeddingConfig?.dimensions) {\n }\n return DEFAULT_EMBEDDING_DIMENSIONS;\n }\n}\n","import type { Client } from \"typesense\";\nimport type { NodeConfiguration } from \"typesense/lib/Typesense/Configuration.js\";\nimport { logger } from \"../../../core/logging/logger.js\";\nimport type { 
AgentConfig } from \"../../../shared/types/plugin-types.js\";\nimport { ensureConversationCollection } from \"../setup.js\";\n\n/**\n * Configuration for AgentManager\n * Simple interface that only requires what it needs\n */\nexport interface AgentManagerConfig {\n agents: AgentConfig[];\n}\n\nexport class AgentManager {\n constructor(\n private client: Client,\n private config: AgentManagerConfig\n ) {}\n\n /**\n * Synchronizes all configured RAG agents with Typesense\n */\n async syncAgents(): Promise<void> {\n // Get agents from configuration\n const agents = this.config.agents || [];\n\n if (agents.length === 0) return;\n\n logger.info(`Starting synchronization of ${agents.length} RAG agents...`);\n\n // Ensure history collections exist for all agents\n const historyCollections = new Set(agents.map(a => a.historyCollection || 'conversation_history'));\n for (const collectionName of historyCollections) {\n await ensureConversationCollection(this.client, collectionName);\n }\n\n // Sync each agent model\n for (const agent of agents) {\n await this.syncAgentModel(agent);\n }\n\n logger.info('Agent synchronization completed.');\n }\n\n private async syncAgentModel(agent: AgentConfig): Promise<boolean> {\n try {\n\n const modelConfig = {\n id: agent.slug,\n model_name: agent.llmModel,\n system_prompt: agent.systemPrompt,\n api_key: agent.apiKey,\n history_collection: agent.historyCollection || 'conversation_history',\n max_bytes: agent.maxContextBytes || 65536,\n ttl: agent.ttl || 86400,\n k_results: agent.kResults || 5,\n };\n\n // Direct API call logic\n return await this.upsertConversationModel(modelConfig);\n\n } catch (error) {\n logger.error(`Failed to sync agent ${agent.slug}`, error as Error);\n return false;\n }\n }\n\n private async upsertConversationModel(modelConfig: any): Promise<boolean> {\n // Get configuration from client\n const configuration = this.client.configuration;\n\n if (!configuration || !configuration.nodes || configuration.nodes.length === 0) {\n logger.error('Invalid Typesense client configuration');\n return false;\n }\n\n const node = configuration.nodes[0] as NodeConfiguration;\n const typesenseApiKey = configuration.apiKey;\n const baseUrl = `${node.protocol}://${node.host}:${node.port}`;\n\n try {\n // Try to create\n const createResponse = await fetch(`${baseUrl}/conversations/models`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-TYPESENSE-API-KEY': typesenseApiKey || '',\n },\n body: JSON.stringify(modelConfig),\n });\n\n if (createResponse.ok) {\n logger.info(`Agent model created: ${modelConfig.id}`);\n return true;\n }\n\n if (createResponse.status === 409) {\n // Update if exists\n logger.debug(`Agent model ${modelConfig.id} exists, updating...`);\n const updateResponse = await fetch(`${baseUrl}/conversations/models/${modelConfig.id}`, {\n method: 'PUT',\n headers: {\n 'Content-Type': 'application/json',\n 'X-TYPESENSE-API-KEY': typesenseApiKey || '',\n },\n body: JSON.stringify(modelConfig),\n });\n\n if (updateResponse.ok) {\n logger.info(`Agent model updated: ${modelConfig.id}`);\n return true;\n } else {\n const err = await updateResponse.text();\n logger.error(`Failed to update agent ${modelConfig.id}: ${err}`);\n return false;\n }\n }\n\n const err = await createResponse.text();\n logger.error(`Failed to create agent ${modelConfig.id}: ${err}`);\n return false;\n\n } catch (networkError) {\n logger.error('Network error syncing agent model', networkError as Error);\n return false;\n }\n }\n}\n","/**\n * Composable 
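The agent sync boils down to a create-then-update-on-conflict pattern against Typesense's conversation-model API. Restated generically (names are placeholders; only the POST, 409, PUT flow is taken from the code above):

```typescript
async function upsertResource(
  baseUrl: string,
  path: string,
  id: string,
  body: unknown,
  apiKey: string,
): Promise<boolean> {
  const headers = { 'Content-Type': 'application/json', 'X-TYPESENSE-API-KEY': apiKey };
  const payload = JSON.stringify(body);

  const created = await fetch(`${baseUrl}${path}`, { method: 'POST', headers, body: payload });
  if (created.ok) return true;
  if (created.status !== 409) return false; // a real failure, not "already exists"

  const updated = await fetch(`${baseUrl}${path}/${id}`, { method: 'PUT', headers, body: payload });
  return updated.ok;
}
```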
Typesense RAG plugin factory for Payload CMS\n *\n * This plugin handles all Typesense-specific functionality:\n * - Search endpoints\n * - RAG endpoints\n * - Schema synchronization\n * - Agent synchronization\n *\n * It's designed to be used together with createIndexerPlugin from @nexo-labs/payload-indexer.\n *\n * @example\n * ```typescript\n * import { createIndexerPlugin } from '@nexo-labs/payload-indexer'\n * import { createTypesenseAdapter, createTypesenseRAGPlugin } from '@nexo-labs/payload-typesense'\n *\n * // 1. Create adapter\n * const adapter = createTypesenseAdapter(typesenseConnection)\n *\n * // 2. Create indexer plugin (sync hooks + embedding)\n * const { plugin: indexerPlugin, embeddingService } = createIndexerPlugin({\n * adapter,\n * features: { embedding: embeddingConfig, sync: { enabled: true } },\n * collections,\n * })\n *\n * // 3. Create Typesense RAG plugin (search + RAG + schema)\n * const typesenseRAGPlugin = createTypesenseRAGPlugin({\n * typesense: typesenseConnection,\n * embeddingConfig,\n * collections,\n * search: { enabled: true, defaults: { mode: 'semantic', perPage: 10 } },\n * agents: [...],\n * callbacks: {...},\n * })\n *\n * // 4. Export both plugins\n * export const plugins = [indexerPlugin, typesenseRAGPlugin]\n * ```\n */\n\nimport type { Config } from \"payload\";\nimport type { TypesenseRAGPluginConfig } from \"./rag-types.js\";\nimport { Logger } from \"@nexo-labs/payload-indexer\";\nimport { createTypesenseClient } from \"../core/client/typesense-client.js\";\nimport { createRAGPayloadHandlers } from \"../features/rag/endpoints.js\";\nimport { createSearchEndpoints } from \"../features/search/endpoints.js\";\nimport { SchemaManager } from \"../features/sync/services/schema-manager.js\";\nimport { AgentManager } from \"../features/rag/services/agent-manager.js\";\n\n/**\n * Creates a composable Typesense RAG plugin for Payload CMS\n *\n * This plugin handles all Typesense-specific features:\n * - Search endpoints (semantic, hybrid, keyword)\n * - RAG endpoints (chat, session management)\n * - Schema synchronization\n * - Agent synchronization\n *\n * @param config - Typesense RAG plugin configuration\n * @returns Payload config modifier function\n */\nexport function createTypesenseRAGPlugin(config: TypesenseRAGPluginConfig) {\n const logger = new Logger({ enabled: true, prefix: \"[payload-typesense]\" });\n\n return (payloadConfig: Config): Config => {\n // Create Typesense client\n const typesenseClient = createTypesenseClient(config.typesense);\n\n // 1. Add search endpoints if enabled\n if (config.search?.enabled) {\n const searchEndpoints = createSearchEndpoints(typesenseClient, {\n typesense: config.typesense,\n features: {\n embedding: config.embeddingConfig,\n search: config.search,\n },\n collections: config.collections || {},\n });\n\n payloadConfig.endpoints = [\n ...(payloadConfig.endpoints || []),\n ...searchEndpoints,\n ];\n\n logger.debug(\"Search endpoints registered\", {\n endpointsCount: searchEndpoints.length,\n });\n }\n\n // 2. 
Add RAG endpoints if agents and callbacks are configured\n if (config.agents && config.agents.length > 0 && config.callbacks) {\n const ragEndpoints = createRAGPayloadHandlers({\n typesense: config.typesense,\n embeddingConfig: config.embeddingConfig,\n agents: config.agents,\n callbacks: config.callbacks,\n hybrid: config.hybrid,\n hnsw: config.hnsw,\n advanced: config.advanced,\n });\n\n payloadConfig.endpoints = [\n ...(payloadConfig.endpoints || []),\n ...ragEndpoints,\n ];\n\n logger.debug(\"RAG endpoints registered\", {\n endpointsCount: ragEndpoints.length,\n agentsCount: config.agents.length,\n });\n }\n\n // 3. Initialize on startup (schema sync + agent sync)\n const incomingOnInit = payloadConfig.onInit;\n payloadConfig.onInit = async (payload) => {\n if (incomingOnInit) {\n await incomingOnInit(payload);\n }\n\n try {\n // A. Sync Typesense collections schema\n if (config.collections && Object.keys(config.collections).length > 0) {\n logger.info(\"Syncing Typesense collections schema...\");\n const schemaManager = new SchemaManager(typesenseClient, {\n typesense: config.typesense,\n features: {\n embedding: config.embeddingConfig,\n },\n collections: config.collections,\n });\n await schemaManager.syncCollections();\n }\n\n // B. Sync RAG agents\n if (config.agents && config.agents.length > 0) {\n logger.info(\"Initializing RAG agents...\");\n const agentManager = new AgentManager(typesenseClient, {\n agents: config.agents,\n });\n await agentManager.syncAgents();\n }\n } catch (error) {\n // Fail soft: Log error but don't crash Payload startup\n logger.error(\"Error initializing Typesense resources\", error as Error);\n }\n };\n\n return payloadConfig;\n };\n}\n\n","import type { Client } from \"typesense\";\nimport type { CollectionFieldSchema } from \"typesense/lib/Typesense/Collection.js\";\nimport type { CollectionCreateSchema } from \"typesense/lib/Typesense/Collections.js\";\nimport type {\n IndexerAdapter,\n IndexDocument,\n VectorSearchOptions,\n AdapterSearchResult,\n} from \"@nexo-labs/payload-indexer\";\nimport { logger } from \"../core/logging/logger.js\";\nimport type {\n TypesenseFieldMapping,\n TypesenseCollectionSchema,\n TypesenseFieldSchema,\n TypesenseSearchResult,\n TypesenseCollectionInfo,\n} from \"./types.js\";\n\n/**\n * Typesense implementation of the IndexerAdapter interface\n *\n * This adapter provides type-safe field definitions for Typesense.\n * When used with createIndexerPlugin, TypeScript will validate that\n * all field mappings in your collection config are valid TypesenseFieldMapping.\n *\n * @example\n * ```typescript\n * const adapter = createTypesenseAdapter(config);\n *\n * // TypeScript infers TFieldMapping = TypesenseFieldMapping\n * const { plugin } = createIndexerPlugin({\n * adapter,\n * collections: {\n * posts: [{\n * enabled: true,\n * fields: [\n * { name: 'title', type: 'string' }, // ✅ Valid\n * { name: 'views', type: 'int64' }, // ✅ Valid\n * { name: 'tags', type: 'string[]', facet: true }, // ✅ With faceting\n * ]\n * }]\n * }\n * });\n * ```\n */\nexport class TypesenseAdapter implements IndexerAdapter<TypesenseFieldMapping, TypesenseCollectionSchema> {\n readonly name = 'typesense';\n\n constructor(private client: Client) {}\n\n /**\n * Test connection to Typesense\n */\n async testConnection(): Promise<boolean> {\n try {\n await this.client.health.retrieve();\n return true;\n } catch (error) {\n logger.error(\"Typesense connection test failed\", error);\n return false;\n }\n }\n\n /**\n * Create or update a collection 
schema\n */\n async ensureCollection(schema: TypesenseCollectionSchema): Promise<void> {\n const typesenseSchema = this.convertToTypesenseSchema(schema);\n\n try {\n // Check if collection exists\n const existing = await this.client.collections(schema.name).retrieve() as TypesenseCollectionInfo;\n\n // Collection exists, add new fields if any\n await this.updateCollectionIfNeeded(schema.name, existing, typesenseSchema);\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number };\n if (typesenseError?.httpStatus === 404) {\n // Collection doesn't exist, create it\n logger.info(`Creating collection: ${schema.name}`);\n await this.client.collections().create(typesenseSchema);\n } else {\n throw error;\n }\n }\n }\n\n /**\n * Check if a collection exists\n */\n async collectionExists(collectionName: string): Promise<boolean> {\n try {\n await this.client.collections(collectionName).retrieve();\n return true;\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number };\n if (typesenseError?.httpStatus === 404) {\n return false;\n }\n throw error;\n }\n }\n\n /**\n * Delete a collection\n */\n async deleteCollection(collectionName: string): Promise<void> {\n try {\n await this.client.collections(collectionName).delete();\n logger.info(`Deleted collection: ${collectionName}`);\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number };\n if (typesenseError?.httpStatus !== 404) {\n throw error;\n }\n }\n }\n\n /**\n * Upsert a single document\n */\n async upsertDocument(collectionName: string, document: IndexDocument): Promise<void> {\n try {\n await this.client.collections(collectionName).documents().upsert(document);\n } catch (error) {\n logger.error(`Failed to upsert document ${document.id} to ${collectionName}`, error);\n throw error;\n }\n }\n\n /**\n * Upsert multiple documents (batch)\n */\n async upsertDocuments(collectionName: string, documents: IndexDocument[]): Promise<void> {\n if (documents.length === 0) return;\n\n try {\n await this.client.collections(collectionName).documents().import(documents, {\n action: 'upsert',\n });\n } catch (error) {\n logger.error(`Failed to batch upsert ${documents.length} documents to ${collectionName}`, error);\n throw error;\n }\n }\n\n /**\n * Delete a document by ID\n */\n async deleteDocument(collectionName: string, documentId: string): Promise<void> {\n try {\n await this.client.collections(collectionName).documents(documentId).delete();\n } catch (error: unknown) {\n const typesenseError = error as { httpStatus?: number };\n // Ignore 404 errors (document already deleted)\n if (typesenseError?.httpStatus !== 404) {\n logger.error(`Failed to delete document ${documentId} from ${collectionName}`, error);\n throw error;\n }\n }\n }\n\n /**\n * Delete documents matching a filter\n * Returns the number of deleted documents\n */\n async deleteDocumentsByFilter(\n collectionName: string,\n filter: Record<string, unknown>\n ): Promise<number> {\n const filterStr = this.buildFilterString(filter);\n\n try {\n const result = await this.client.collections(collectionName).documents().delete({\n filter_by: filterStr,\n });\n return result.num_deleted || 0;\n } catch (error) {\n logger.error(`Failed to delete documents by filter from ${collectionName}`, error, { filter });\n throw error;\n }\n }\n\n /**\n * Perform a vector search\n * @typeParam TDoc - The document type to return in results\n */\n async vectorSearch<TDoc = Record<string, unknown>>(\n collectionName: string,\n 
vector: number[],\n options: VectorSearchOptions = {}\n ): Promise<AdapterSearchResult<TDoc>[]> {\n const { limit = 10, filter, includeFields, excludeFields } = options;\n\n try {\n const searchParams: Record<string, unknown> = {\n q: '*',\n vector_query: `embedding:([${vector.join(',')}], k:${limit})`,\n };\n\n if (filter) {\n searchParams['filter_by'] = this.buildFilterString(filter);\n }\n\n if (includeFields) {\n searchParams['include_fields'] = includeFields.join(',');\n }\n\n if (excludeFields) {\n searchParams['exclude_fields'] = excludeFields.join(',');\n }\n\n const result = await this.client\n .collections(collectionName)\n .documents()\n .search(searchParams) as TypesenseSearchResult<TDoc>;\n\n return (result.hits || []).map(hit => ({\n id: String((hit.document as Record<string, unknown>)?.id || ''),\n score: hit.vector_distance ?? 0,\n document: hit.document,\n }));\n } catch (error) {\n logger.error(`Vector search failed on ${collectionName}`, error);\n throw error;\n }\n }\n\n // === Private helper methods ===\n\n /**\n * Convert generic schema to Typesense-specific schema\n */\n private convertToTypesenseSchema(schema: TypesenseCollectionSchema): CollectionCreateSchema {\n return {\n name: schema.name,\n fields: schema.fields.map(field => this.convertField(field)),\n default_sorting_field: schema.defaultSortingField,\n };\n }\n\n /**\n * Convert a single field schema to Typesense format\n */\n private convertField(field: TypesenseFieldSchema): CollectionFieldSchema {\n const typesenseField: CollectionFieldSchema = {\n name: field.name,\n type: field.type,\n facet: field.facet,\n index: field.index,\n optional: field.optional,\n };\n\n // Add vector dimensions for float[] embedding fields\n if (field.type === 'float[]' && field.vectorDimensions) {\n typesenseField.num_dim = field.vectorDimensions;\n }\n\n return typesenseField;\n }\n\n /**\n * Update collection with new fields if needed\n */\n private async updateCollectionIfNeeded(\n collectionName: string,\n currentSchema: TypesenseCollectionInfo,\n targetSchema: CollectionCreateSchema\n ): Promise<void> {\n if (!currentSchema?.fields) return;\n\n const currentFields = new Set(currentSchema.fields.map(f => f.name));\n const newFields = targetSchema.fields?.filter(\n f => !currentFields.has(f.name) && f.name !== 'id'\n ) || [];\n\n if (newFields.length > 0) {\n logger.info(`Updating collection ${collectionName} with ${newFields.length} new fields`, {\n fields: newFields.map(f => f.name)\n });\n\n try {\n await this.client.collections(collectionName).update({\n fields: newFields\n });\n } catch (error) {\n logger.error(`Failed to update collection ${collectionName}`, error);\n }\n }\n }\n\n /**\n * Build a Typesense filter string from a filter object\n */\n private buildFilterString(filter: Record<string, unknown>): string {\n const parts: string[] = [];\n\n for (const [key, value] of Object.entries(filter)) {\n if (Array.isArray(value)) {\n // Array values use 'IN' syntax\n parts.push(`${key}:[${value.map(v => String(v)).join(',')}]`);\n } else if (typeof value === 'string') {\n parts.push(`${key}:=${value}`);\n } else if (typeof value === 'number') {\n parts.push(`${key}:${value}`);\n } else if (typeof value === 'boolean') {\n parts.push(`${key}:${value}`);\n }\n }\n\n return parts.join(' && ');\n }\n}\n","/**\n * Factory function for creating a TypesenseAdapter\n */\n\nimport { Client } from \"typesense\";\nimport { TypesenseAdapter } from \"./typesense-adapter.js\";\nimport type { TypesenseConnectionConfig } from 
\"../shared/types/plugin-types.js\";\n\n/**\n * Creates a TypesenseAdapter instance with the provided configuration\n *\n * @param config - Typesense connection configuration\n * @returns A configured TypesenseAdapter instance\n *\n * @example\n * ```typescript\n * import { createTypesenseAdapter } from '@nexo-labs/payload-typesense';\n *\n * const adapter = createTypesenseAdapter({\n * apiKey: process.env.TYPESENSE_API_KEY!,\n * nodes: [{\n * host: 'localhost',\n * port: 8108,\n * protocol: 'http'\n * }]\n * });\n * ```\n */\nexport function createTypesenseAdapter(config: TypesenseConnectionConfig): TypesenseAdapter {\n const client = new Client({\n apiKey: config.apiKey,\n nodes: config.nodes,\n connectionTimeoutSeconds: config.connectionTimeoutSeconds ?? 10,\n retryIntervalSeconds: config.retryIntervalSeconds,\n numRetries: config.numRetries,\n });\n\n return new TypesenseAdapter(client);\n}\n\n/**\n * Creates a TypesenseAdapter from an existing Typesense Client\n * Useful when you already have a configured client instance\n *\n * @param client - Existing Typesense Client instance\n * @returns A TypesenseAdapter instance wrapping the provided client\n */\nexport function createTypesenseAdapterFromClient(client: Client): TypesenseAdapter {\n return new TypesenseAdapter(client);\n}\n","import type { Client } from \"typesense\";\nimport { getTypesenseCollectionName } from \"../../../core/utils/naming.js\";\nimport type { TableConfig } from \"@nexo-labs/payload-indexer\";\nimport { logger } from \"../../../core/logging/logger.js\";\n\n/**\n * Deletes a document from Typesense\n * Handles both direct document deletion and chunk deletion\n */\nexport const deleteDocumentFromTypesense = async (\n typesenseClient: Client,\n collectionSlug: string,\n docId: string,\n tableConfig: TableConfig\n) => {\n try {\n // Build table name from collection slug + tableSuffix\n const tableName = getTypesenseCollectionName(collectionSlug, tableConfig);\n\n logger.debug('Attempting to delete document from Typesense', {\n documentId: docId,\n collection: collectionSlug,\n tableName,\n });\n\n // Try to delete the document directly first\n try {\n await typesenseClient.collections(tableName).documents(docId).delete();\n logger.info('Document deleted from Typesense', {\n documentId: docId,\n tableName,\n });\n } catch (docDeleteError: unknown) {\n const typesenseError = docDeleteError as { httpStatus?: number };\n\n // If document doesn't exist, try to delete chunks by parent_doc_id\n if (typesenseError.httpStatus === 404) {\n logger.debug('Document not found, attempting to delete chunks', {\n documentId: docId,\n tableName,\n });\n\n try {\n await typesenseClient\n .collections(tableName)\n .documents()\n .delete({\n filter_by: `parent_doc_id:${docId}`,\n });\n logger.info('All chunks deleted for document', {\n documentId: docId,\n tableName,\n });\n } catch (chunkDeleteError: unknown) {\n const chunkError = chunkDeleteError as { httpStatus?: number };\n\n // Ignore 404 errors (collection might not exist)\n if (chunkError.httpStatus !== 404) {\n logger.error('Failed to delete chunks for document', chunkDeleteError as Error, {\n documentId: docId,\n tableName,\n });\n } else {\n logger.debug('No chunks found to delete', { documentId: docId });\n }\n }\n } else {\n throw docDeleteError;\n }\n }\n } catch (error: unknown) {\n // Build table name for error message\n const tableName = getTypesenseCollectionName(collectionSlug, tableConfig);\n\n logger.error('Failed to delete document from Typesense', error as Error, 
{\n documentId: docId,\n collection: collectionSlug,\n tableName,\n });\n\n // Note: We don't rethrow to allow the deletion process to continue\n }\n};\n"],"mappings":";;;;;;;AAIA,MAAa,yBAAyB,oBAA+C;AACnF,QAAO,IAAI,UAAU,OAAO;EAC1B,QAAQ,gBAAgB;EACxB,0BAA0B,gBAAgB,4BAA4B;EACtE,OAAO,gBAAgB;EACxB,CAAC;;AAGJ,MAAa,0BAA0B,OAAO,WAAqC;AACjF,KAAI;AACF,QAAM,OAAO,OAAO,UAAU;AAC9B,SAAO;UACA,QAAQ;AAEf,SAAO;;;;;;ACHX,IAAIA,eAA8B;AAClC,IAAIC,sBAAqC;AAEzC,IAAIC,eAA0C;AAC9C,IAAIC,sBAAqC;AAEzC,MAAM,mBAAmB,WAAmC;CAC1D,MAAM,MAAM,UAAU,QAAQ,IAAI;AAElC,KAAI,CAAC,IACH,QAAO;AAIT,KAAI,CAAC,gBAAgB,wBAAwB,KAAK;AAChD,iBAAe,IAAI,OAAO,EACxB,QAAQ,KACT,CAAC;AACF,wBAAsB;;AAGxB,QAAO;;AAGT,MAAM,mBAAmB,WAA+C;CACtE,MAAM,MAAM,UAAU,QAAQ,IAAI;AAElC,KAAI,CAAC,IACH,QAAO;AAIT,KAAI,CAAC,gBAAgB,wBAAwB,KAAK;AAChD,iBAAe,IAAI,mBAAmB,IAAI;AAC1C,wBAAsB;;AAGxB,QAAO;;;;;;;;AAST,MAAa,oBAAoB,OAC/B,MACA,WAC6B;AAC7B,KAAI,CAAC,QAAQ,KAAK,MAAM,CAAC,SAAS,2BAA2B;AAC3D,SAAO,MAAM,0DAA0D;AACvE,SAAO;;AAKT,MAFiB,QAAQ,QAAQ,cAEhB,SACf,QAAO,wBAAwB,MAAM,OAAO;KAE5C,QAAO,wBAAwB,MAAM,OAAO;;;;;AAOhD,MAAM,0BAA0B,OAC9B,MACA,WAC6B;CAC7B,MAAM,SAAS,gBAAgB,QAAQ,OAAO;AAE9C,KAAI,CAAC,QAAQ;AACX,SAAO,MAAM,+DAA+D;AAC5E,SAAO;;AAGT,KAAI;EACF,MAAM,QAAQ,QAAQ,SAAS,QAAQ,IAAI,0BAA0B;EACrE,MAAM,aAAa,QAAQ,cAAc;AAEzC,SAAO,MAAM,+BAA+B;GAAE;GAAO;GAAY,YAAY,KAAK;GAAQ,CAAC;EAQ3F,MAAM,aANW,MAAM,OAAO,WAAW,OAAO;GAC9C;GACA,OAAO,KAAK,MAAM;GAClB;GACD,CAAC,EAEyB,KAAK,IAAI;AAEpC,SAAO,MAAM,8BAA8B,EAAE,iBAAiB,WAAW,QAAQ,CAAC;AAElF,MACE,CAAC,aACD,CAAC,MAAM,QAAQ,UAAU,IACzB,UAAU,WAAW,YACrB;AACA,UAAO,KAAK,8CAA8C;IACxD,UAAU;IACV,UAAU,WAAW;IACtB,CAAC;AACF,UAAO;;AAGT,SAAO;UACA,OAAO;AACd,SAAO,MAAM,uCAAuC,OAAO;GACzD,YAAY,KAAK;GACjB,OAAO,QAAQ;GAChB,CAAC;AACF,SAAO;;;;;;AAOX,MAAM,0BAA0B,OAC9B,MACA,WAC6B;CAC7B,MAAM,SAAS,gBAAgB,QAAQ,OAAO;AAE9C,KAAI,CAAC,QAAQ;AACX,SAAO,MAAM,+DAA+D;AAC5E,SAAO;;AAGT,KAAI;EACF,MAAM,QAAQ,QAAQ,SAAS;EAC/B,MAAM,aAAa,QAAQ,cAAc;AAEzC,SAAO,MAAM,+BAA+B;GAAE;GAAO;GAAY,YAAY,KAAK;GAAQ,CAAC;EAQ3F,MAAM,aALS,MADQ,OAAO,mBAAmB,EAAE,OAAO,CAAC,CACvB,aAAa;GAC/C,SAAS;IAAE,MAAM;IAAQ,OAAO,CAAC,EAAE,MAAM,KAAK,MAAM,EAAE,CAAC;IAAE;GACzD,UAAU,SAAS;GACpB,CAAC,EAEuB,UAAU;AAEnC,SAAO,MAAM,8BAA8B,EAAE,iBAAiB,WAAW,QAAQ,CAAC;AAElF,MACE,CAAC,aACD,CAAC,MAAM,QAAQ,UAAU,IACzB,UAAU,WAAW,YACrB;AACA,UAAO,KAAK,8CAA8C;IACxD,UAAU;IACV,UAAU,WAAW;IACtB,CAAC;AACF,UAAO;;AAGT,SAAO;UACA,OAAO;AACd,SAAO,MAAM,uCAAuC,OAAO;GACzD,YAAY,KAAK;GACjB,OAAO,QAAQ;GAChB,CAAC;AACF,SAAO;;;;;;;;;;;;AAaX,MAAa,6BAA6B,OACxC,MACA,WACuC;AACvC,KAAI,CAAC,QAAQ,KAAK,MAAM,CAAC,SAAS,2BAA2B;AAC3D,SAAO,MAAM,0DAA0D;AACvE,SAAO;;AAKT,MAFiB,QAAQ,QAAQ,cAEhB,SACf,QAAO,iCAAiC,MAAM,OAAO;KAErD,QAAO,iCAAiC,MAAM,OAAO;;;;;AAOzD,MAAM,mCAAmC,OACvC,MACA,WACuC;CACvC,MAAM,SAAS,gBAAgB,QAAQ,OAAO;AAE9C,KAAI,CAAC,QAAQ;AACX,SAAO,MAAM,+DAA+D;AAC5E,SAAO;;AAGT,KAAI;EACF,MAAM,QAAQ,QAAQ,SAAS,QAAQ,IAAI,0BAA0B;EACrE,MAAM,aAAa,QAAQ,cAAc;AAEzC,SAAO,MAAM,mDAAmD;GAAE;GAAO;GAAY,CAAC;EAEtF,MAAM,WAAW,MAAM,OAAO,WAAW,OAAO;GAC9C;GACA,OAAO,KAAK,MAAM;GAClB;GACD,CAAC;EAEF,MAAM,YAAY,SAAS,KAAK,IAAI;AAEpC,MACE,CAAC,aACD,CAAC,MAAM,QAAQ,UAAU,IACzB,UAAU,WAAW,YACrB;AACA,UAAO,KAAK,8CAA8C;IACxD,UAAU;IACV,UAAU,WAAW;IACtB,CAAC;AACF,UAAO;;AAGT,SAAO;GACL;GACA,OAAO;IACL,cAAc,SAAS,OAAO,iBAAiB;IAC/C,aAAa,SAAS,OAAO,gBAAgB;IAC9C;GACF;UACM,OAAO;AACd,SAAO,MAAM,kDAAkD,OAAO;GACpE,YAAY,KAAK;GACjB,OAAO,QAAQ;GAChB,CAAC;AACF,SAAO;;;;;;;AAQX,MAAM,mCAAmC,OACvC,MACA,WACuC;CACvC,MAAM,kBAAkB,MAAM,wBAAwB,MAAM,OAAO;AAEnE,KAAI,CAAC,gBACH,QAAO;CAIT,MAAM,kBAAkB,KAAK,KAAK,KAAK,SAAS,EAAE;AAElD,QAAO;EACL,WAAW;EACX,OAAO;GACL,cAAc;GACd,aAAa;GACd;EACF;;;;;;;;;AAUH,MAAa,mCAAmC,OAC9C,OACA,WAC4C;AAC5C,KAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,SAAO,MAAM,mDAA
mD;AAChE,SAAO;;CAIT,MAAM,aAAa,MAAM,QAAO,MAAK,KAAK,EAAE,MAAM,CAAC,UAAU,0BAA0B;AAEvF,KAAI,WAAW,WAAW,GAAG;AAC3B,SAAO,MAAM,gEAAgE;AAC7E,SAAO;;AAKT,MAFiB,QAAQ,QAAQ,cAEhB,SACf,QAAO,uCAAuC,YAAY,OAAO;KAEjE,QAAO,uCAAuC,YAAY,OAAO;;;;;AAOrE,MAAM,yCAAyC,OAC7C,YACA,WAC4C;CAC5C,MAAM,SAAS,gBAAgB,QAAQ,OAAO;AAE9C,KAAI,CAAC,QAAQ;AACX,SAAO,MAAM,qEAAqE;AAClF,SAAO;;AAGT,KAAI;EACF,MAAM,QAAQ,QAAQ,SAAS,QAAQ,IAAI,0BAA0B;EACrE,MAAM,aAAa,QAAQ,cAAc;AAEzC,SAAO,MAAM,0DAA0D;GACrE;GACA;GACA,WAAW,WAAW;GACvB,CAAC;EAEF,MAAM,WAAW,MAAM,OAAO,WAAW,OAAO;GAC9C;GACA,OAAO,WAAW,KAAI,MAAK,EAAE,MAAM,CAAC;GACpC;GACD,CAAC;EAEF,MAAM,aAAa,SAAS,KAAK,KAAI,SAAQ,KAAK,UAAU;AAO5D,MAAI,CAJa,WAAW,OAC1B,QAAO,MAAM,QAAQ,IAAI,IAAI,IAAI,WAAW,WAC7C,EAEc;AACb,UAAO,KAAK,qDAAqD;IAC/D,UAAU;IACV,WAAW,WAAW;IACvB,CAAC;AACF,UAAO;;AAGT,SAAO,KAAK,kDAAkD;GAC5D,OAAO,WAAW;GAClB,aAAa,SAAS,OAAO,gBAAgB;GAC9C,CAAC;AAEF,SAAO;GACL;GACA,OAAO;IACL,cAAc,SAAS,OAAO,iBAAiB;IAC/C,aAAa,SAAS,OAAO,gBAAgB;IAC9C;GACF;UACM,OAAO;AACd,SAAO,MAAM,yDAAyD,OAAO;GAC3E,WAAW,WAAW;GACtB,OAAO,QAAQ;GAChB,CAAC;AACF,SAAO;;;;;;;AAQX,MAAM,yCAAyC,OAC7C,YACA,WAC4C;CAC5C,MAAM,SAAS,gBAAgB,QAAQ,OAAO;AAE9C,KAAI,CAAC,QAAQ;AACX,SAAO,MAAM,qEAAqE;AAClF,SAAO;;AAGT,KAAI;EACF,MAAM,QAAQ,QAAQ,SAAS;EAC/B,MAAM,aAAa,QAAQ,cAAc;AAEzC,SAAO,MAAM,0DAA0D;GACrE;GACA;GACA,WAAW,WAAW;GACvB,CAAC;EAEF,MAAM,iBAAiB,OAAO,mBAAmB,EAAE,OAAO,CAAC;EAC3D,MAAMC,aAAyB,EAAE;EACjC,IAAI,uBAAuB;AAG3B,OAAK,MAAM,QAAQ,YAAY;GAC7B,MAAM,SAAS,MAAM,eAAe,aAAa;IAC/C,SAAS;KAAE,MAAM;KAAQ,OAAO,CAAC,EAAE,MAAM,KAAK,MAAM,EAAE,CAAC;KAAE;IACzD,UAAU,SAAS;IACpB,CAAC;AAEF,cAAW,KAAK,OAAO,UAAU,OAAO;AACxC,2BAAwB,KAAK,KAAK,KAAK,SAAS,EAAE;;AAQpD,MAAI,CAJa,WAAW,OAC1B,QAAO,MAAM,QAAQ,IAAI,IAAI,IAAI,WAAW,WAC7C,EAEc;AACb,UAAO,KAAK,qDAAqD;IAC/D,UAAU;IACV,WAAW,WAAW;IACvB,CAAC;AACF,UAAO;;AAGT,SAAO,KAAK,kDAAkD;GAC5D,OAAO,WAAW;GAClB,iBAAiB;GAClB,CAAC;AAEF,SAAO;GACL;GACA,OAAO;IACL,cAAc;IACd,aAAa;IACd;GACF;UACM,OAAO;AACd,SAAO,MAAM,yDAAyD,OAAO;GAC3E,WAAW,WAAW;GACtB,OAAO,QAAQ;GAChB,CAAC;AACF,SAAO;;;;;;;;;;;;;;;;ACjaX,SAAgB,uBACd,QACA,qBACA,iBACK;CACL,MAAM,WAAW,gBAAgB,MAAM,GAAG,YAAY;CACtD,MAAM,eAAe,IAAI,IACvB,GAAG,SAAS,KAAK,gBAAgB,MAAM,GAAG,KAAK,GAAG,gBAAgB,MAAM,GAAG,KAAK,eACjF;AAGD,cAAa,aAAa,IAAI,KAAK,OAAO,YAAY;AACtD,cAAa,aAAa,IAAI,gBAAgB,OAAO;AACrD,cAAa,aAAa,IAAI,yBAAyB,oBAAoB;AAE3E,KAAI,OAAO,OACT,cAAa,aAAa,IAAI,mBAAmB,OAAO,OAAO;AAGjE,cAAa,aAAa,IAAI,uBAAuB,OAAO;AAE5D,QAAO;;;;;;;;AAST,SAAgB,yBAAyB,QAA8B;CACrE,MAAM,EACJ,mBACA,gBACA,mBACA,WAAW,IACX,iBAAiB,EAAE,KACjB;AAEJ,QAAO,kBAAkB,KAAK,eAAuB;EACnD,MAAMC,UAAkC;GACtC;GACA,UAAU;GACV,cAAc,eAAe,eAAe,KAAK,IAAI,CAAC,OAAO,SAAS;GACtE,gBAAgB;GAChB,GAAG,0BAA0B,eAAe;GAC7C;AAGD,MAAI,qBAAqB,kBAAkB,SAAS,EAElD,SAAQ,YAAY,kBADA,kBAAkB,KAAK,OAAe,IAAI,GAAG,GAAG,CAAC,KAAK,IAAI,CAC5B;AAGpD,SAAO;GACP;;;;;;;;AASJ,SAAS,0BAA0B,QAAoD;CACrF,MAAMC,SAA+B,EAAE;AAEvC,KAAI,OAAO,wBAAwB,OACjC,QAAO,wBAAwB,OAAO;AAGxC,KAAI,OAAO,aAAa,OACtB,QAAO,YAAY,OAAO;AAG5B,KAAI,OAAO,WAAW,OACpB,QAAO,SAAS,OAAO;AAGzB,KAAI,OAAO,wBAAwB,OACjC,QAAO,wBAAwB,OAAO;AAGxC,KAAI,OAAO,mBAAmB,OAC5B,QAAO,kBAAkB,OAAO;AAGlC,QAAO;;;;;;;;AAST,SAAgB,4BAA4B,QAA8B;AACxE,QAAO,EACL,UAAU,yBAAyB,OAAO,EAC3C;;;;;;;;;;AAWH,SAAgB,wBACd,QAAQ,IACR,gBAAgB,MAChB,cAAc,oBACd;AACA,QAAO;EACL;EACA,uBAAuB;EACvB,cAAc;EACf;;;;;;;;;;;AC1HH,SAAgB,uBAAuB,MAAwC;AAC7E,KAAI,CAAC,KAAK,WAAW,SAAS,CAC5B,QAAO;CAGT,MAAM,OAAO,KAAK,MAAM,EAAE;AAE1B,KAAI,SAAS,SACX,QAAO,EAAE,KAAK,UAAU;AAG1B,KAAI;EACF,MAAM,SAAS,KAAK,MAAM,KAAK;EAC/B,MAAMC,QAA2B,EAAE,KAAK,QAAQ;AAGhD,MAAI,OAAO,gBACT,OAAM,iBAAiB,OAAO;WACrB,OAAO,cAAc,gBAC9B,OAAM,iBAAiB,OAAO,aAAa;AAI7C,MAAI,OAAO,YAAY,OACrB,OAAM,UAAU,OAAO;WACd,OAAO,cAAc,OAC9B,OAAM,UAAU,OAAO,aAAa;AAItC,MAAI,OAAO,QACT,OAAM
,UAAU,OAAO;AAGzB,SAAO;UACA,GAAG;AACV,WAAO,MAAM,mDAAmD,EAAW;AAC3E,SAAO;;;;;;;;;;AAWX,SAAgB,0BACd,SACA,sBACe;CACf,MAAMC,aAA4B,EAAE;AAEpC,MAAK,MAAM,UAAU,QACnB,KAAI,OAAO,KACT,MAAK,MAAM,OAAO,OAAO,MAAM;EAC7B,MAAM,MAAM,IAAI;EAChB,MAAM,QAAQ,IAAI,mBAAmB,IAAI,cAAc;EACvD,MAAM,iBAAiB,OAAO,gBAAgB,mBAAmB;EAEjE,MAAM,OAAO,uBACT,qBAAqB,eAAe,GACpC,uBAAuB,eAAe;EAE1C,MAAM,cAAc,IAAI,cAAc;EAEtC,MAAMC,SAAsB;GAC1B,IAAI,IAAI,MAAM;GACd,OAAO,IAAI,SAAS;GACpB,MAAM,IAAI,QAAQ;GAClB;GACA,YAAY,IAAI,eAAe;GAC/B,gBAAgB;GAChB,SAAS;GACT,SAAS,YAAY,UAAU,GAAG,IAAI,IAAI,YAAY,SAAS,MAAM,QAAQ;GAC9E;AAED,aAAW,KAAK,OAAO;;AAK7B,QAAO;;;;;;;;AAST,SAAgB,iBAAiB,SAA6C;CAC5E,IAAI,cAAc;AAElB,MAAK,MAAM,UAAU,QACnB,KAAI,OAAO,KACT,MAAK,MAAM,OAAO,OAAO,MAAM;EAC7B,MAAM,MAAM,IAAI;AAChB,kBAAgB,IAAI,cAAc,MAAM;;AAK9C,QAAO;;;;;;;;;;AAWT,eAAsB,0BACpB,UACA,SACA,sBACiC;CACjC,MAAM,SAAS,SAAS,KAAM,WAAW;CACzC,MAAM,UAAU,IAAI,aAAa;CAEjC,IAAI,SAAS;CACb,IAAIC,UAAyB,EAAE;CAC/B,IAAI,sBAAsB;CAC1B,IAAIC,iBAAgC;CACpC,IAAI,cAAc;CAClB,IAAI,cAAc;AAElB,QAAO,MAAM;EACX,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,MAAI,KAAM;AAEV,YAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;EACjD,MAAM,QAAQ,OAAO,MAAM,KAAK;AAChC,WAAS,MAAM,KAAK,IAAI;AAExB,OAAK,MAAM,QAAQ,OAAO;GACxB,MAAM,QAAQ,uBAAuB,KAAK;AAC1C,OAAI,CAAC,MAAO;AAGZ,OAAI,QACF,SAAQ,MAAM;AAIhB,OAAI,CAAC,kBAAkB,MAAM,eAC3B,kBAAiB,MAAM;AAIzB,OAAI,CAAC,uBAAuB,MAAM,SAAS;AACzC,cAAU,0BAA0B,MAAM,SAAS,qBAAqB;AACxE,kBAAc,iBAAiB,MAAM,QAAQ;AAC7C,0BAAsB;;AAIxB,OAAI,MAAM,QACR,gBAAe,MAAM;;;AAK3B,QAAO;EACL;EACA;EACA;EACA;EACD;;;;;;;;;AAUH,SAAgB,uBACd,UACA,QAC4B;CAC5B,MAAM,SAAS,SAAS,KAAM,WAAW;CACzC,MAAM,UAAU,IAAI,aAAa;CACjC,MAAM,UAAU,IAAI,aAAa;CAEjC,IAAI,SAAS;AAEb,QAAO,IAAI,eAAe;EACxB,MAAM,MAAM,YAAY;AACtB,UAAO,MAAM;IACX,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,QAAI,MAAM;AACR,gBAAW,OAAO;AAClB;;AAGF,cAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;IACjD,MAAM,QAAQ,OAAO,MAAM,KAAK;AAChC,aAAS,MAAM,KAAK,IAAI;AAExB,SAAK,MAAM,QAAQ,OAAO;KACxB,MAAM,QAAQ,uBAAuB,KAAK;AAE1C,SAAI,SAAS,OACX,QAAO,MAAM;AAIf,SAAI,KACF,YAAW,QAAQ,QAAQ,OAAO,OAAO,KAAK,CAAC;;;;EAKvD,SAAS;AACP,UAAO,QAAQ;;EAElB,CAAC;;;;;;;;AASJ,SAAS,uBAAuB,gBAAgC;AAC9D,KAAI,eAAe,SAAS,UAAU,CACpC,QAAO;AAET,KAAI,eAAe,SAAS,OAAO,CACjC,QAAO;AAET,KAAI,eAAe,SAAS,OAAO,CACjC,QAAO;AAET,KAAI,eAAe,SAAS,OAAO,CACjC,QAAO;AAET,QAAO;;;;;;;;;;;;AC9QT,eAAsB,6BACpB,QACA,iBAAyB,wBACP;AAClB,KAAI;AAEF,QAAM,OAAO,YAAY,eAAe,CAAC,UAAU;AACnD,WAAO,KAAK,0CAA0C,EAAE,YAAY,gBAAgB,CAAC;AACrF,SAAO;UACAC,OAAgB;AAEvB,MADuB,OACH,eAAe,KAAK;AACtC,YAAO,KAAK,oCAAoC,EAAE,YAAY,gBAAgB,CAAC;AAE/E,OAAI;AAIF,UAAM,OAAO,aAAa,CAAC,OAAO;KAChC,MAAM;KACN,QAAQ;MACN;OAAE,MAAM;OAAmB,MAAM;OAAU;MAC3C;OAAE,MAAM;OAAY,MAAM;OAAU;MACpC;OAAE,MAAM;OAAa,MAAM;OAAS;MACpC;OAAE,MAAM;OAAQ,MAAM;OAAU;MAChC;OAAE,MAAM;OAAW,MAAM;OAAU;MACpC;KACF,CAAC;AAEF,aAAO,KAAK,gDAAgD,EAAE,YAAY,gBAAgB,CAAC;AAC3F,WAAO;YACA,aAAa;AACpB,aAAO,MAAM,4CAA4C,aAAsB,EAC7E,YAAY,gBACb,CAAC;AACF,WAAO;;;AAIX,WAAO,MAAM,0CAA0C,OAAgB,EACrE,YAAY,gBACb,CAAC;AACF,SAAO;;;;;;;;AASX,SAAgB,sBAA2D;AACzE,QAAO;EACL,QAAQ;GACN,OAAO;GACP,eAAe;GACf,aAAa;GACd;EACD,MAAM;GACJ,gBAAgB;GAChB,GAAG;GACH,IAAI;GACJ,gBAAgB;GAChB,gBAAgB;GACjB;EACD,UAAU;GACR,qBAAqB;GACrB,UAAU;GACV,QAAQ;GACR,qBAAqB;GACrB,gBAAgB;GACjB;EACF;;;;;;;;AASH,SAAgB,2BAA2B,YAAmC;CAC5E,MAAM,WAAW,qBAAqB;AAEtC,KAAI,CAAC,WACH,QAAO;AAGT,QAAO;EACL,QAAQ;GAAE,GAAG,SAAS;GAAQ,GAAG,WAAW;GAAQ;EACpD,MAAM;GAAE,GAAG,SAAS;GAAM,GAAG,WAAW;GAAM;EAC9C,UAAU;GAAE,GAAG,SAAS;GAAU,GAAG,WAAW;GAAU;EAC3D;;;;;;;;;;;;;;;;;;;AC9BH,eAAsB,iBACpB,iBACA,cACA,SAC0B;CAE1B,MAAM,eAAe,uBACnB,SACA,aAAa,SACb,gBACD;CAGD,MAAM,cAAc,4BAA4B;EAC9C,aAAa,QAAQ;EACrB,gBAAgB,QAAQ;EACxB,mBAAmB,QAAQ;EAC3B,QAAQ,QAAQ;EAChB,mBAAmB,aAAa;EAChC,UAAU,aAAa
,YAAY;EACnC,gBAAgB,aAAa;EAC9B,CAAC;CAGF,MAAM,WAAW,MAAM,MAAM,aAAa,UAAU,EAAE;EACpD,QAAQ;EACR,SAAS;GACP,gBAAgB;GAChB,uBAAuB,gBAAgB;GACxC;EACD,MAAM,KAAK,UAAU,YAAY;EAClC,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;EAChB,MAAM,YAAY,MAAM,SAAS,MAAM;AACvC,QAAM,IAAI,MAAM,4BAA4B,YAAY;;AAO1D,QAAO;EACL;EACA,aALkB,SAAS,QAAQ,IAAI,eAAe,EACvB,SAAS,oBAAoB,IAAI;EAKhE,SAAS,EAAE;EACZ;;;;;;;;;;;;;ACjFH,eAAsB,eACpB,QACA,QAC2B;CAC3B,MAAM,EAAE,SAAS,gBAAgB,qBAAqB;AAGtD,KAAI,oBAAoB,CAAC,iBAAiB,SAAS,eAAe,CAChE,OAAM,IAAI,MACR,uBAAuB,eAAe,oBAAoB,iBAAiB,KAAK,KAAK,GACtF;AAGH,KAAI;EAEF,MAAM,WAAY,MAAM,OACrB,YAAY,eAAe,CAC3B,UAAU,QAAQ,CAClB,UAAU;EAGb,MAAM,YAAY,SAAS,cAAc;AAEzC,MAAI,CAAC,UACH,OAAM,IAAI,MAAM,yBAAyB;AAG3C,SAAO;GACL,IAAI,SAAS;GACb,YAAY;GACZ,OAAO,SAAS;GAChB,MAAM,SAAS;GACf,aAAa,SAAS;GACtB,YAAY;GACb;UACMC,OAAgB;AAEvB,MAAI,SAAS,OAAO,UAAU,YAAY,gBAAgB,SAAS,MAAM,eAAe,IACtF,OAAM,IAAI,MAAM,oBAAoB,UAAU;AAEhD,QAAM;;;;;;;;;;;;;;AC3CV,eAAsB,iBACpB,SACA,QACA,SAAwB,EAAE,EACO;CACjC,MAAM,iBAAiB,OAAO,kBAAkB;CAChD,MAAM,WAAW,OAAO,uBAAuB,OAAU,KAAK;CAE9D,MAAM,aAAa,IAAI,KAAK,KAAK,KAAK,GAAG,SAAS;CAElD,MAAM,eAAe,MAAM,QAAQ,KAAK;EACtC,YAAY;EACZ,OAAO,EACL,KAAK;GACH,EACE,MAAM,EACJ,QAAQ,QACT,EACF;GACD,EACE,QAAQ,EACN,QAAQ,UACT,EACF;GACD,EACE,eAAe,EACb,cAAc,WAAW,aAAa,EACvC,EACF;GACF,EACF;EACD,MAAM;EACN,OAAO;EACR,CAAC;AAEF,KAAI,CAAC,aAAa,KAAK,OACrB,QAAO;AAGT,QAAO,aAAa,KAAK;;;;;;;;;;;AAY3B,eAAsB,2BACpB,SACA,QACA,gBACA,SAAwB,EAAE,EACO;CACjC,MAAM,iBAAiB,OAAO,kBAAkB;CAEhD,MAAM,eAAe,MAAM,QAAQ,KAAK;EACtC,YAAY;EACZ,OAAO,EACL,KAAK,CACH,EACE,iBAAiB,EACf,QAAQ,gBACT,EACF,EACD,EACE,MAAM,EACJ,QAAQ,QACT,EACF,CACF,EACF;EACD,OAAO;EACR,CAAC;AAEF,KAAI,CAAC,aAAa,KAAK,OACrB,QAAO;AAGT,QAAO,aAAa,KAAK;;;;;;;;;;;AAY3B,eAAsB,aACpB,SACA,QACA,gBACA,SAAwB,EAAE,EACO;CACjC,MAAM,iBAAiB,OAAO,kBAAkB;CAEhD,MAAM,eAAe,MAAM,QAAQ,KAAK;EACtC,YAAY;EACZ,OAAO,EACL,KAAK,CACH,EACE,iBAAiB,EACf,QAAQ,gBACT,EACF,EACD,EACE,MAAM,EACJ,QAAQ,QACT,EACF,CACF,EACF;EACD,OAAO;EACR,CAAC;AAEF,KAAI,CAAC,aAAa,KAAK,OACrB,QAAO;CAGT,MAAM,UAAU,aAAa,KAAK;AAClC,KAAI,CAAC,QACH,QAAO;AAET,OAAM,QAAQ,OAAO;EACnB,YAAY;EACZ,OAAO,EACL,iBAAiB,EACf,QAAQ,gBACT,EACF;EACD,MAAM;GACJ,QAAQ;GACR,4BAAW,IAAI,MAAM,EAAC,aAAa;GACpC;EACF,CAAC;AAEF,QAAO;EACL,iBAAiB,QAAQ;EACzB,UAAU,QAAQ,YAAY,EAAE;EAChC,QAAQ;EACR,cAAc,QAAQ;EACtB,YAAY,QAAQ;EACpB,eAAe,QAAQ;EACxB;;;;;;;;;;;AC5KH,SAAgB,eAAe,OAAyB;AACtD,QAAO,SAAS,KAAK,UAAU,MAAM,CAAC;;;;;;;;;AAUxC,SAAgB,aACd,YACA,SACA,OACM;CACN,MAAM,OAAO,eAAe,MAAM;AAClC,YAAW,QAAQ,QAAQ,OAAO,KAAK,CAAC;;;;;;;;;;;;;;;;;ACkB1C,eAAsB,gBACpB,SACA,QACA,gBACA,aACA,kBACA,SACA,UACA,iBAAiC,iBAClB;AACf,KAAI;EAEF,MAAM,WAAW,MAAM,QAAQ,KAAK;GAClC,YAAY;GACZ,OAAO,EACL,iBAAiB,EACf,QAAQ,gBACT,EACF;GACD,OAAO;GACR,CAAC;EAEF,MAAMC,iBAAyC;GAC7C,MAAM;GACN,SAAS;GACT,4BAAW,IAAI,MAAM,EAAC,aAAa;GACpC;EAED,MAAMC,sBAA8C;GAClD,MAAM;GACN,SAAS;GACT,4BAAW,IAAI,MAAM,EAAC,aAAa;GACnC,SAAS,QAAQ,KAAK,OAAO;IAC3B,IAAI,EAAE;IACN,OAAO,EAAE;IACT,MAAM,EAAE;IACR,aAAa,EAAE;IACf,MAAM,EAAE;IACT,EAAE;GACJ;AAED,MAAI,SAAS,KAAK,SAAS,KAAK,SAAS,KAAK,GAE5C,OAAM,sBACJ,SACA,SAAS,KAAK,IACd,gBACA,qBACA,UACA,eACD;MAGD,OAAM,iBACJ,SACA,QACA,gBACA,gBACA,qBACA,UACA,eACD;UAEI,OAAO;AACd,WAAO,MAAM,6BAA6B,OAAgB;GACxD;GACA;GACD,CAAC;;;;;;AAQN,eAAe,sBACb,SACA,SACA,gBACA,qBACA,UACA,gBACe;CACf,MAAM,mBAAoB,QAAQ,YAAyC,EAAE;CAC7E,MAAM,mBAAoB,QAAQ,YAAgC,EAAE;CAEpE,MAAM,WAAW;EAAC,GAAG;EAAkB;EAAgB;EAAoB;CAC3E,MAAM,cAAc,CAAC,GAAG,kBAAkB,GAAG,SAAS;CACtD,MAAM,eACH,QAAQ,gBAAgB,KAAK,SAAS,QAAQ,KAAK,MAAM,MAAM,EAAE,OAAO,OAAO,EAAE;CACpF,MAAM,aACH,QAAQ,cAAc,KAAK,SAAS,QAAQ,KAAK,MAAM,OAAO,EAAE,YAAY,IAAI,EAAE;AAErF,OAAM,QAAQ,OAAO;EACnB,YAAY;EACZ,IAAI,QAAQ;EACZ,MAAM;GACJ;GACA,UAAU;GACV,cAAc;GACd,YAAY;GACZ,gCAAe,IAAI,MAA
M,EAAC,aAAa;GACvC,QAAQ;GACT;EACF,CAAC;AAEF,UAAO,KAAK,qCAAqC;EAC/C,WAAW,QAAQ;EACnB,gBAAgB,QAAQ;EACxB;EACA;EACD,CAAC;;;;;AAMJ,eAAe,iBACb,SACA,QACA,gBACA,gBACA,qBACA,UACA,gBACe;CACf,MAAM,cAAc,SAAS,QAAQ,KAAK,MAAM,MAAM,EAAE,OAAO,OAAO,EAAE;CACxE,MAAM,YAAY,SAAS,QAAQ,KAAK,MAAM,OAAO,EAAE,YAAY,IAAI,EAAE;AAEzE,OAAM,QAAQ,OAAO;EACnB,YAAY;EACZ,MAAM;GACJ,MAAM;GACN,iBAAiB;GACjB,QAAQ;GACR,UAAU,CAAC,gBAAgB,oBAAoB;GAC/C;GACA,cAAc;GACd,YAAY;GACZ,gCAAe,IAAI,MAAM,EAAC,aAAa;GACxC;EACF,CAAC;AAEF,UAAO,KAAK,yCAAyC;EACnD;EACA;EACA;EACA;EACD,CAAC;;;;;;;;AChMJ,MAAa,gBAAgB,MAAW,YAA2B;AAC/D,QAAO,IAAI,SAAS,KAAK,UAAU,KAAK,EAAE;EACxC,SAAS,EAAE,gBAAgB,oBAAoB;EAC/C,GAAG;EACJ,CAAC;;;;;AAMN,eAAsB,oBACpB,SACA,QAWA;AAEA,KAAI,CAAC,MAAM,OAAO,iBAAiB,QAAQ,CACzC,QAAO;EACL,SAAS;EACT,OAAO,aAAa,EAAE,OAAO,kDAAkD,EAAE,EAAE,QAAQ,KAAK,CAAC;EAClG;AAIH,KAAI,CAAC,QAAQ,OAAO,CAAC,QAAQ,KAC3B,QAAO;EACL,SAAS;EACT,OAAO,aAAa,EAAE,OAAO,iBAAiB,EAAE,EAAE,QAAQ,KAAK,CAAC;EACjE;CAGH,MAAM,EAAE,IAAI,QAAQ,UAAU,QAAQ;CACtC,MAAM,YAAY,SAAS;CAC3B,MAAM,UAAU,MAAM,OAAO,YAAY;CACzC,MAAM,OAAO,MAAM,QAAQ,QAAQ;AAGnC,KAAI,CAAC,KACH,QAAO;EACL,SAAS;EACT,OAAO,aAAa,EAAE,OAAO,kBAAkB,EAAE,EAAE,QAAQ,KAAK,CAAC;EAClE;AAIH,KAAI,CAAC,KAAK,WAAW,OAAO,KAAK,YAAY,YAAY,KAAK,QAAQ,MAAM,KAAK,GAC/E,QAAO;EACL,SAAS;EACT,OAAO,aAAa,EAAE,OAAO,2BAA2B,EAAE,EAAE,QAAQ,KAAK,CAAC;EAC3E;AAKH,QAAO;EACL,SAAS;EACT;EACA;EACA;EACA,aAPkB,KAAK,QAAQ,MAAM;EAQrC;EACD;;;;;;;;AC5DH,eAAsB,8BACpB,aACA,QACA,iBACmB;AACnB,QAAO,MAAM,4CAA4C;CAEzD,MAAM,kBAAkB,OAAO;AAE/B,KAAI,CAAC,gBACD,OAAM,IAAI,MAAM,kCAAkC;CAGtD,IAAI;CAGJ,MAAM,eAAe,gBAAgB;CACrC,MAAM,SAAS,gBAAgB;CAC/B,MAAM,QAAQ,gBAAgB;CAC9B,MAAM,aAAa,gBAAgB;CAEnC,MAAM,gBAAgB,IAAI,OAAO;EAAE,SAAS;EAAM,QAAQ;EAAmB,CAAC;AAE9E,KAAI,iBAAiB,SACjB,YAAW,IAAI,wBAAwB;EACnC,MAAM;EACE;EACD;EACK;EACf,EAA0B,cAAc;KAExC,YAAW,IAAI,wBAAwB;EACpC,MAAM;EACE;EACD;EACK;EACf,EAA0B,cAAc;AAG7B,KAAI,qBAAqB,UAAU,eAAe,gBAAgB;CAMlF,MAAM,kBAAkB,MAAM,SAAS,kBAAkB,YAAY;AAErE,KAAI,CAAC,gBACH,OAAM,IAAI,MAAM,+BAA+B;CAKjD,MAAM,YAAY,SAAS;AAE3B,KAAI,OAAO,yBAAyB;EAClC,MAAM,oBAAoB,OAAO,wBAC/B,WACA,gBAAgB,MAAM,YACvB;AACD,kBAAgB,KAAK,kBAAkB;AAEvC,SAAO,KAAK,oCAAoC;GAC9C,OAAO;GACP,aAAa,gBAAgB,MAAM;GACnC,SAAS,kBAAkB;GAC5B,CAAC;;AAGJ,QAAO,gBAAgB;;;;;;;;AC7EzB,eAAsB,wBACpB,QACA,SACA,QACA,gBACA,aACA,kBACA,SACA,iBACe;AACf,KAAI,CAAC,kBAAkB,CAAC,OAAO,gBAC7B;AAGF,OAAM,OAAO,gBACX,SACA,QACA,gBACA,aACA,kBACA,SACA,iBACA,OAAO,eACR;AAED,UAAO,KAAK,oCAAoC,EAC9C,gBACD,CAAC;;;;;;;;AC3BJ,eAAsB,yBACpB,QACA,SACA,QACA,WACA,aAC0B;AAC1B,KAAI,CAAC,OAAO,0BAA0B,CAAC,OAAO,gBAC5C,QAAO;CAKT,MAAM,uBAF2B,OAAO,uBAAuB,YAAY,GAChD,OAAO,uBAAuB,YAAY,GAAG;CAGxE,MAAM,aAAa,MAAM,OAAO,gBAAgB,SAAS,QAAQ,qBAAqB;AAEtF,KAAI,CAAC,WAAW,SAAS;AACvB,WAAO,KAAK,iCAAiC;GAC3C;GACA,OAAO,WAAW;GAClB,MAAM,WAAW;GACjB,WAAW,WAAW;GACvB,CAAC;AACF,SAAO,aACL;GACE,OAAO;GACP,YAAY;IACV,OAAO,WAAW;IAClB,MAAM,WAAW;IACjB,WAAW,WAAW;IACtB,UAAU,WAAW;IACtB;GACF,EACD,EAAE,QAAQ,KAAK,CAChB;;AAGH,UAAO,KAAK,sDAAsD;EAChE;EACA;EACA,OAAO,WAAW;EAClB,MAAM,WAAW;EACjB,WAAW,WAAW;EACvB,CAAC;AAEF,QAAO;;;;;;;;AC9CT,SAAgB,oBAAoB,iBAGlC;CACA,MAAM,kBAAkB,gBAAgB,QACrC,KAAK,UAAU,MAAM,MAAM,OAAO,OACnC,EACD;CACD,MAAM,eAAe,gBAAgB,QAClC,KAAK,UAAU,OAAO,MAAM,YAAY,IACzC,EACD;AAED,UAAO,KAAK,gCAAgC;EAC1C,aAAa;EACb,cAAc;EACf,CAAC;AAEF,QAAO;EAAE,aAAa;EAAiB;EAAc;;;;;AAMvD,eAAsB,uBACpB,QACA,SACA,QACA,aACA,cACA,WACe;AACf,KAAI,CAAC,OAAO,kBACV;CAGF,MAAM,aAAa,MAAM,OAAO,kBAAkB,SAAS,OAAO;AAElE,WAAU;EACR,MAAM;EACN,MAAM;GACJ,aAAa;GACb,UAAU;GACV,aAAa,WAAW;GACxB,YAAY,WAAW;GACvB,iBAAiB,WAAW;GAC5B,UAAU,WAAW;GACtB;EACF,CAAC;;;;;;;;ACsCJ,SAAgB,sBAAsB,QAA4B;AAChE,QAAO,eAAe,KAAK,SAAyB;AAClD,MAAI;GAEF,MAAM,YAAY,MAAM,oBAAoB,SAAS,OAAO;AAC5D,OAAI,CAAC,UAA
U,QACb,QAAO,UAAU;GAGnB,MAAM,EAAE,QAAQ,WAAW,SAAS,aAAa,SAAS;GAG1D,IAAIC;GACJ,MAAM,YAAY,KAAK;AAEvB,OAAI,aAAa,OAAO,KAAK,QAAQ;IACjC,MAAM,QAAQ,OAAO,IAAI,OAAO,MAAK,MAAK,EAAE,SAAS,UAAU;AAC/D,QAAI,CAAC,MACH,QAAO,IAAI,SAAS,KAAK,UAAU,EAAE,OAAO,oBAAoB,aAAa,CAAC,EAAE,EAAE,QAAQ,KAAK,CAAC;AAElG,mBAAe;KACX,SAAS,MAAM;KACf,mBAAmB,MAAM;KACzB,UAAU,MAAM;KAChB,gBAAgB,OAAO,IAAI;KAC9B;cACM,OAAO,KAAK,UAAU,OAAO,IAAI,OAAO,SAAS,GAAG;IAE3D,MAAM,QAAQ,OAAO,IAAI,OAAO;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,0BAA0B;AACtD,mBAAe;KACX,SAAS,MAAM;KACf,mBAAmB,MAAM;KACzB,UAAU,MAAM;KAChB,gBAAgB,OAAO,IAAI;KAC9B;SAED,QAAO,IAAI,SAAS,KAAK,UAAU,EAAE,OAAO,kCAAkC,CAAC,EAAE,EAAE,QAAQ,KAAK,CAAC;GAIrG,MAAM,kBAAkB,MAAM,yBAC5B,QACA,SACA,QACA,WACA,YACD;AACD,OAAI,gBACF,QAAO;AAGT,YAAO,KAAK,2BAA2B;IACrC;IACA,QAAQ,KAAK,UAAU;IACvB,WAAW,aAAa;IACxB,SAAS,aAAa;IACtB,YAAY,CAAC,CAAC,KAAK;IACnB,sBAAsB,CAAC,CAAC,KAAK;IAC7B,eAAe,YAAY;IAC5B,CAAC;GAGF,MAAM,UAAU,IAAI,aAAa;GACjC,MAAM,SAAS,IAAI,eAAe,EAChC,MAAM,MAAM,YAAY;IACtB,MAAMC,kBAAmC,EAAE;IAC3C,IAAI,uBAAuB;IAC3B,IAAIC,wBAAuC;IAC3C,IAAIC,iBAAgC,EAAE;AAEtC,QAAI;KACF,MAAM,aAAa,UAAoB,aAAa,YAAY,SAAS,MAAM;KAG/E,MAAM,iBAAiB,MAAM,8BAC3B,aACA,QACA,gBACD;KAGD,MAAM,eAAe,MAAM,iBACzB,OAAO,WACP,cACA;MACE;MACA;MACA,QAAQ,KAAK;MACb,mBAAmB,KAAK;MACzB,CACF;KAGD,MAAM,eAAe,aAAa,eAAe,aAAa,SAAS,OACnE,MAAM,OAAO,wBAAwB,aAAa,UAAU,YAAY,QAAQ,GAChF,MAAM,OAAO,2BACX,MAAM,aAAa,SAAS,MAAM,EAClC,YACA,QACD;AAGL,4BAAuB,aAAa;AACpC,6BAAwB,aAAa;AACrC,sBAAiB,aAAa;AAC9B,qBAAgB,KAAK,aAAa,YAAY;KAG9C,MAAM,EAAE,aAAa,iBAAiB,iBACpC,oBAAoB,gBAAgB;AAGtC,WAAM,uBACJ,QACA,SACA,QACA,iBACA,cACA,UACD;AAGD,WAAM,wBACJ,QACA,SACA,QACA,uBACA,aACA,sBACA,gBACA,gBACD;AAED,cAAO,KAAK,uCAAuC;MACjD;MACA,gBAAgB;MAChB,aAAa;MACd,CAAC;AACF,gBAAW,OAAO;aACX,OAAO;AACd,cAAO,MAAM,8BAA8B,OAAgB;MACzD;MACA,QAAQ,KAAK;MACd,CAAC;AACF,kBAAa,YAAY,SAAS;MAChC,MAAM;MACN,MAAM,EACJ,OAAO,iBAAiB,QAAQ,MAAM,UAAU,qBACjD;MACF,CAAC;AACF,gBAAW,OAAO;;MAGvB,CAAC;AAEF,UAAO,IAAI,SAAS,QAAQ,EAC1B,SAAS;IACP,gBAAgB;IAChB,iBAAiB;IACjB,YAAY;IACb,EACF,CAAC;WACK,OAAO;AACd,YAAO,MAAM,8BAA8B,OAAgB,EACzD,QAAQ,QAAQ,MAAM,IACvB,CAAC;AAEF,UAAO,IAAI,SACT,KAAK,UAAU;IACb,OAAO;IACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU;IACnD,CAAC,EACF;IACE,QAAQ;IACR,SAAS,EAAE,gBAAgB,oBAAoB;IAChD,CACF;;;;;;;;;;;;;;;ACtQP,SAAgBC,sBAAoB,gBAAgC;AAClE,KAAI,mBAAmB,oBAAqB,QAAO;AACnD,KAAI,mBAAmB,aAAc,QAAO;AAC5C,QAAO;;;;;;AAOT,SAAgB,uBAAuB,MAAsB;CAE3D,MAAM,QAAQ,KAAK,MAAM,CAAC,MAAM,MAAM,CAAC;AACvC,QAAO,KAAK,KAAK,QAAQ,IAAI;;;;;;;;;;;;;ACP/B,eAAsB,+BACpB,UACA,YACA,SAMC;AACD,UAAO,MAAM,uCAAuC;AAEpD,KAAI,CAAC,SAAS,KACZ,OAAM,IAAI,MAAM,wBAAwB;CAG1C,MAAM,SAAS,SAAS,KAAK,WAAW;CACxC,MAAM,UAAU,IAAI,aAAa;CACjC,IAAI,SAAS;CACb,IAAIC,UAAyB,EAAE;CAC/B,IAAI,sBAAsB;CAC1B,IAAIC,iBAAgC;CACpC,IAAI,cAAc;CAClB,IAAI,uBAAuB;AAE3B,KAAI;AACF,SAAO,MAAM;GACX,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,OAAI,MAAM;AACR,aAAO,MAAM,+BAA+B;AAC5C;;AAGF,aAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;GACjD,MAAM,QAAQ,OAAO,MAAM,KAAK;AAChC,YAAS,MAAM,KAAK,IAAI;AAExB,QAAK,MAAM,QAAQ,OAAO;IACxB,MAAM,QAAQ,uBAAuB,KAAK;AAC1C,QAAI,CAAC,MAAO;AAGZ,QAAI,MAAM,QAAQ,UAAU;AAC1B,kBAAa,YAAY,SAAS;MAAE,MAAM;MAAQ,MAAM;MAAI,CAAC;AAC7D;;AAIF,QAAI,CAAC,kBAAkB,MAAM,gBAAgB;AAC3C,sBAAiB,MAAM;AACvB,cAAO,MAAM,4BAA4B,EAAE,gBAAgB,CAAC;AAC5D,kBAAa,YAAY,SAAS;MAAE,MAAM;MAAmB,MAAM;MAAgB,CAAC;;AAItF,QAAI,CAAC,uBAAuB,MAAM,SAAS;AACzC,eAAU,0BAA0B,MAAM,SAASC,sBAAoB;AACvE,mBAAc,iBAAiB,MAAM,QAAQ;AAE7C,SAAI,QAAQ,SAAS,EACnB,cAAa,YAAY,SAAS;MAAE,MAAM;MAAW,MAAM;MAAS,CAAC;AAGvE,2BAAsB;;AAIxB,QAAI,MAAM,SAAS;AACjB,6BAAwB,MAAM;AAC9B,kBAAa,YAAY,SAAS;MAAE,MAAM;MAAS,MAAM,MAAM;MAAS,CAAC;;;;WAIvE;AACR,SAAO,aAAa;;CAItB,MAAM,iBAAiB,uBAAuB,YAAY;CAC1D,MAAM,kBAAkB,uBAAuB,qBAAqB;C
AGpE,MAAMC,cAA6B;EACjC,SAAS;EACT,OAAO;EACP,QAAQ;GACN,OAAO;GACP,QAAQ;GACR,OAAO,iBAAiB;GACzB;EACD,UAAW,iBAAiB,QAAe,kBAAkB;EAC7D,4BAAW,IAAI,MAAM,EAAC,aAAa;EACpC;AAED,UAAO,KAAK,uBAAuB;EACjC,aAAa;EACb,cAAc;EACd,aAAa,YAAY,OAAO;EAChC,SAAS,YAAY;EACtB,CAAC;AAEF,QAAO;EACL;EACA;EACA;EACA;EACD;;;;;;;;;;;;;AC1GH,eAAsB,kCACpB,MACA,YACA,SAMC;AACD,UAAO,MAAM,qDAAqD;CAGlE,MAAM,YAAY;CAQlB,IAAIC,iBAAgC;AACpC,KAAI,UAAU,cAAc,gBAC1B,kBAAiB,UAAU,aAAa;UAC/B,UAAU,gBACnB,kBAAiB,UAAU;CAG7B,IAAI,aAAa;AACjB,KAAI,UAAU,cAAc,OAC1B,cAAa,UAAU,aAAa;UAC3B,UAAU,YAAY,UAAU,QACzC,cAAa,UAAU,YAAY,UAAU,WAAW;CAG1D,MAAM,UAAU,0BAA2B,UAAU,WAAW,EAAE,EAAiCC,sBAAoB;CACvH,MAAM,cAAc,iBAAkB,UAAU,WAAW,EAAE,CAAgC;AAG7F,KAAI,YAAY;EACd,MAAM,QAAQ,WAAW,MAAM,IAAI;AACnC,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,QAAQ,MAAM,IAAI,MAAM,KAAK,MAAM,MAAM;AAC/C,OAAI,MACF,cAAa,YAAY,SAAS;IAAE,MAAM;IAAS,MAAM;IAAO,CAAC;;;AAKvE,KAAI,eACF,cAAa,YAAY,SAAS;EAAE,MAAM;EAAmB,MAAM;EAAgB,CAAC;AAGtF,KAAI,QAAQ,SAAS,EACnB,cAAa,YAAY,SAAS;EAAE,MAAM;EAAW,MAAM;EAAS,CAAC;AAGvE,cAAa,YAAY,SAAS;EAAE,MAAM;EAAQ,MAAM;EAAI,CAAC;CAG7D,MAAM,iBAAiB,uBAAuB,YAAY;CAC1D,MAAM,kBAAkB,uBAAuB,WAAW;CAE1D,MAAMC,cAA6B;EACjC,SAAS;EACT,OAAO;EACP,QAAQ;GACN,OAAO;GACP,QAAQ;GACR,OAAO,iBAAiB;GACzB;EACD,UAAW,iBAAiB,QAAe,kBAAkB;EAC7D,4BAAW,IAAI,MAAM,EAAC,aAAa;EACpC;AAED,QAAO;EACL,sBAAsB;EACtB;EACA;EACA;EACD;;;;;;;;;;;;ACxEH,SAAgB,wBAAwB,QAA+B;AACrE,QAAO,eAAe,IAAI,SAAyB;AACjD,MAAI;AACF,OAAI,CAAC,MAAM,OAAO,iBAAiB,QAAQ,CACzC,QAAO,aAAa,EAAE,OAAO,kDAAkD,EAAE,EAAE,QAAQ,KAAK,CAAC;GAEnG,MAAM,SAAS,QAAQ,MAAM;AAE7B,OAAI,CAAC,QAAQ,OAAO,CAAC,OACnB,QAAO,aAAa,EAAE,OAAO,iBAAiB,EAAE,EAAE,QAAQ,KAAK,CAAC;GAGlE,MAAM,EAAE,iBAAiB,IAAI,IAAI,QAAQ,IAAI;GAC7C,MAAM,WAAW,aAAa,IAAI,SAAS,KAAK;GAChD,MAAM,iBAAiB,aAAa,IAAI,iBAAiB;GAGzD,MAAM,UAAU,MAAM,OAAO,YAAY;AAGzC,OAAI,UAAU;IACZ,MAAMC,YAAU,MAAM,iBAAiB,SAAS,QAAQ,OAAO,cAAc;AAE7E,QAAI,CAACA,UACH,QAAO,aAAa,EAAE,OAAO,yBAAyB,EAAE,EAAE,QAAQ,KAAK,CAAC;AAG1E,WAAO,aAAaA,UAAQ;;AAI9B,OAAI,CAAC,eACH,QAAO,aACL,EAAE,OAAO,6CAA6C,EACtD,EAAE,QAAQ,KAAK,CAChB;GAGH,MAAM,UAAU,MAAM,2BACpB,SACA,QACA,gBACA,OAAO,cACR;AAED,OAAI,CAAC,QACH,QAAO,aAAa,EAAE,OAAO,iCAAiC,EAAE,EAAE,QAAQ,KAAK,CAAC;AAGlF,UAAO,aAAa,QAAQ;WACrB,OAAO;AACd,YAAO,MAAM,iCAAiC,OAAgB,EAC5D,QAAQ,QAAQ,MAAM,IACvB,CAAC;AAEF,UAAO,aACL;IACE,OAAO;IACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU;IACnD,EACD,EAAE,QAAQ,KAAK,CAChB;;;;;;;;;;AAWP,SAAgB,2BAA2B,QAA+B;AACxE,QAAO,eAAe,OAAO,SAAyB;AACpD,MAAI;AACF,OAAI,CAAC,MAAM,OAAO,iBAAiB,QAAQ,CACzC,QAAO,aAAa,EAAE,OAAO,kDAAkD,EAAE,EAAE,QAAQ,KAAK,CAAC;GAEnG,MAAM,SAAS,QAAQ,MAAM;AAC7B,OAAI,CAAC,QAAQ,OAAO,CAAC,OACnB,QAAO,aAAa,EAAE,OAAO,iBAAiB,EAAE,EAAE,QAAQ,KAAK,CAAC;GAGlE,MAAM,EAAE,iBAAiB,IAAI,IAAI,QAAQ,IAAI;GAC7C,MAAM,iBAAiB,aAAa,IAAI,iBAAiB;AAEzD,OAAI,CAAC,eACH,QAAO,aACL,EAAE,OAAO,yCAAyC,EAClD,EAAE,QAAQ,KAAK,CAChB;GAIH,MAAM,UAAU,MAAM,OAAO,YAAY;AAEzC,YAAO,KAAK,wBAAwB;IAAE;IAAgB;IAAQ,CAAC;GAE/D,MAAM,UAAU,MAAM,aAAa,SAAS,QAAQ,gBAAgB,OAAO,cAAc;AAEzF,OAAI,CAAC,QACH,QAAO,aACL,EAAE,OAAO,sDAAsD,EAC/D,EAAE,QAAQ,KAAK,CAChB;AAGH,YAAO,KAAK,oCAAoC;IAC9C;IACA,aAAa,QAAQ;IACrB,WAAW,QAAQ;IACpB,CAAC;AAEF,UAAO,aAAa;IAClB,SAAS;IACT,SAAS;IACT,SAAS;KACP,iBAAiB;KACjB,QAAQ;KACR,cAAc,QAAQ;KACtB,YAAY,QAAQ;KACrB;IACF,CAAC;WACK,OAAO;AACd,YAAO,MAAM,8BAA8B,OAAgB;IACzD,gBAAgB,QAAQ,MAAM,IAAI,IAAI,QAAQ,IAAI,CAAC,aAAa,IAAI,iBAAiB,GAAG;IACxF,QAAQ,QAAQ,MAAM;IACvB,CAAC;AAEF,UAAO,aACL;IACE,OAAO;IACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU;IACnD,EACD,EAAE,QAAQ,KAAK,CAChB;;;;;;;;;;;;;ACtIP,SAAgB,uBAAuB,QAA8B;AACnE,QAAO,eAAe,IACpB,SACA;AACA,MAAI;AACF,OAAI,CAAC,MAAM,OAAO,iBAAiB,QAAQ,CACzC,QAAO,aAAa,EAAE,OAAO,iDAAiD,EAAE,EAAE,QAAQ,KAAK,CAAC;AAElG,OAAI,CAAC,QAAQ,OAAO,CAAC,QAAQ,KAC3B,QAAO,a
AAa,EAAE,OAAO,iBAAiB,EAAE,EAAE,QAAQ,KAAK,CAAC;GAElE,MAAM,KAAK,QAAQ,aAAa;GAEhC,MAAM,iBADM,IAAI,IAAI,QAAQ,IAAI,CACL,aAAa,IAAI,aAAa;AAGzD,OAAI,CAAC,GACH,QAAO,aAAa,EAAE,OAAO,+BAA+B,EAAE,EAAE,QAAQ,KAAK,CAAC;AAIhF,OAAI,CAAC,eACH,QAAO,aACL;IACE,OAAO;IACP,aAAa,OAAO;IACrB,EACD,EAAE,QAAQ,KAAK,CAChB;AAcH,UAAO,aAPW,MAAM,eAHT,sBAAsB,OAAO,UAAU,EAGP;IAC7C,SAAS;IACT;IACA,kBAAkB,OAAO;IAC1B,CAAC,CAG4B;WACvBC,OAAgB;AACvB,YAAO,MAAM,wBAAwB,OAAgB;IACnD,SAAS,QAAQ,aAAa;IAC9B,YAAY,QAAQ,MAAM,IAAI,IAAI,QAAQ,IAAI,CAAC,aAAa,IAAI,aAAa,GAAG;IACjF,CAAC;AAGF,OAAI,iBAAiB,OAAO;AAC1B,QAAI,MAAM,QAAQ,SAAS,qBAAqB,CAC9C,QAAO,aACL;KACE,OAAO,MAAM;KACb,aAAa,OAAO;KACrB,EACD,EAAE,QAAQ,KAAK,CAChB;AAEH,QAAI,MAAM,QAAQ,SAAS,YAAY,CACrC,QAAO,aAAa,EAAE,OAAO,uBAAuB,EAAE,EAAE,QAAQ,KAAK,CAAC;;AAI1E,UAAO,aACL;IACE,OAAO;IACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU;IACnD,EACD,EAAE,QAAQ,KAAK,CAChB;;;;;;;ACtFP,SAAgB,uBAAuB,QAA8B;AACnE,QAAO,eAAe,MAAM;AAC1B,MAAI;AASA,UAAO,aAAa,EAAE,SARP,OAAO,WAAW,UAAU,EAAE,EAGjB,KAAI,WAAU;IACtC,MAAM,MAAM;IACZ,MAAM,MAAM,QAAQ,MAAM;IAC7B,EAAE,EAEyC,EAAE,EAAE,QAAQ,KAAK,CAAC;WACzD,OAAO;AACZ,UAAO,aAAa,EAAE,OAAO,yBAAyB,EAAE,EAAE,QAAQ,KAAK,CAAC;;;;;;;;;;;;ACFhF,SAAgB,yBACd,QACwI;CACxI,MAAMC,YAAoJ,EAAE;AAG5J,KAAI,CAAC,OAAO,UAAU,OAAO,OAAO,WAAW,KAAK,CAAC,OAAO,UAC1D,QAAO;CAGT,MAAM,EAAE,QAAQ,WAAW,cAAc;CAGzC,MAAM,mBAAmB,OAAO,SAAQ,UAAS,MAAM,kBAAkB,IAAI,EAAE;CAC/E,MAAM,mBAAmB,MAAM,KAAK,IAAI,IAAI,iBAAiB,CAAC;CAG9D,MAAM,mBAAmB;EACvB,SAAS;EACT;EACA;EACA,QAAQ,OAAO;EACf,MAAM,OAAO;EACb,UAAU,OAAO;EAClB;AAGD,WAAU,KAAK;EACb,MAAM;EACN,QAAQ;EACR,SAAS,sBAAsB;GAC7B,gBAAgB;GAChB,kBAAkB,UAAU;GAC5B;GACA,KAAK;GACL,YAAY,UAAU;GACtB,iBAAiB,UAAU;GAC3B,mBAAmB,UAAU;GAC7B,iBAAiB,UAAU;GAC3B,yBAAyB;GACzB,4BAA4B;GAC5B,yBAAyB,UAAU;GACnC,wBAAwB,UAAU;GAClC,iBAAiB,OAAO;GACzB,CAAC;EACH,CAAC;AAEF,WAAU,KAAK;EACb,MAAM;EACN,QAAQ;EACR,SAAS,wBAAwB;GAC/B,YAAY,UAAU;GACtB,kBAAkB,UAAU;GAC7B,CAAC;EACH,CAAC;AAEF,WAAU,KAAK;EACb,MAAM;EACN,QAAQ;EACR,SAAS,2BAA2B;GAClC,YAAY,UAAU;GACtB,kBAAkB,UAAU;GAC7B,CAAC;EACH,CAAC;AAEF,WAAU,KAAK;EACb,MAAM;EACN,QAAQ;EACR,SAAS,uBAAuB;GAC9B;GACA,kBAAkB,UAAU;GAC5B;GACD,CAAC;EACH,CAAC;AAEF,WAAU,KAAK;EACb,MAAM;EACN,QAAQ;EACR,SAAS,uBAAuB;GAC9B,WAAW;GACX,kBAAkB,UAAU;GAC7B,CAAC;EACH,CAAC;AAEF,QAAO;;;;;;;;AClGT,MAAa,4BACX,kBACmB;AACnB,cAAa;AACX,MAAI;GAEF,MAAMC,cAA8C,EAAE;AACtD,QAAK,MAAM,CAAC,MAAM,iBAAiB,OAAO,QACxC,cAAc,eAAe,EAAE,CAChC,CACC,KAAI,MAAM,QAAQ,aAAa,EAAE;IAE/B,MAAM,qBAAqB,aAAa,MAAM,WAAW,OAAO,QAAQ;AACxE,QAAI,oBAAoB;KAEtB,IAAIC,SAA+D,EAAE;AACrE,cAAS,mBAAmB;KAC5B,MAAM,cAAc,OAAO,QAAO,MAAK,EAAE,MAAM,CAAC,KAAI,MAAK,EAAE,KAAK;KAChE,MAAM,eAAe,OAAO,QAAO,MAAK,EAAE,UAAU,MAAM,CAAC,KAAI,MAAK,EAAE,KAAK;AAO3E,iBAAY,KAAK;MACf;MACA,aACE,mBAAmB,eACnB,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE;MAC9C;MACA;MACD,CAAC;;;AAKR,UAAO,SAAS,KAAK;IACnB,aAAa;IACb;IACD,CAAC;WACK,QAAQ;AAEf,UAAO,SAAS,KACd,EAAE,OAAO,6BAA6B,EACtC,EAAE,QAAQ,KAAK,CAChB;;;;;;;AC7CP,IAAM,cAAN,MAA+B;CAC7B,AAAQ,wBAAQ,IAAI,KAA4B;CAChD,AAAiB;CACjB,AAAiB;CAEjB,YAAY,UAAwB,EAAE,EAAE;AACtC,OAAK,aAAa,QAAQ,OAAO,MAAS;AAC1C,OAAK,UAAU,QAAQ,WAAW;;;;;CAMpC,AAAQ,YAAY,OAAe,YAAqB,QAAsC;EAC5F,MAAM,UAAU,GAAG,cAAc,YAAY,GAAG;AAChD,MAAI,OAKF,QAAO,GAAG,QAAQ,GAJG,OAAO,KAAK,OAAO,CACrC,MAAM,CACN,KAAI,QAAO,GAAG,IAAI,GAAG,OAAO,OAAO,CACnC,KAAK,IAAI;AAGd,SAAO;;;;;CAMT,UAAgB;EACd,MAAM,MAAM,KAAK,KAAK;AACtB,OAAK,MAAM,CAAC,KAAK,UAAU,KAAK,MAAM,SAAS,CAC7C,KAAI,MAAM,MAAM,YAAY,MAAM,IAChC,MAAK,MAAM,OAAO,IAAI;;;;;CAQ5B,MAAM,SAAwB;AAC5B,MAAI,CAAC,SAAS;AACZ,QAAK,MAAM,OAAO;AAClB;;AAGF,OAAK,MAAM,OAAO,KAAK,MAAM,MAAM,CACjC,KAAI,IAAI,SAAS,QAAQ,CACvB,MAAK,MAAM,OAAO,IAAI;;;;;CAQ5B,IAAI,OAAe,YAAqB,QAAwC;EAC9E,MAAM,MAAM,KAAK,YAAY,OAAO,cAAc,IAAI,OAAO;EAC7D,MAAM,QA
AQ,KAAK,MAAM,IAAI,IAAI;AAEjC,MAAI,CAAC,MACH,QAAO;AAIT,MAAI,KAAK,KAAK,GAAG,MAAM,YAAY,MAAM,KAAK;AAC5C,QAAK,MAAM,OAAO,IAAI;AACtB,UAAO;;AAGT,SAAO,MAAM;;;;;CAMf,WAAgE;AAC9D,SAAO;GACL,SAAS,KAAK;GACd,MAAM,KAAK,MAAM;GAClB;;;;;CAMH,IAAI,OAAe,YAAqB,QAAuC;AAC7E,SAAO,KAAK,IAAI,OAAO,YAAY,OAAO,KAAK;;;;;CAMjD,IACE,OACA,MACA,YACA,QACA,KACM;EACN,MAAM,MAAM,KAAK,YAAY,OAAO,cAAc,IAAI,OAAO;AAG7D,MAAI,KAAK,MAAM,QAAQ,KAAK,SAAS;GACnC,MAAM,YAAY,KAAK,MAAM,MAAM,CAAC,MAAM,CAAC;AAC3C,OAAI,UACF,MAAK,MAAM,OAAO,UAAU;;AAIhC,OAAK,MAAM,IAAI,KAAK;GAClB;GACA,WAAW,KAAK,KAAK;GACrB,KAAK,OAAO,KAAK;GAClB,CAAC;;;AAKN,MAAa,cAAc,IAAI,YAAY;CACzC,SAAS;CACT,KAAK,MAAS;CACf,CAAC;AAGF,kBAAkB;AAChB,aAAY,SAAS;GACpB,MAAU,IAAK;;;;;;;;;;;;AC9HlB,MAAa,YAAY;AACzB,MAAa,eAAe;AAC5B,MAAa,mBAAmB;AAChC,MAAa,gBAAgB;;;;AAK7B,MAAa,wBAAwB,CAAC,SAAS,UAAU;;;;AAKzD,MAAa,4BAA4B;;;;AAKzC,MAAa,gCAAgC;;;;AAK7C,MAAa,oBAAoB;;;;;;;ACxBjC,MAAa,6CACX,SACA,gBACA,WAC2B;AAC3B,QAAO;EACL,YAAY;EACZ,aAAa,QAAQ,eAAe;EACpC,MAAM;EACN,OAAO,QAAQ;EACf,MAAM,QAAQ,MAAM,KAAK,SAAoB;GACzC,GAAG;GACH,YAAY;GACZ,aAAa,QAAQ,eAAe;GACpC,MAAM;GACN,UAAW,IAAI,YAAY,EAAE;GAC9B,EAAE,IAAI,EAAE;EACZ;;;;;AAMH,MAAa,6BACX,SACA,YAKyB;CACzB,MAAM,EAAE,MAAM,UAAU,UAAU;CAElC,MAAM,eAAe,QAAQ,SAAS,WAAW,OAAO,QAAQ,EAAE,CAAC;CACnE,MAAM,aAAa,QAAQ,QACxB,KAAK,WAAW,OAAO,OAAO,SAAS,IACxC,EACD;AAGD,cAAa,MAAM,GAAG,OAAO,EAAE,cAAc,MAAM,EAAE,cAAc,GAAG;AAkBtE,QAhB2C;EACzC,aAAa,QAAQ,KAAK,OAAO;GAC/B,YAAY,EAAE;GACd,aAAa,EAAE;GACf,OAAO,EAAE;GACT,OAAO,EAAE,SAAS;GAClB,MAAM,EAAE;GACT,EAAE;EACH,OAAO;EACP,MAAM,aAAa,MAAM,GAAG,SAAS;EACrC;EACA,gBAAgB;GAAE;GAAU;GAAO;EACnC,eAAe;EACf,gBAAgB;EACjB;;;;;;;;ACpDH,MAAa,gCACX,OACA,YAO4B;CAC5B,MAAM,EACJ,MACA,UACA,eAAe,uBACf,SACA,mBACE;AAeJ,QAbwC;EACtC,uBAAuB,aAAa,KAAK,IAAI;EAC7C,WAAW;EACX;EACA;EACA,GAAG;EACH,UAAU,aAAa,KAAK,IAAI;EAChC,mBAAmB;EACnB,uBAAuB;EACP;EACP;EACV;;;;;;;;AC/BH,MAAa,8BAA8B,OACzC,iBACA,gBACA,QACA,YASoC;AACpC,KAAI;EACF,MAAMC,eAMF;GACF,MAAM,QAAQ;GACd,UAAU,QAAQ;GACnB;AAGD,MAAI,QAAQ,aACV,cAAa,eAAe,QAAQ;WAC3B,QAAQ;GACf,IAAIC,SAA6D,EAAE;AACnE,YAAS,OAAO;GAGhB,MAAM,eAAe,OAChB,QAAO,MACJ,EAAE,UAAU,UACX,EAAE,SAAS,YAAY,EAAE,SAAS,YACtC,CACA,KAAI,MAAK,EAAE,KAAK;AACrB,OAAI,aAAa,SAAS,EACtB,cAAa,eAAe;;AAIpC,MAAI,QAAQ,QACV,cAAa,UAAU,QAAQ;AAGjC,MAAI,QAAQ,eACV,cAAa,iBAAiB,QAAQ;EAGxC,MAAM,mBAAmB,6BACvB,QAAQ,OACR,aACD;AAID,MAAI,CAAC,QAAQ,gBACX,KAAI;AAOF,SALyB,MAAM,gBAC5B,YAAY,eAAe,CAC3B,UAAU,EAEuB,QAAQ,KAAI,MAAK,EAAE,KAAK,IAAI,EAAE,EACnD,SAAS,WAAW,CAEjC,kBAAiB,YAAY;WAGxBC,aAAsB;AAUjC,SAAO,0CALS,MAAM,gBACnB,YAAY,eAAe,CAC3B,WAAW,CACX,OAAO,iBAAiB,EAIzB,gBACA,OACD;UACM,OAAO;AACd,SAAO;GACL,YAAY;GACZ,aAAa,QAAQ,eAAe;GACpC,OAAO,iBAAiB,QAAQ,MAAM,UAAU;GAChD,OAAO;GACP,MAAM,EAAE;GACR,MAAM;GACP;;;;;;ACjGL,MAAa,0CAA0C,OACrD,iBACA,oBACA,OACA,YACkC;AAClC,UAAO,KAAK,kDAAkD;EAC5D;EACA,aAAa,mBAAmB,KAAK,CAAC,UAAU,KAAK;EACtD,CAAC;CAGF,MAAM,uBAAuB,QAAQ,WACjC,QAAQ,SAAS,MAAM,IAAI,CAAC,KAAI,MAAK,EAAE,MAAM,CAAC,GAC9C;CAEJ,MAAM,iBAAiB,mBAAmB,IACxC,OAAO,CAAC,gBAAgB,YAAY;AAClC,MAAI;AA+BF,UA9Be,MAAM,4BACnB,iBACA,gBACA,QACA;IACE;IACA,MAAM,QAAQ;IACd,UAAU,QAAQ;IAClB,GAAI,uBACA,EAAE,cAAc,sBAAsB,UAC/B;AAEL,SAAI,CAAC,OAAQ,QAAO,EAAE;KACtB,IAAIC,SAA6D,EAAE;AACnE,cAAS,OAAO;KAGhB,MAAM,eAAe,OAChB,QAAO,MACJ,EAAE,UAAU,UACX,EAAE,SAAS,YAAY,EAAE,SAAS,YACtC,CACA,KAAI,MAAK,EAAE,KAAK;AACrB,YAAO,aAAa,SAAS,IAAI,EAAE,cAAc,GAAG,EAAE;QACpD;IAER,GAAI,QAAQ,WAAW,EAAE,SAAS,QAAQ,SAAS;IACnD,GAAI,QAAQ,kBAAkB,EAAE,gBAAgB,QAAQ,gBAAgB;IACzE,CACF;WAEM,OAAO;AACd,YAAO,MAAM,8BAA8B,OAAgB;IACzD,YAAY;IACZ;IACD,CAAC;AACF,SAAM;;GAGX;CAGD,MAAM,iBAAiB,0BADP,MAAM,QAAQ,IAAI,eAAe,EACS;EACxD,MAAM,QAAQ;EACd,UAAU,QAAQ;EAClB;EACD,CAAC;AAEF,aAAY,IAAI,OAAO,gBAAgB,aAAa,QAAQ;AAC5D,QAAO;;;;;;
;;AC7BT,MAAa,8BACX,oBACA,oBACA,YACyB;CACzB,MAAM,EACJ,WAAW,kBACX,OAAO,cACP,GACA,OACA,WACE;CAyCJ,MAAMC,WAvCa,mBAAmB,SAAS,KAAK,QAAmC,UAA2C;AAChI,MAAI,CAAC,mBAAmB,OACtB,QAAO;EAET,MAAM,CAAC,gBAAgB,UAAU,mBAAmB;AAEpD,SAAO;GACL,YAAY;GACZ,aAAa,QAAQ,eAAe;GACpC,MAAM;GACN,OAAO,OAAO,SAAS;GACvB,OAAO,OAAO,SAAS;GACvB,MACE,OAAO,MAAM,KAAK,QAAmB;IACnC,MAAM,MAAM,IAAI,YAAY,EAAE;IAC9B,MAAM,OAAO,IAAI,aACb,OAAO,IAAI,WAAW,CAAC,UAAU,GAAG,IAAI,GAAG,QAC3C,IAAI,cACF,OAAO,IAAI,YAAY,CAAC,UAAU,GAAG,IAAI,GAAG,QAC5C,IAAI;AAEV,WAAO;KACL,GAAG;KACH,YAAY;KACZ,aAAa,QAAQ,eAAe;KACpC,MAAM;KACN,UAAU;MACR,GAAG;MACH;MAEA,GAAI,IAAI,aAAa,EAAE,YAAY,IAAI,YAAY,GAAG,EAAE;MACzD;KACD,iBAAiB,IAAI;KACrB,YAAY,IAAI;KACjB;KACD,IAAI,EAAE;GACX;GACD,IAAI,EAAE,EAEuC,QAAQ,MAAsD,MAAM,KAAK;CAGxH,MAAM,eAAe,QAAQ,SAAS,WAAW,OAAO,KAAK;CAC7D,MAAM,aAAa,QAAQ,QACxB,KAAK,WAAW,MAAM,OAAO,OAC9B,EACD;AAGD,cAAa,MAAM,GAAG,MAAM;AAG1B,UAFkB,EAAE,mBAAmB,aACrB,EAAE,mBAAmB;GAEvC;AAuBF,QArB2C;EACzC,aAAa,QAAQ,KAAK,OAAyB;GACjD,YAAY,EAAE;GACd,aAAa,EAAE;GACf,OAAO,EAAE;GACT,OAAO,EAAE,SAAS;GAClB,MAAM,EAAE;GACT,EAAE;EACH,OAAO;EACP,MAAM,aAAa,MAAM,GAAG,SAAS;EACrC;EACA,gBAAgB;GACX;GACH;GACA,OAAO,SAAS;GAChB,QAAQ,SAAS,aAAa;GAC/B;EACD,eAAe;EACf,gBAAgB;EACjB;;;;;;;;AC1HH,MAAa,2BACX,cACA,YAC4B;CAC5B,MAAM,EACJ,OACA,IAAI,WACJ,SAAS,OACT,QAAQ,eACR,OAAO,cACP,WAAW,kBACX,WACA,SACA,iBACE;CAEJ,MAAMC,eAAwC;EAC5C,GAAG;EACH,cAAc,eAAe,aAAa,KAAK,IAAI,CAAC,OAAO,EAAE;EAC7D;EACA;EACA,gBAAgB;EACjB;AAGD,KAAI,UAAU,OAAO;AACnB,eAAa,IAAI;AACjB,eAAa,WAAW,cAAc,KAAK,IAAI,IAAI,sBAAsB,KAAK,IAAI;AAClF,eAAa,eAAe,eAAe,aAAa,KAAK,IAAI,CAAC,OAAO,EAAE,UAAU,MAAM;;AAI7F,KAAI,UACF,cAAa,YAAY;AAI3B,KAAI,QACF,cAAa,UAAU;AAGzB,QAAO;;;;;;;;AC9CT,MAAa,0CACX,cACA,oBACA,YACmC;CACnC,MAAM,EACJ,OACA,GACA,QACA,OACA,MACA,UACA,WACA,YACE;AAEJ,QAAO,mBAAmB,KAAK,CAAC,gBAAgB,YAAY;EAE1D,IAAIC;AACJ,MAAI,QAAQ;GACR,IAAIC,SAA6D,EAAE;AACnE,YAAS,OAAO;GAGhB,MAAM,YAAY,OACb,QAAO,MACJ,EAAE,UAAU,UACX,EAAE,SAAS,YAAY,EAAE,SAAS,YACtC,CACA,KAAI,MAAK,EAAE,KAAK;AACrB,OAAI,UAAU,SAAS,EACnB,gBAAe;;AAqBvB,SAAO;GACL,YAAY;GACZ,GAjB6B,wBAAwB,cAAc;IACnE,GAAI,UAAU,UAAa,EAAE,OAAO;IACpC,GAAI,MAAM,UAAa,EAAE,GAAG;IAC5B,GAAI,WAAW,UAAa,EAAE,QAAQ;IACtC,GAAI,UAAU,UAAa,EAAE,OAAO;IACpC,GAAI,SAAS,UAAa,EAAE,MAAM;IAClC,GAAI,aAAa,UAAa,EAAE,UAAU;IAE1C,GAAI,YAAY,UAAa,EAAE,SAAS;IACxC,GAAI,iBAAiB,UAAa,EAClB,cACf;IACF,CAAC;GAMA,YAAY;GACb;GACD;;;;;;;;AC1DJ,MAAa,sBAAsB,OACjC,OACA,QACA,oBAC6B;AAE7B,KAAI,UAAU,MAAM,QAAQ,OAAO,IAAI,OAAO,SAAS,EACrD,QAAO;AAIT,KAAI,OAAO;EACT,MAAM,eAAe,MAAM,kBAAkB,OAAO,gBAAgB;AACpE,MAAI,CAAC,gBAAgB,aAAa,WAAW,EAC3C,QAAO;AAET,SAAO;;AAGT,QAAO;;;;;ACbT,IAAa,gBAAb,MAA2B;CACzB,YACE,AAAQC,iBACR,AAAQC,eACR;EAFQ;EACA;;CAGV,MAAM,cACJ,OACA,mBACA,SAC+B;EAE/B,MAAM,WAAW,UAAU,MAAM,GAAG,KAAK,UAAU,QAAQ,CAAC,GAAG,kBAAkB,KAAI,MAAK,EAAE,GAAG,CAAC,KAAK,IAAI;EACzG,MAAM,eAAe,YAAY,IAAI,OAAO,UAAU,QAAQ;AAC9D,MAAI,aAAc,QAAO;AAKzB,OAHmB,QAAQ,QAAQ,gBAGhB,SAChB,QAAO,KAAK,yBAAyB,OAAO,mBAAmB,QAAQ;EAI1E,MAAM,eAAe,MAAM,oBACzB,OACA,QACA,KAAK,cAAc,SAAS,UAC7B;AAED,MAAI,CAAC,aAEH,QAAO,KAAK,yBAAyB,OAAO,mBAAmB,QAAQ;AAGzE,MAAI;GAEA,MAAM,UAAU,MAAM,KAAK,oBAAoB,OAAO,cAAc,mBAAmB,QAAQ;AAC/F,eAAY,IAAI,OAAO,SAAS,UAAU,QAAQ;AAClD,UAAO;WACF,OAAO;AACZ,YAAO,MAAM,qDAAqD,MAAe;AACjF,UAAO,KAAK,yBAAyB,OAAO,mBAAmB,QAAQ;;;CAI7E,MAAc,yBACZ,OACA,mBACA,SAC+B;AAC7B,SAAO,wCACL,KAAK,iBACL,mBACA,OACA,QACD;;CAGL,MAAc,oBACZ,OACA,cACA,mBACA,SAC+B;EAC7B,MAAM,WAAW,uCACf,cACA,mBACA;GACE;GACA,GAAG,KAAK,IAAI,IAAI,UAAU;GAC1B,QAAQ;GACR,OAAO;GACP,MAAM,QAAQ;GACd,UAAU,QAAQ;GAClB,GAAI,QAAQ,YAAY,UAAa,EAAE,SAAS,QAAQ,SAAS;GAClE,CACF;AAED,MAAI,SAAS,WAAW,EACpB,QAAO;GACH,aAAa,EAAE;GACf,OAAO;GACP,MAAM,EAAE;GACR,MAAM,QAAQ;GACd,gBAAgB;IACZ,UAAU,QAAQ;IACX;IAC
V;GACD,eAAe;GACf,gBAAgB;GACnB;AAKL,SAAO,2BAFoB,MAAM,KAAK,gBAAgB,YAAY,QAAQ,EAAE,UAAU,CAAC,EAIrF,mBACA;GACE,UAAU,QAAQ;GAClB,MAAM,QAAQ;GACd,GAAG;GACH;GACD,CACF;;;;;;;;;AChHP,SAAS,oBAAoB,gBAAgC;AAC3D,KAAI,eAAe,SAAS,UAAU,CAAE,QAAO;AAC/C,KAAI,eAAe,SAAS,OAAO,CAAE,QAAO;AAC5C,QAAO;;;;;AAiBT,SAAgB,wBAAwB,MAAiE;AACvG,KAAI,CAAC,KAAK,KACR,QAAO,EAAE,WAAW,EAAE,EAAE;AAiB1B,QAAO,EAAE,WAdS,KAAK,KAAK,KAAK,QAAmB;EAClD,MAAM,MAAM,IAAI,YAAY,EAAE;EAC9B,MAAM,kBAAkB,IAAI,cAAc,IAAI;EAC9C,MAAM,aAAa,OAAO,oBAAoB,WAAW,kBAAkB;AAE3E,SAAO;GACL,IAAI,OAAO,IAAI,MAAM,GAAG;GACxB,OAAO,OAAO,IAAI,SAAS,aAAa;GACxC,MAAM,OAAO,IAAI,QAAQ,GAAG;GAC5B,MAAM,oBAAoB,WAAW;GACzB;GACb;GACD,EAEkB;;;;;;;;;;;;;;;;AC/BtB,MAAa,8BACX,gBACA,gBACW;AACX,QAAO,YAAY,aAAa;;;;;ACdlC,IAAa,2BAAb,MAAsC;CACpC,AAAQ;CAER,YAAY,AAAQC,eAAoC;EAApC;AAClB,OAAK,oBAAoB,KAAK,qBAAqB,cAAc;;CAGnE,AAAQ,qBACN,eACU;EACV,MAAM,oBAAoB,cAAc,SAAS,QAAQ,UAAU,UAAU,EAAE;EAC/E,MAAMC,oCAAiC,IAAI,KAAK;EAChD,MAAMC,gCAA6B,IAAI,KAAK;AAE5C,OAAK,MAAM,CAAC,gBAAgB,iBAAiB,OAAO,QAClD,cAAc,eAAe,EAAE,CAChC,CACC,KAAI,MAAM,QAAQ,aAAa,CAC7B,MAAK,MAAM,eAAe,cAAc;AACtC,OAAI,CAAC,YAAY,QAAS;GAE1B,MAAM,YAAY,2BAA2B,gBAAgB,YAAY;AACzE,iBAAc,IAAI,UAAU;AAG5B,OAAI,kBAAkB,WAAW,GAAG;AAClC,sBAAkB,IAAI,UAAU;AAChC;;AAKF,OAAI,kBAAkB,SAAS,UAAU,CACvC,mBAAkB,IAAI,UAAU;;AAMxC,SAAO,MAAM,KAAK,kBAAkB;;;;;;;CAStC,oBACE,oBACA,sBACU;AAEV,MAAI,CAAC,oBAAoB;AACvB,OAAI,wBAAwB,qBAAqB,SAAS,EAExD,QAAO,qBAAqB,QAAQ,MAClC,KAAK,kBAAkB,SAAS,EAAE,CACnC;AAGH,UAAO,KAAK;;EAGd,MAAMC,eAAyB,EAAE;EACjC,MAAM,eACJ,KAAK,cAAc,cAAc,uBAAuB,EAAE;AAE5D,MAAI,MAAM,QAAQ,aAAa,EAC7B;QAAK,MAAM,UAAU,aACnB,KAAI,OAAO,SAAS;IAClB,MAAM,YAAY,2BAChB,oBACA,OACD;AACD,QAAI,KAAK,kBAAkB,SAAS,UAAU,CAC5C,cAAa,KAAK,UAAU;;;AAMpC,SAAO;;;;;;ACjFX,IAAa,qBAAb,MAAgC;CAC9B,YAAY,AAAQC,eAAoC;EAApC;;;;;;CAMpB,mBACE,kBAC8B;EAC9B,MAAMC,gBAA8C,EAAE;AAGtD,OAAK,MAAM,CAAC,MAAM,YAAY,OAAO,QACnC,KAAK,cAAc,eAAe,EAAE,CACrC,EAAE;AACD,OAAI,CAAC,MAAM,QAAQ,QAAQ,CAAE;AAE7B,QAAK,MAAM,UAAU,SAAS;AAC5B,QAAI,CAAC,OAAO,QAAS;IAErB,MAAM,YAAY,2BAA2B,MAAM,OAAO;AAG1D,QAAI,iBAAiB,SAAS,UAAU,CACtC,eAAc,KAAK,CAAC,WAAW,OAAO,CAAC;;;AAK7C,SAAO;;;;;;;;;;;;AClBX,SAAgB,oBAAoB,QAA0B;AAC5D,QAAO,OAAO,KAAK,OAAO,UAAU,GAAG,QAAQ,EAAE,IAAI,QAAQ,CAAC,KAAK,KAAK;;;;;AAM1E,MAAM,qBAAqB,EAAE,OAAO;CAClC,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,UAAU;CACtC,SAAS,EAAE,OAAO,EAAE,QAAQ,EAAE,EAAE,KAAK,CAAC,CAAC,UAAU;CACjD,kBAAkB,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,UAAU;CAChD,WAAW,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE;CAC/D,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE;CACnD,UAAU,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,QAAQ,GAAG;CACjE,GAAG,EAAE,QAAQ,CAAC,IAAI,GAAG,oCAAkC;CACvD,mBAAmB,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,QAAQ,GAAG;CAC1E,SAAS,EAAE,QAAQ,CAAC,UAAU;CAC9B,uBAAuB,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE;CACrE,CAAC;;;;AAOF,SAAgB,qBAAqB,QAAmC;AACtE,KAAI;AAEF,SAAO;GACL,MAFsB,mBAAmB,MAAM,OAAO;GAGtD,SAAS;GACV;UACM,OAAO;AACd,MAAI,iBAAiB,EAAE,SAMrB,QAAO;GACL,QANa,MAAM,OAAO,KAAK,QAAQ;AAEvC,WAAO,GADM,IAAI,KAAK,SAAS,IAAI,GAAG,IAAI,KAAK,KAAK,IAAI,CAAC,MAAM,KAC9C,IAAI;KACrB;GAIA,SAAS;GACV;AAGH,SAAO;GACL,QAAQ,CAAC,mCAAmC;GAC5C,SAAS;GACV;;;;;;;;;AC3DL,MAAa,yBACX,YAC0D;CAC1D,IAAIC;CACJ,IAAIC;AAEJ,KAAI,QAAQ,OAAO,OAAO,QAAQ,QAAQ,UAAU;EAElD,MAAM,YADM,IAAI,IAAI,QAAQ,IAAI,CACV,SAAS,MAAM,IAAI;EACzC,MAAM,cAAc,UAAU,QAAQ,SAAS;AAC/C,MAAI,gBAAgB,MAAM,UAAU,cAAc,IAAI;AACpD,oBAAiB,UAAU,cAAc,MAAM;AAC/C,uBAAoB,OAAO,eAAe;SACrC;AACL,oBAAiB;AACjB,uBAAoB;;QAEjB;EAGL,MAAM,sBADS,QAAQ,aACa;AACpC,mBAAiB,OAAO,uBAAuB,GAAG;AAClD,sBAAoB;;AAGtB,QAAO;EAAE;EAAgB;EAAmB;;;;;;;;
AC3B9C,MAAa,uBACX,UAYG;CACH,MAAM,IAAI,OAAO,OAAO,KAAK,GAAG;CAChC,MAAM,YAAY,OAAO;CACzB,MAAM,eAAe,OAAO;CAC5B,MAAM,OAAO,YAAY,SAAS,OAAO,UAAU,EAAE,GAAG,GAAG;CAC3D,MAAM,WAAW,eAAe,SAAS,OAAO,aAAa,EAAE,GAAG,GAAG;CACrE,MAAM,UAAU,OAAO;CACvB,MAAM,OAAO,OAAO;CAGpB,MAAM,kBAAkB,OAAO;CAC/B,MAAMC,cAAoC,kBACtC,MAAM,QAAQ,gBAAgB,GAC5B,gBAAgB,KAAI,MAAK,OAAO,EAAE,CAAC,GACnC,CAAC,OAAO,gBAAgB,CAAC,GAC3B;CAEJ,MAAM,iBAAiB,OAAO;CAC9B,MAAM,WAAW,OAAO;CACxB,MAAM,cAAc,OAAO;CAC3B,MAAM,SAAS,gBAAgB,UAAU,gBAAgB,QAAQ,gBAAgB;CAEjF,MAAMC,SAAmB,EAAE;AAG3B,KAAI,MAAM,KAAK,IAAI,OAAO,EACxB,QAAO,KAAK,yBAAyB;AAEvC,KAAI,MAAM,SAAS,IAAI,WAAW,KAAK,WAAW,IAChD,QAAO,KAAK,6BAA6B;CAG3C,MAAMC,SAWF;EACF;EACA;EACA;EACD;AAED,KAAI,QACF,QAAO,UAAU;AAGnB,KAAI,KACF,QAAO,OAAO;AAGhB,KAAI,eAAe,YAAY,SAAS,EACtC,QAAO,cAAc;AAGvB,KAAI,eACF,QAAO,iBAAiB;AAG1B,KAAI,SACF,QAAO,WAAW;AAGpB,KAAI,OACF,QAAO,SAAS;AAGlB,KAAI,OAAO,SAAS,EAClB,QAAO,SAAS;AAGlB,QAAO;;;;;;;;ACtET,SAAgB,sBAAsB,SAA2C;CAC/E,MAAM,EAAE,UAAU;CAClB,MAAM,EAAE,gBAAgB,sBAAsB,sBAAsB,QAAQ;CAC5E,MAAM,eAAe,oBAAoB,MAAiC;AAG1E,KAAI,aAAa,UAAU,aAAa,OAAO,SAAS,EACtD,QAAO;EACL,SAAS;EACT,OAAO,SAAS,KAAK,EAAE,OAAO,aAAa,OAAO,IAAI,EAAE,EAAE,QAAQ,KAAK,CAAC;EACzE;CAIH,MAAM,aAAa,qBAAqB;EACtC,MAAM,aAAa;EACnB,UAAU,aAAa;EACvB,GAAG,aAAa;EAChB,SAAS,aAAa;EACvB,CAAC;AAEF,KAAI,CAAC,WAAW,QACd,QAAO;EACL,SAAS;EACT,OAAO,SAAS,KACd;GACE,SAAS,oBAAoB,WAAW,UAAU,EAAE,CAAC;GACrD,OAAO;GACR,EACD,EAAE,QAAQ,KAAK,CAChB;EACF;AAGH,QAAO;EAAE,SAAS;EAAM;EAAgB;EAAmB;EAAc;;;;;;;;AC3C3E,MAAa,uBACX,iBACA,kBACmB;CACnB,MAAM,gBAAgB,IAAI,cAAc,iBAAiB,cAAc;CACvE,MAAM,iBAAiB,IAAI,yBAAyB,cAAc;CAClE,MAAM,eAAe,IAAI,mBAAmB,cAAc;AAE1D,QAAO,OAAO,YAA4B;AACxC,MAAI;GAEF,MAAM,YAAY,sBAAsB,QAAQ;AAChD,OAAI,CAAC,UAAU,QAAS,QAAO,UAAU;GAEzC,MAAM,EAAE,gBAAgB,iBAAiB;GAGzC,MAAM,oBAAoB,eAAe,oBACvC,gBACA,aAAa,YACd;AAGD,OAAI,kBAAkB,WAAW,GAAG;AAIlC,QAHsB,CAAC,kBACqB,aAAa,eAAe,aAAa,YAAY,SAAS,EAGrG,QAAO,SAAS,KAAK,EAAE,OAAO,iDAAiD,EAAE,EAAE,QAAQ,KAAK,CAAC;AAEtG,WAAO,SAAS,KAAK,EAAE,OAAO,yCAAyC,EAAE,EAAE,QAAQ,KAAK,CAAC;;AAG3F,OAAI,CAAC,aAAa,KAAK,aAAa,EAAE,MAAM,KAAK,GAC9C,QAAO,SAAS,KAAK,EAAE,OAAO,qCAAmC,EAAE,EAAE,QAAQ,KAAK,CAAC;GAItF,MAAM,gBAAgB,aAAa,mBAAmB,kBAAkB;GAGxE,MAAM,eAAe,MAAM,cAAc,cACvC,aAAa,GACb,eACA;IACI,SAAS,EAAE;IACX,MAAM,aAAa;IACnB,UAAU,aAAa;IACvB,SAAS,aAAa;IACtB,MAAM,aAAa;IACnB,gBAAgB,aAAa;IAC7B,UAAU,aAAa;IAC1B,CACF;AAGD,OAAI,aAAa,OACf,QAAO,SAAS,KAAK,wBAAwB,aAAa,CAAC;AAG7D,UAAO,SAAS,KAAK,aAAa;WAE3B,OAAO;AACd,UAAO,SAAS,KACd;IACE,SAAS,iBAAiB,QAAQ,MAAM,UAAU;IAClD,OAAO;IACR,EACD,EAAE,QAAQ,KAAK,CAChB;;;;;;;AC1EP,MAAa,yBACX,iBACA,kBACG;AACH,QAAO;EACL;GACE,SAAS,yBAAyB,cAAc;GAChD,QAAQ;GACR,MAAM;GACP;EACD;GACE,SAAS,oBAAoB,iBAAiB,cAAc;GAC5D,QAAQ;GACR,MAAM;GACP;EACD;GACE,SAAS,oBAAoB,iBAAiB,cAAc;GAC5D,QAAQ;GACR,MAAM;GACP;EACF;;;;;;;;;;;;ACjBH,MAAaC,iCAA+B;;;;AAS5C,MAAa,8BAA8B;;;;AAK3C,MAAa,uBAAuB;;;;AASpC,MAAa,uBAAuB,MAAS;;;;AAS7C,MAAa,yBAAyB;;;;AAKtC,MAAa,4BAA4B;;;;AAKzC,MAAa,0BAA0B;;;;AAKvC,MAAa,wBAAwB;;;;;;;ACvCrC,MAAM,sBAAsB;CAC1B;EAAE,MAAM;EAAM,MAAM;EAAmB;CACvC;EAAE,MAAM;EAAQ,MAAM;EAAmB;CACzC;EAAE,MAAM;EAAa,MAAM;EAAkB;CAC7C;EAAE,MAAM;EAAa,MAAM;EAAkB;CAC9C;;;;;;AAOD,MAAM,qBACJ,WAAoB,MACpB,aAAqBC,oCACjB;CACJ,MAAM;CACN,MAAM;CACN,SAAS;CACT,GAAI,YAAY,EAAE,UAAU,MAAM;CACnC;;;;AAKD,MAAM,4BAA4B,WAA4D;AAC1F,QAAO,OAAO,KAAI,WAAU;EACxB,MAAM,MAAM;EACZ,MAAM,MAAM,SAAS,SAAS,WAAW,MAAM;EAC/C,OAAO,MAAM;EACb,OAAO,MAAM;EACb,UAAU,MAAM;EACnB,EAAE;;;;;AAMP,MAAM,uBAAuB;CAC3B;EAAE,MAAM;EAAiB,MAAM;EAAmB,OAAO;EAAM;CAC/D;EAAE,MAAM;EAAe,MAAM;EAAkB;CAC/C;EAAE,MAAM;EAAc,MAAM;EAAmB;CAC/C;EAAE,MAAM;EAAY,MAAM;EAAiB;CAC3C;EAAE,MAAM;EAAW,MAAM;EAAqB,OAAO;EAAM,UAAU;EAAM;CAC5E;;;;AAKD,MAAa,4BACX,gBACA,aACA,sBAA8BA,mCAC3B;CACH
,MAAM,SAAS,YAAY,SAAS,yBAAyB,YAAY,OAAO,GAAG,EAAE;CAGrF,MAAM,iBAAiB,IAAI,IAAI,CAC7B,GAAG,OAAO,KAAI,MAAK,EAAE,KAAK,EAC1B,GAAG,gBAAgB,CAAC,KAAI,MAAK,EAAE,KAAK,CACrC,CAAC;AAKF,QAAO;EACL,MAAM;EACN,QAAQ;GACN,GALe,eAAe,CAAC,QAAO,MAAK,CAAC,eAAe,IAAI,EAAE,KAAK,CAAC;GAMvE,GAAG,gBAAgB;GACnB,GAAG;GACH,kBAAkB,OAAO,oBAAoB;GAC9C;EACF;;;;;AAMH,MAAa,mCACX,gBACA,aACA,sBAA8BA,mCAC3B;CACH,MAAM,eAAe,yBAAyB,YAAY,OAAO;CAGjE,MAAM,iBAAiB,IAAI,IAAI,aAAa,KAAI,MAAK,EAAE,KAAK,CAAC;AAK7D,QAAO;EACL,MAAM;EACN,QAAQ;GACN,GALe,eAAe,CAAC,QAAO,MAAK,CAAC,eAAe,IAAI,EAAE,KAAK,CAAC;GAMvE,GAAG;GAEH,kBAAkB,MAAM,oBAAoB;GAC7C;EACF;;;;;AC3GH,IAAa,gBAAb,MAA2B;CACzB,YACE,AAAQC,QACR,AAAQC,QACR;EAFQ;EACA;;;;;CAMV,MAAM,kBAAiC;AACrC,MAAI,CAAC,KAAK,OAAO,YAAa;AAE9B,WAAO,KAAK,qCAAqC;EAEjD,MAAM,sBAAsB,KAAK,wBAAwB;AAEzD,OAAK,MAAM,CAAC,gBAAgB,iBAAiB,OAAO,QAAQ,KAAK,OAAO,YAAY,EAAE;AACpF,OAAI,CAAC,aAAc;AAEnB,QAAK,MAAM,eAAe,cAAsD;AAC9E,QAAI,CAAC,YAAY,QAAS;AAE1B,UAAM,KAAK,UAAU,gBAAgB,aAAa,oBAAoB;;;AAI1E,WAAO,KAAK,oCAAoC;;;;;CAMlD,MAAc,UACZ,gBACA,aACA,qBACe;EACf,MAAM,YAAY,2BAA2B,gBAAgB,YAAY;EAGzE,IAAIC;AAEJ,MAAI,YAAY,WAAW,SACvB,gBAAe,yBAAyB,WAAW,aAAa,oBAAoB;MAEpF,gBAAe,gCAAgC,WAAW,aAAa,oBAAoB;AAG/F,MAAI;GAEF,MAAM,aAAa,MAAM,KAAK,OAAO,YAAY,UAAU,CAAC,UAAU;AAItE,SAAM,KAAK,uBAAuB,WAAW,YAAY,aAAa;WAE/DC,OAAgB;AAEvB,OADwB,OACJ,eAAe,KAAK;AAEtC,aAAO,KAAK,wBAAwB,YAAY;AAChD,UAAM,KAAK,OAAO,aAAa,CAAC,OAAO,aAAa;UAC/C;AACL,aAAO,MAAM,6BAA6B,aAAa,MAAe;AACtE,UAAM;;;;CAKZ,MAAc,uBACZ,WACA,eACA,cACe;AACf,MAAI,CAAC,iBAAiB,CAAC,cAAc,OAAQ;EAE7C,MAAM,gBAAgB,IAAI,IAAI,cAAc,OAAO,KAAK,MAAW,EAAE,KAAK,CAAC;EAE3E,MAAM,YAAY,aAAa,QAAQ,QAAO,MAAK,CAAC,cAAc,IAAI,EAAE,KAAK,IAAI,EAAE,SAAS,KAAK,IAAI,EAAE;AAEvG,MAAI,UAAU,SAAS,GAAG;AACxB,YAAO,KAAK,uBAAuB,UAAU,QAAQ,UAAU,OAAO,cAAc,EAClF,QAAQ,UAAU,KAAI,MAAK,EAAE,KAAK,EACnC,CAAC;AAEF,OAAI;AAEF,UAAM,KAAK,OAAO,YAAY,UAAU,CAAC,OAAO,EAC9C,QAAQ,WACT,CAAC;YACK,OAAO;AACd,aAAO,MAAM,+BAA+B,aAAa,MAAe;;;;CAK9E,AAAQ,yBAAiC;AAGvC,MAFwB,KAAK,OAAO,SAAS,WAExB,YAAY;AAEjC,SAAOC;;;;;;ACpGX,IAAa,eAAb,MAA0B;CACxB,YACE,AAAQC,QACR,AAAQC,QACR;EAFQ;EACA;;;;;CAMV,MAAM,aAA4B;EAEhC,MAAM,SAAS,KAAK,OAAO,UAAU,EAAE;AAEvC,MAAI,OAAO,WAAW,EAAG;AAEzB,WAAO,KAAK,+BAA+B,OAAO,OAAO,gBAAgB;EAGzE,MAAM,qBAAqB,IAAI,IAAI,OAAO,KAAI,MAAK,EAAE,qBAAqB,uBAAuB,CAAC;AAClG,OAAK,MAAM,kBAAkB,mBAC3B,OAAM,6BAA6B,KAAK,QAAQ,eAAe;AAIjE,OAAK,MAAM,SAAS,OAClB,OAAM,KAAK,eAAe,MAAM;AAGlC,WAAO,KAAK,mCAAmC;;CAGjD,MAAc,eAAe,OAAsC;AACjE,MAAI;GAEF,MAAM,cAAc;IAClB,IAAI,MAAM;IACV,YAAY,MAAM;IAClB,eAAe,MAAM;IACrB,SAAS,MAAM;IACf,oBAAoB,MAAM,qBAAqB;IAC/C,WAAW,MAAM,mBAAmB;IACpC,KAAK,MAAM,OAAO;IAClB,WAAW,MAAM,YAAY;IAC9B;AAGD,UAAO,MAAM,KAAK,wBAAwB,YAAY;WAE/C,OAAO;AACd,YAAO,MAAM,wBAAwB,MAAM,QAAQ,MAAe;AAClE,UAAO;;;CAIX,MAAc,wBAAwB,aAAoC;EAExE,MAAM,gBAAgB,KAAK,OAAO;AAElC,MAAI,CAAC,iBAAiB,CAAC,cAAc,SAAS,cAAc,MAAM,WAAW,GAAG;AAC9E,YAAO,MAAM,yCAAyC;AACtD,UAAO;;EAGT,MAAM,OAAO,cAAc,MAAM;EACjC,MAAM,kBAAkB,cAAc;EACtC,MAAM,UAAU,GAAG,KAAK,SAAS,KAAK,KAAK,KAAK,GAAG,KAAK;AAExD,MAAI;GAEF,MAAM,iBAAiB,MAAM,MAAM,GAAG,QAAQ,wBAAwB;IACpE,QAAQ;IACR,SAAS;KACP,gBAAgB;KAChB,uBAAuB,mBAAmB;KAC3C;IACD,MAAM,KAAK,UAAU,YAAY;IAClC,CAAC;AAEF,OAAI,eAAe,IAAI;AACrB,aAAO,KAAK,wBAAwB,YAAY,KAAK;AACrD,WAAO;;AAGT,OAAI,eAAe,WAAW,KAAK;AAEjC,aAAO,MAAM,eAAe,YAAY,GAAG,sBAAsB;IACjE,MAAM,iBAAiB,MAAM,MAAM,GAAG,QAAQ,wBAAwB,YAAY,MAAM;KACtF,QAAQ;KACR,SAAS;MACP,gBAAgB;MAChB,uBAAuB,mBAAmB;MAC3C;KACD,MAAM,KAAK,UAAU,YAAY;KAClC,CAAC;AAEF,QAAI,eAAe,IAAI;AACpB,cAAO,KAAK,wBAAwB,YAAY,KAAK;AACrD,YAAO;WACH;KACH,MAAMC,QAAM,MAAM,eAAe,MAAM;AACvC,cAAO,MAAM,0BAA0B,YAAY,GAAG,IAAIA,QAAM;AAChE,YAAO;;;GAIb,MAAM,MAAM,MAAM,eAAe,MAAM;AACvC,YAAO,MAAM,0BAA0B,YAAY,GAAG,IAAI,MAAM;AAChE,UAAO;WAEA,cAAc;AACrB,YA
AO,MAAM,qCAAqC,aAAsB;AACxE,UAAO;;;;;;;;;;;;;;;;;;;AC/Db,SAAgB,yBAAyB,QAAkC;CACzE,MAAMC,WAAS,IAAI,OAAO;EAAE,SAAS;EAAM,QAAQ;EAAuB,CAAC;AAE3E,SAAQ,kBAAkC;EAExC,MAAM,kBAAkB,sBAAsB,OAAO,UAAU;AAG/D,MAAI,OAAO,QAAQ,SAAS;GAC1B,MAAM,kBAAkB,sBAAsB,iBAAiB;IAC7D,WAAW,OAAO;IAClB,UAAU;KACR,WAAW,OAAO;KAClB,QAAQ,OAAO;KAChB;IACD,aAAa,OAAO,eAAe,EAAE;IACtC,CAAC;AAEF,iBAAc,YAAY,CACxB,GAAI,cAAc,aAAa,EAAE,EACjC,GAAG,gBACJ;AAED,YAAO,MAAM,+BAA+B,EAC1C,gBAAgB,gBAAgB,QACjC,CAAC;;AAIJ,MAAI,OAAO,UAAU,OAAO,OAAO,SAAS,KAAK,OAAO,WAAW;GACjE,MAAM,eAAe,yBAAyB;IAC5C,WAAW,OAAO;IAClB,iBAAiB,OAAO;IACxB,QAAQ,OAAO;IACf,WAAW,OAAO;IAClB,QAAQ,OAAO;IACf,MAAM,OAAO;IACb,UAAU,OAAO;IAClB,CAAC;AAEF,iBAAc,YAAY,CACxB,GAAI,cAAc,aAAa,EAAE,EACjC,GAAG,aACJ;AAED,YAAO,MAAM,4BAA4B;IACvC,gBAAgB,aAAa;IAC7B,aAAa,OAAO,OAAO;IAC5B,CAAC;;EAIJ,MAAM,iBAAiB,cAAc;AACrC,gBAAc,SAAS,OAAO,YAAY;AACxC,OAAI,eACF,OAAM,eAAe,QAAQ;AAG/B,OAAI;AAEF,QAAI,OAAO,eAAe,OAAO,KAAK,OAAO,YAAY,CAAC,SAAS,GAAG;AACpE,cAAO,KAAK,0CAA0C;AAQtD,WAPsB,IAAI,cAAc,iBAAiB;MACvD,WAAW,OAAO;MAClB,UAAU,EACR,WAAW,OAAO,iBACnB;MACD,aAAa,OAAO;MACrB,CAAC,CACkB,iBAAiB;;AAIvC,QAAI,OAAO,UAAU,OAAO,OAAO,SAAS,GAAG;AAC7C,cAAO,KAAK,6BAA6B;AAIzC,WAHqB,IAAI,aAAa,iBAAiB,EACrD,QAAQ,OAAO,QAChB,CAAC,CACiB,YAAY;;YAE1B,OAAO;AAEd,aAAO,MAAM,0CAA0C,MAAe;;;AAI1E,SAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACvGX,IAAa,mBAAb,MAA0G;CACxG,AAAS,OAAO;CAEhB,YAAY,AAAQC,QAAgB;EAAhB;;;;;CAKpB,MAAM,iBAAmC;AACvC,MAAI;AACF,SAAM,KAAK,OAAO,OAAO,UAAU;AACnC,UAAO;WACA,OAAO;AACd,YAAO,MAAM,oCAAoC,MAAM;AACvD,UAAO;;;;;;CAOX,MAAM,iBAAiB,QAAkD;EACvE,MAAM,kBAAkB,KAAK,yBAAyB,OAAO;AAE7D,MAAI;GAEF,MAAM,WAAW,MAAM,KAAK,OAAO,YAAY,OAAO,KAAK,CAAC,UAAU;AAGtE,SAAM,KAAK,yBAAyB,OAAO,MAAM,UAAU,gBAAgB;WACpEC,OAAgB;AAEvB,OADuB,OACH,eAAe,KAAK;AAEtC,aAAO,KAAK,wBAAwB,OAAO,OAAO;AAClD,UAAM,KAAK,OAAO,aAAa,CAAC,OAAO,gBAAgB;SAEvD,OAAM;;;;;;CAQZ,MAAM,iBAAiB,gBAA0C;AAC/D,MAAI;AACF,SAAM,KAAK,OAAO,YAAY,eAAe,CAAC,UAAU;AACxD,UAAO;WACAA,OAAgB;AAEvB,OADuB,OACH,eAAe,IACjC,QAAO;AAET,SAAM;;;;;;CAOV,MAAM,iBAAiB,gBAAuC;AAC5D,MAAI;AACF,SAAM,KAAK,OAAO,YAAY,eAAe,CAAC,QAAQ;AACtD,YAAO,KAAK,uBAAuB,iBAAiB;WAC7CA,OAAgB;AAEvB,OADuB,OACH,eAAe,IACjC,OAAM;;;;;;CAQZ,MAAM,eAAe,gBAAwB,UAAwC;AACnF,MAAI;AACF,SAAM,KAAK,OAAO,YAAY,eAAe,CAAC,WAAW,CAAC,OAAO,SAAS;WACnE,OAAO;AACd,YAAO,MAAM,6BAA6B,SAAS,GAAG,MAAM,kBAAkB,MAAM;AACpF,SAAM;;;;;;CAOV,MAAM,gBAAgB,gBAAwB,WAA2C;AACvF,MAAI,UAAU,WAAW,EAAG;AAE5B,MAAI;AACF,SAAM,KAAK,OAAO,YAAY,eAAe,CAAC,WAAW,CAAC,OAAO,WAAW,EAC1E,QAAQ,UACT,CAAC;WACK,OAAO;AACd,YAAO,MAAM,0BAA0B,UAAU,OAAO,gBAAgB,kBAAkB,MAAM;AAChG,SAAM;;;;;;CAOV,MAAM,eAAe,gBAAwB,YAAmC;AAC9E,MAAI;AACF,SAAM,KAAK,OAAO,YAAY,eAAe,CAAC,UAAU,WAAW,CAAC,QAAQ;WACrEA,OAAgB;AAGvB,OAFuB,OAEH,eAAe,KAAK;AACtC,aAAO,MAAM,6BAA6B,WAAW,QAAQ,kBAAkB,MAAM;AACrF,UAAM;;;;;;;;CASZ,MAAM,wBACJ,gBACA,QACiB;EACjB,MAAM,YAAY,KAAK,kBAAkB,OAAO;AAEhD,MAAI;AAIF,WAHe,MAAM,KAAK,OAAO,YAAY,eAAe,CAAC,WAAW,CAAC,OAAO,EAC9E,WAAW,WACZ,CAAC,EACY,eAAe;WACtB,OAAO;AACd,YAAO,MAAM,6CAA6C,kBAAkB,OAAO,EAAE,QAAQ,CAAC;AAC9F,SAAM;;;;;;;CAQV,MAAM,aACJ,gBACA,QACA,UAA+B,EAAE,EACK;EACtC,MAAM,EAAE,QAAQ,IAAI,QAAQ,eAAe,kBAAkB;AAE7D,MAAI;GACF,MAAMC,eAAwC;IAC5C,GAAG;IACH,cAAc,eAAe,OAAO,KAAK,IAAI,CAAC,OAAO,MAAM;IAC5D;AAED,OAAI,OACF,cAAa,eAAe,KAAK,kBAAkB,OAAO;AAG5D,OAAI,cACF,cAAa,oBAAoB,cAAc,KAAK,IAAI;AAG1D,OAAI,cACF,cAAa,oBAAoB,cAAc,KAAK,IAAI;AAQ1D,YALe,MAAM,KAAK,OACvB,YAAY,eAAe,CAC3B,WAAW,CACX,OAAO,aAAa,EAER,QAAQ,EAAE,EAAE,KAAI,SAAQ;IACrC,IAAI,OAAQ,IAAI,UAAsC,MAAM,GAAG;IAC/D,OAAO,IAAI,mBAAmB;IAC9B,UAAU,IAAI;IACf,EAAE;WACI,OAAO;AACd,YAAO,MAAM,2BAA2B,kBAAkB,MAAM;AAChE,SAAM;;;;;;CASV,AAAQ,yBAAyB,QAA2D;AAC1F,SAAO;GACL,MAAM,OAAO;GACb,QAAQ,OAAO,OAAO,KAAI,UAAS,KAAK,aAAa,MAAM,CAAC;GAC5D,uBA
AuB,OAAO;GAC/B;;;;;CAMH,AAAQ,aAAa,OAAoD;EACvE,MAAMC,iBAAwC;GAC5C,MAAM,MAAM;GACZ,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,OAAO,MAAM;GACb,UAAU,MAAM;GACjB;AAGD,MAAI,MAAM,SAAS,aAAa,MAAM,iBACpC,gBAAe,UAAU,MAAM;AAGjC,SAAO;;;;;CAMT,MAAc,yBACZ,gBACA,eACA,cACe;AACf,MAAI,CAAC,eAAe,OAAQ;EAE5B,MAAM,gBAAgB,IAAI,IAAI,cAAc,OAAO,KAAI,MAAK,EAAE,KAAK,CAAC;EACpE,MAAM,YAAY,aAAa,QAAQ,QACrC,MAAK,CAAC,cAAc,IAAI,EAAE,KAAK,IAAI,EAAE,SAAS,KAC/C,IAAI,EAAE;AAEP,MAAI,UAAU,SAAS,GAAG;AACxB,YAAO,KAAK,uBAAuB,eAAe,QAAQ,UAAU,OAAO,cAAc,EACvF,QAAQ,UAAU,KAAI,MAAK,EAAE,KAAK,EACnC,CAAC;AAEF,OAAI;AACF,UAAM,KAAK,OAAO,YAAY,eAAe,CAAC,OAAO,EACnD,QAAQ,WACT,CAAC;YACK,OAAO;AACd,aAAO,MAAM,+BAA+B,kBAAkB,MAAM;;;;;;;CAQ1E,AAAQ,kBAAkB,QAAyC;EACjE,MAAMC,QAAkB,EAAE;AAE1B,OAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,CAC/C,KAAI,MAAM,QAAQ,MAAM,CAEtB,OAAM,KAAK,GAAG,IAAI,IAAI,MAAM,KAAI,MAAK,OAAO,EAAE,CAAC,CAAC,KAAK,IAAI,CAAC,GAAG;WACpD,OAAO,UAAU,SAC1B,OAAM,KAAK,GAAG,IAAI,IAAI,QAAQ;WACrB,OAAO,UAAU,SAC1B,OAAM,KAAK,GAAG,IAAI,GAAG,QAAQ;WACpB,OAAO,UAAU,UAC1B,OAAM,KAAK,GAAG,IAAI,GAAG,QAAQ;AAIjC,SAAO,MAAM,KAAK,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC1R7B,SAAgB,uBAAuB,QAAqD;AAS1F,QAAO,IAAI,iBARI,IAAI,OAAO;EACxB,QAAQ,OAAO;EACf,OAAO,OAAO;EACd,0BAA0B,OAAO,4BAA4B;EAC7D,sBAAsB,OAAO;EAC7B,YAAY,OAAO;EACpB,CAAC,CAEiC;;;;;;;;;AAUrC,SAAgB,iCAAiC,QAAkC;AACjF,QAAO,IAAI,iBAAiB,OAAO;;;;;;;;;ACvCrC,MAAa,8BAA8B,OACzC,iBACA,gBACA,OACA,gBACG;AACH,KAAI;EAEF,MAAM,YAAY,2BAA2B,gBAAgB,YAAY;AAEzE,WAAO,MAAM,gDAAgD;GAC3D,YAAY;GACZ,YAAY;GACZ;GACD,CAAC;AAGF,MAAI;AACF,SAAM,gBAAgB,YAAY,UAAU,CAAC,UAAU,MAAM,CAAC,QAAQ;AACtE,YAAO,KAAK,mCAAmC;IAC7C,YAAY;IACZ;IACD,CAAC;WACKC,gBAAyB;AAIhC,OAHuB,eAGJ,eAAe,KAAK;AACrC,aAAO,MAAM,mDAAmD;KAC9D,YAAY;KACZ;KACD,CAAC;AAEF,QAAI;AACF,WAAM,gBACH,YAAY,UAAU,CACtB,WAAW,CACX,OAAO,EACN,WAAW,iBAAiB,SAC7B,CAAC;AACJ,cAAO,KAAK,mCAAmC;MAC7C,YAAY;MACZ;MACD,CAAC;aACKC,kBAA2B;AAIlC,SAHmB,iBAGJ,eAAe,IAC5B,UAAO,MAAM,wCAAwC,kBAA2B;MAC9E,YAAY;MACZ;MACD,CAAC;SAEF,UAAO,MAAM,6BAA6B,EAAE,YAAY,OAAO,CAAC;;SAIpE,OAAM;;UAGHC,OAAgB;EAEvB,MAAM,YAAY,2BAA2B,gBAAgB,YAAY;AAEzE,WAAO,MAAM,4CAA4C,OAAgB;GACvE,YAAY;GACZ,YAAY;GACZ;GACD,CAAC"}