@n8n/n8n-nodes-langchain 1.86.1 → 1.87.1
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/dist/credentials/AnthropicApi.credentials.js +1 -0
- package/dist/credentials/AnthropicApi.credentials.js.map +1 -0
- package/dist/credentials/AzureOpenAiApi.credentials.js +1 -0
- package/dist/credentials/AzureOpenAiApi.credentials.js.map +1 -0
- package/dist/credentials/CohereApi.credentials.js +1 -0
- package/dist/credentials/CohereApi.credentials.js.map +1 -0
- package/dist/credentials/DeepSeekApi.credentials.js +1 -0
- package/dist/credentials/DeepSeekApi.credentials.js.map +1 -0
- package/dist/credentials/GooglePalmApi.credentials.js +1 -0
- package/dist/credentials/GooglePalmApi.credentials.js.map +1 -0
- package/dist/credentials/GroqApi.credentials.js +1 -0
- package/dist/credentials/GroqApi.credentials.js.map +1 -0
- package/dist/credentials/HuggingFaceApi.credentials.js +1 -0
- package/dist/credentials/HuggingFaceApi.credentials.js.map +1 -0
- package/dist/credentials/MistralCloudApi.credentials.js +1 -0
- package/dist/credentials/MistralCloudApi.credentials.js.map +1 -0
- package/dist/credentials/MotorheadApi.credentials.js +1 -0
- package/dist/credentials/MotorheadApi.credentials.js.map +1 -0
- package/dist/credentials/OllamaApi.credentials.js +1 -0
- package/dist/credentials/OllamaApi.credentials.js.map +1 -0
- package/dist/credentials/OpenRouterApi.credentials.js +1 -0
- package/dist/credentials/OpenRouterApi.credentials.js.map +1 -0
- package/dist/credentials/PineconeApi.credentials.js +1 -0
- package/dist/credentials/PineconeApi.credentials.js.map +1 -0
- package/dist/credentials/QdrantApi.credentials.js +1 -0
- package/dist/credentials/QdrantApi.credentials.js.map +1 -0
- package/dist/credentials/SerpApi.credentials.js +1 -0
- package/dist/credentials/SerpApi.credentials.js.map +1 -0
- package/dist/credentials/WolframAlphaApi.credentials.js +1 -0
- package/dist/credentials/WolframAlphaApi.credentials.js.map +1 -0
- package/dist/credentials/XAiApi.credentials.js +1 -0
- package/dist/credentials/XAiApi.credentials.js.map +1 -0
- package/dist/credentials/XataApi.credentials.js +1 -0
- package/dist/credentials/XataApi.credentials.js.map +1 -0
- package/dist/credentials/ZepApi.credentials.js +1 -0
- package/dist/credentials/ZepApi.credentials.js.map +1 -0
- package/dist/known/nodes.json +12 -0
- package/dist/nodes/agents/Agent/Agent.node.js +1 -0
- package/dist/nodes/agents/Agent/Agent.node.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/prompt.js +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/prompt.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/prompt.js +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/prompt.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/prompt.js +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/prompt.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/prompt.js +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/prompt.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/prompts.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/prompts.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/prompt.js +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/prompt.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/utils.js +1 -0
- package/dist/nodes/agents/Agent/agents/utils.js.map +1 -0
- package/dist/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.js +1 -0
- package/dist/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.js.map +1 -0
- package/dist/nodes/agents/OpenAiAssistant/utils.js +1 -0
- package/dist/nodes/agents/OpenAiAssistant/utils.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/ChainLlm.node.js +1 -0
- package/dist/nodes/chains/ChainLLM/ChainLlm.node.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/chainExecutor.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/chainExecutor.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/config.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/config.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/imageUtils.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/imageUtils.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/index.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/index.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/promptUtils.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/promptUtils.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/responseFormatter.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/responseFormatter.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/types.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/types.js.map +1 -0
- package/dist/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.js +1 -0
- package/dist/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.js.map +1 -0
- package/dist/nodes/chains/ChainSummarization/ChainSummarization.node.js +1 -0
- package/dist/nodes/chains/ChainSummarization/ChainSummarization.node.js.map +1 -0
- package/dist/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.js +1 -0
- package/dist/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.js.map +1 -0
- package/dist/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.js +1 -0
- package/dist/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.js.map +1 -0
- package/dist/nodes/chains/ChainSummarization/helpers.js +1 -0
- package/dist/nodes/chains/ChainSummarization/helpers.js.map +1 -0
- package/dist/nodes/chains/ChainSummarization/prompt.js +1 -0
- package/dist/nodes/chains/ChainSummarization/prompt.js.map +1 -0
- package/dist/nodes/chains/InformationExtractor/InformationExtractor.node.js +1 -0
- package/dist/nodes/chains/InformationExtractor/InformationExtractor.node.js.map +1 -0
- package/dist/nodes/chains/InformationExtractor/helpers.js +1 -0
- package/dist/nodes/chains/InformationExtractor/helpers.js.map +1 -0
- package/dist/nodes/chains/InformationExtractor/types.js +1 -0
- package/dist/nodes/chains/InformationExtractor/types.js.map +1 -0
- package/dist/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.js +1 -0
- package/dist/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.js.map +1 -0
- package/dist/nodes/chains/TextClassifier/TextClassifier.node.js +1 -0
- package/dist/nodes/chains/TextClassifier/TextClassifier.node.js.map +1 -0
- package/dist/nodes/code/Code.node.js +1 -0
- package/dist/nodes/code/Code.node.js.map +1 -0
- package/dist/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.js +1 -0
- package/dist/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.js.map +1 -0
- package/dist/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.js +1 -0
- package/dist/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.js.map +1 -0
- package/dist/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.js +1 -0
- package/dist/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.js.map +1 -0
- package/dist/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.js +1 -0
- package/dist/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsGoogleVertex/EmbeddingsGoogleVertex.node.js +166 -0
- package/dist/nodes/embeddings/EmbeddingsGoogleVertex/EmbeddingsGoogleVertex.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsGoogleVertex/google.svg +1 -0
- package/dist/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.js.map +1 -0
- package/dist/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.js +1 -0
- package/dist/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.js.map +1 -0
- package/dist/nodes/llms/LMChatAnthropic/methods/searchModels.js +1 -0
- package/dist/nodes/llms/LMChatAnthropic/methods/searchModels.js.map +1 -0
- package/dist/nodes/llms/LMChatOllama/LmChatOllama.node.js +1 -0
- package/dist/nodes/llms/LMChatOllama/LmChatOllama.node.js.map +1 -0
- package/dist/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.js +1 -0
- package/dist/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.js.map +1 -0
- package/dist/nodes/llms/LMChatOpenAi/methods/loadModels.js +1 -0
- package/dist/nodes/llms/LMChatOpenAi/methods/loadModels.js.map +1 -0
- package/dist/nodes/llms/LMCohere/LmCohere.node.js +1 -0
- package/dist/nodes/llms/LMCohere/LmCohere.node.js.map +1 -0
- package/dist/nodes/llms/LMOllama/LmOllama.node.js +1 -0
- package/dist/nodes/llms/LMOllama/LmOllama.node.js.map +1 -0
- package/dist/nodes/llms/LMOllama/description.js +1 -0
- package/dist/nodes/llms/LMOllama/description.js.map +1 -0
- package/dist/nodes/llms/LMOpenAi/LmOpenAi.node.js +1 -0
- package/dist/nodes/llms/LMOpenAi/LmOpenAi.node.js.map +1 -0
- package/dist/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.js +1 -0
- package/dist/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.js.map +1 -0
- package/dist/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.js +1 -0
- package/dist/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.js.map +1 -0
- package/dist/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.js +1 -0
- package/dist/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.js.map +1 -0
- package/dist/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.js +1 -0
- package/dist/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.js.map +1 -0
- package/dist/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.js +1 -0
- package/dist/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.js.map +1 -0
- package/dist/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.js +1 -0
- package/dist/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.js.map +1 -0
- package/dist/nodes/llms/LmChatGoogleVertex/error-handling.js +1 -0
- package/dist/nodes/llms/LmChatGoogleVertex/error-handling.js.map +1 -0
- package/dist/nodes/llms/LmChatGroq/LmChatGroq.node.js +1 -0
- package/dist/nodes/llms/LmChatGroq/LmChatGroq.node.js.map +1 -0
- package/dist/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.js +1 -0
- package/dist/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.js.map +1 -0
- package/dist/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.js +1 -0
- package/dist/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.js.map +1 -0
- package/dist/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.js +1 -0
- package/dist/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.js.map +1 -0
- package/dist/nodes/llms/N8nLlmTracing.js +1 -0
- package/dist/nodes/llms/N8nLlmTracing.js.map +1 -0
- package/dist/nodes/llms/gemini-common/additional-options.js +1 -0
- package/dist/nodes/llms/gemini-common/additional-options.js.map +1 -0
- package/dist/nodes/llms/gemini-common/safety-options.js +1 -0
- package/dist/nodes/llms/gemini-common/safety-options.js.map +1 -0
- package/dist/nodes/llms/n8nDefaultFailedAttemptHandler.js +1 -0
- package/dist/nodes/llms/n8nDefaultFailedAttemptHandler.js.map +1 -0
- package/dist/nodes/llms/n8nLlmFailedAttemptHandler.js +1 -0
- package/dist/nodes/llms/n8nLlmFailedAttemptHandler.js.map +1 -0
- package/dist/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.js +1 -0
- package/dist/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.js.map +1 -0
- package/dist/nodes/memory/MemoryChatRetriever/MemoryChatRetriever.node.js +1 -0
- package/dist/nodes/memory/MemoryChatRetriever/MemoryChatRetriever.node.js.map +1 -0
- package/dist/nodes/memory/MemoryManager/MemoryManager.node.js +1 -0
- package/dist/nodes/memory/MemoryManager/MemoryManager.node.js.map +1 -0
- package/dist/nodes/memory/MemoryMongoDbChat/MemoryMongoDbChat.node.js +155 -0
- package/dist/nodes/memory/MemoryMongoDbChat/MemoryMongoDbChat.node.js.map +1 -0
- package/dist/nodes/memory/MemoryMongoDbChat/mongodb.dark.svg +3 -0
- package/dist/nodes/memory/MemoryMongoDbChat/mongodb.svg +3 -0
- package/dist/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.js +1 -0
- package/dist/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.js.map +1 -0
- package/dist/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.js +1 -0
- package/dist/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.js.map +1 -0
- package/dist/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.js +1 -0
- package/dist/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.js.map +1 -0
- package/dist/nodes/memory/MemoryXata/MemoryXata.node.js +1 -0
- package/dist/nodes/memory/MemoryXata/MemoryXata.node.js.map +1 -0
- package/dist/nodes/memory/MemoryZep/MemoryZep.node.js +1 -0
- package/dist/nodes/memory/MemoryZep/MemoryZep.node.js.map +1 -0
- package/dist/nodes/memory/descriptions.js +1 -0
- package/dist/nodes/memory/descriptions.js.map +1 -0
- package/dist/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.js +1 -0
- package/dist/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.js.map +1 -0
- package/dist/nodes/output_parser/OutputParserAutofixing/prompt.js +1 -0
- package/dist/nodes/output_parser/OutputParserAutofixing/prompt.js.map +1 -0
- package/dist/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.js +1 -0
- package/dist/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.js.map +1 -0
- package/dist/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.js +1 -0
- package/dist/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.js.map +1 -0
- package/dist/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.js +1 -0
- package/dist/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.js.map +1 -0
- package/dist/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.js +1 -0
- package/dist/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.js.map +1 -0
- package/dist/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.js +1 -0
- package/dist/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.js.map +1 -0
- package/dist/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.js +1 -0
- package/dist/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.js.map +1 -0
- package/dist/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.js +1 -0
- package/dist/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.js.map +1 -0
- package/dist/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.js +1 -0
- package/dist/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.js.map +1 -0
- package/dist/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.js +1 -0
- package/dist/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.js.map +1 -0
- package/dist/nodes/tools/ToolCalculator/ToolCalculator.node.js +1 -0
- package/dist/nodes/tools/ToolCalculator/ToolCalculator.node.js.map +1 -0
- package/dist/nodes/tools/ToolCode/ToolCode.node.js +1 -0
- package/dist/nodes/tools/ToolCode/ToolCode.node.js.map +1 -0
- package/dist/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.js +1 -0
- package/dist/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.js.map +1 -0
- package/dist/nodes/tools/ToolHttpRequest/descriptions.js +1 -0
- package/dist/nodes/tools/ToolHttpRequest/descriptions.js.map +1 -0
- package/dist/nodes/tools/ToolHttpRequest/interfaces.js +1 -0
- package/dist/nodes/tools/ToolHttpRequest/interfaces.js.map +1 -0
- package/dist/nodes/tools/ToolHttpRequest/utils.js +3 -2
- package/dist/nodes/tools/ToolHttpRequest/utils.js.map +1 -0
- package/dist/nodes/tools/ToolSerpApi/ToolSerpApi.node.js +1 -0
- package/dist/nodes/tools/ToolSerpApi/ToolSerpApi.node.js.map +1 -0
- package/dist/nodes/tools/ToolThink/ToolThink.node.js +94 -0
- package/dist/nodes/tools/ToolThink/ToolThink.node.js.map +1 -0
- package/dist/nodes/tools/ToolVectorStore/ToolVectorStore.node.js +1 -0
- package/dist/nodes/tools/ToolVectorStore/ToolVectorStore.node.js.map +1 -0
- package/dist/nodes/tools/ToolWikipedia/ToolWikipedia.node.js +1 -0
- package/dist/nodes/tools/ToolWikipedia/ToolWikipedia.node.js.map +1 -0
- package/dist/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.js +1 -0
- package/dist/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/ToolWorkflow.node.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/ToolWorkflow.node.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v1/ToolWorkflowV1.node.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v1/ToolWorkflowV1.node.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v1/versionDescription.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v1/versionDescription.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/ToolWorkflowV2.node.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/ToolWorkflowV2.node.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/methods/index.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/methods/index.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/methods/localResourceMapping.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/methods/localResourceMapping.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/utils/WorkflowToolService.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/utils/WorkflowToolService.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/versionDescription.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/versionDescription.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/ChatTrigger.node.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/ChatTrigger.node.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/GenericFunctions.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/GenericFunctions.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/constants.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/constants.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/error.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/error.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/templates.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/templates.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/types.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/types.js.map +1 -0
- package/dist/nodes/trigger/ManualChatTrigger/ManualChatTrigger.node.js +1 -0
- package/dist/nodes/trigger/ManualChatTrigger/ManualChatTrigger.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemoryInsert/VectorStoreInMemoryInsert.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemoryInsert/VectorStoreInMemoryInsert.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreMongoDBAtlas/VectorStoreMongoDBAtlas.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreMongoDBAtlas/VectorStoreMongoDBAtlas.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.js +1 -0
- package/dist/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.js +1 -0
- package/dist/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStorePineconeInsert/VectorStorePineconeInsert.node.js +1 -0
- package/dist/nodes/vector_store/VectorStorePineconeInsert/VectorStorePineconeInsert.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.js +1 -0
- package/dist/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabaseInsert/VectorStoreSupabaseInsert.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabaseInsert/VectorStoreSupabaseInsert.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.js.map +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/MemoryCalculator.js +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/MemoryCalculator.js.map +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/MemoryVectorStoreManager.js +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/MemoryVectorStoreManager.js.map +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/StoreCleanupService.js +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/StoreCleanupService.js.map +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/config.js +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/config.js.map +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/types.js +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/types.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/constants.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/constants.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/createVectorStoreNode.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/createVectorStoreNode.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/index.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/index.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/insertOperation.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/insertOperation.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/loadOperation.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/loadOperation.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveAsToolOperation.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveAsToolOperation.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveOperation.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveOperation.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/updateOperation.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/updateOperation.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/types.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/types.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/utils.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/utils.js.map +1 -0
- package/dist/nodes/vector_store/shared/descriptions.js +1 -0
- package/dist/nodes/vector_store/shared/descriptions.js.map +1 -0
- package/dist/nodes/vector_store/shared/processDocuments.js +1 -0
- package/dist/nodes/vector_store/shared/processDocuments.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/OpenAi.node.js +1 -0
- package/dist/nodes/vendors/OpenAi/OpenAi.node.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/create.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/create.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/list.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/list.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/message.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/message.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/update.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/update.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/generate.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/generate.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/transcribe.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/transcribe.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/translate.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/translate.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/descriptions.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/descriptions.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/deleteFile.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/deleteFile.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/list.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/list.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/upload.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/upload.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/analyze.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/analyze.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/generate.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/generate.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/node.type.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/node.type.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/router.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/router.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/classify.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/classify.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/message.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/message.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/versionDescription.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/versionDescription.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/constants.js +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/constants.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/error-handling.js +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/error-handling.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/interfaces.js +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/interfaces.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/utils.js +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/utils.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/methods/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/methods/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/methods/listSearch.js +1 -0
- package/dist/nodes/vendors/OpenAi/methods/listSearch.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/methods/loadOptions.js +1 -0
- package/dist/nodes/vendors/OpenAi/methods/loadOptions.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/transport/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/transport/index.js.map +1 -0
- package/dist/types/nodes.json +3 -0
- package/dist/types/types.js +1 -0
- package/dist/types/types.js.map +1 -0
- package/dist/types/zod.types.js +1 -0
- package/dist/types/zod.types.js.map +1 -0
- package/dist/utils/N8nBinaryLoader.js +1 -0
- package/dist/utils/N8nBinaryLoader.js.map +1 -0
- package/dist/utils/N8nJsonLoader.js +1 -0
- package/dist/utils/N8nJsonLoader.js.map +1 -0
- package/dist/utils/N8nTool.js +1 -0
- package/dist/utils/N8nTool.js.map +1 -0
- package/dist/utils/descriptions.js +1 -0
- package/dist/utils/descriptions.js.map +1 -0
- package/dist/utils/helpers.js +1 -0
- package/dist/utils/helpers.js.map +1 -0
- package/dist/utils/logWrapper.js +1 -0
- package/dist/utils/logWrapper.js.map +1 -0
- package/dist/utils/output_parsers/N8nItemListOutputParser.js +1 -0
- package/dist/utils/output_parsers/N8nItemListOutputParser.js.map +1 -0
- package/dist/utils/output_parsers/N8nOutputFixingParser.js +1 -0
- package/dist/utils/output_parsers/N8nOutputFixingParser.js.map +1 -0
- package/dist/utils/output_parsers/N8nOutputParser.js +1 -0
- package/dist/utils/output_parsers/N8nOutputParser.js.map +1 -0
- package/dist/utils/output_parsers/N8nStructuredOutputParser.js +1 -0
- package/dist/utils/output_parsers/N8nStructuredOutputParser.js.map +1 -0
- package/dist/utils/output_parsers/prompt.js +1 -0
- package/dist/utils/output_parsers/prompt.js.map +1 -0
- package/dist/utils/schemaParsing.js +1 -0
- package/dist/utils/schemaParsing.js.map +1 -0
- package/dist/utils/sharedFields.js +1 -0
- package/dist/utils/sharedFields.js.map +1 -0
- package/dist/utils/tracing.js +1 -0
- package/dist/utils/tracing.js.map +1 -0
- package/package.json +9 -6
package/dist/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.js.map
@@ -0,0 +1 @@
+ [new one-line version-3 source map; its sourcesContent embeds the deprecated, hidden "Supabase: Load" node (VectorStoreSupabaseLoad), which reads the supabaseApi credential, creates a client with createClient(host, serviceRole), and supplies SupabaseVectorStore.fromExistingIndex(embeddings, { client, tableName, queryName, filter }) wrapped in logWrapper]
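For orientation, the node source embedded in that map follows the standard LangChain pattern for attaching to an existing Supabase table. The sketch below is a condensed restatement of that pattern, not the node's actual code: the URL, service-role key, and table name are placeholder values, and the embeddings instance would normally come from the connected embeddings sub-node.

```ts
// Hedged sketch of the pattern in VectorStoreSupabaseLoad: connect to an existing
// Supabase table and expose it as a LangChain vector store. Placeholder values only.
import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase';
import type { Embeddings } from '@langchain/core/embeddings';
import { createClient } from '@supabase/supabase-js';

async function loadSupabaseStore(embeddings: Embeddings): Promise<SupabaseVectorStore> {
	// In the node, host and serviceRole come from the supabaseApi credential.
	const client = createClient('https://example.supabase.co', 'service-role-key');

	// tableName is a node parameter; 'match_documents' is the node's default query name.
	return await SupabaseVectorStore.fromExistingIndex(embeddings, {
		client,
		tableName: 'documents',
		queryName: 'match_documents',
	});
}
```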
package/dist/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.js.map
@@ -0,0 +1 @@
+ [new one-line version-3 source map; its sourcesContent embeds the "Zep Vector Store" node built with createVectorStoreNode<ZepVectorStore | ZepCloudVectorStore>: options for Embedding Dimensions (default 1536) and Is Auto Embedded, a getVectorStoreClient that picks ZepCloudVectorStore or ZepVectorStore based on the credential's cloud flag, and a populateVectorStore that maps a 400 CreateDocumentCollectionRequest error to a "Collection not found" NodeOperationError]
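The most instructive part of the embedded VectorStoreZep source is how it picks between Zep Cloud and a self-hosted Zep instance based on the zepApi credential. Below is a minimal sketch of that selection using the same @langchain/community imports as the source; the credential shape and the 1536 default are taken from the source, while the standalone function is illustrative only.

```ts
// Sketch of the Zep client selection shown in the embedded source: cloud credentials
// use ZepCloudVectorStore, otherwise a self-hosted ZepVectorStore with an apiUrl.
import { ZepVectorStore } from '@langchain/community/vectorstores/zep';
import { ZepCloudVectorStore } from '@langchain/community/vectorstores/zep_cloud';
import type { Embeddings } from '@langchain/core/embeddings';

interface ZepApiCredentials {
	apiKey?: string;
	apiUrl: string;
	cloud: boolean;
}

function getZepStore(credentials: ZepApiCredentials, embeddings: Embeddings, collectionName: string) {
	const zepConfig = {
		apiKey: credentials.apiKey,
		collectionName,
		embeddingDimensions: 1536, // node option; 1536 is the default in the source
	};

	return credentials.cloud
		? new ZepCloudVectorStore(embeddings, zepConfig)
		: new ZepVectorStore(embeddings, { ...zepConfig, apiUrl: credentials.apiUrl });
}
```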
package/dist/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.js.map
@@ -0,0 +1 @@
+ [new one-line version-3 source map; its sourcesContent embeds the deprecated, hidden "Zep Vector Store: Insert" node, which runs input items through processDocuments and calls ZepVectorStore.fromDocuments with apiUrl, apiKey, collectionName, embeddingDimensions (default 1536), and isAutoEmbedded (default true)]
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.ts"],"sourcesContent":["import type { IZepConfig } from '@langchain/community/vectorstores/zep';\nimport { ZepVectorStore } from '@langchain/community/vectorstores/zep';\nimport type { Embeddings } from '@langchain/core/embeddings';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getMetadataFiltersValues } from '@utils/helpers';\nimport { logWrapper } from '@utils/logWrapper';\nimport { metadataFilterField } from '@utils/sharedFields';\n\n// This node is deprecated. Use VectorStoreZep instead.\nexport class VectorStoreZepLoad implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Zep Vector Store: Load',\n\t\tname: 'vectorStoreZepLoad',\n\t\thidden: true,\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-icon-not-svg\n\t\ticon: 'file:zep.png',\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'Load data from Zep Vector Store index',\n\t\tdefaults: {\n\t\t\tname: 'Zep: Load',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Vector Stores'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorezep/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'zepApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tinputs: [\n\t\t\t{\n\t\t\t\tdisplayName: 'Embedding',\n\t\t\t\tmaxConnections: 1,\n\t\t\t\ttype: NodeConnectionTypes.AiEmbedding,\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\toutputs: [NodeConnectionTypes.AiVectorStore],\n\t\toutputNames: ['Vector Store'],\n\t\tproperties: [\n\t\t\t{\n\t\t\t\tdisplayName: 'Collection Name',\n\t\t\t\tname: 'collectionName',\n\t\t\t\ttype: 'string',\n\t\t\t\tdefault: '',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\ttype: 'collection',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Embedding Dimensions',\n\t\t\t\t\t\tname: 'embeddingDimensions',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdefault: 1536,\n\t\t\t\t\t\tdescription: 'Whether to allow using characters from the Unicode surrogate blocks',\n\t\t\t\t\t},\n\t\t\t\t\tmetadataFilterField,\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tthis.logger.debug('Supplying data for Zep Load Vector Store');\n\n\t\tconst collectionName = this.getNodeParameter('collectionName', itemIndex) as string;\n\n\t\tconst options =\n\t\t\t(this.getNodeParameter('options', itemIndex) as {\n\t\t\t\tembeddingDimensions?: number;\n\t\t\t}) || {};\n\n\t\tconst credentials = await this.getCredentials<{\n\t\t\tapiKey?: string;\n\t\t\tapiUrl: string;\n\t\t}>('zepApi');\n\t\tconst embeddings = (await this.getInputConnectionData(\n\t\t\tNodeConnectionTypes.AiEmbedding,\n\t\t\t0,\n\t\t)) as Embeddings;\n\n\t\tconst zepConfig: IZepConfig = {\n\t\t\tapiUrl: credentials.apiUrl,\n\t\t\tapiKey: credentials.apiKey,\n\t\t\tcollectionName,\n\t\t\tembeddingDimensions: options.embeddingDimensions ?? 
1536,\n\t\t\tmetadata: getMetadataFiltersValues(this, itemIndex),\n\t\t};\n\n\t\tconst vectorStore = new ZepVectorStore(embeddings, zepConfig);\n\n\t\treturn {\n\t\t\tresponse: logWrapper(vectorStore, this),\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,iBAA+B;AAE/B,0BAMO;AAEP,qBAAyC;AACzC,wBAA2B;AAC3B,0BAAoC;AAG7B,MAAM,mBAAwC;AAAA,EAA9C;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,QAAQ;AAAA;AAAA,MAER,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,eAAe;AAAA,QACrB;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,MACA,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,QAAQ;AAAA,QACP;AAAA,UACC,aAAa;AAAA,UACb,gBAAgB;AAAA,UAChB,MAAM,wCAAoB;AAAA,UAC1B,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,SAAS,CAAC,wCAAoB,aAAa;AAAA,MAC3C,aAAa,CAAC,cAAc;AAAA,MAC5B,YAAY;AAAA,QACX;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,QACX;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,aAAa;AAAA,UACb,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,YACd;AAAA,YACA;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,SAAK,OAAO,MAAM,0CAA0C;AAE5D,UAAM,iBAAiB,KAAK,iBAAiB,kBAAkB,SAAS;AAExE,UAAM,UACJ,KAAK,iBAAiB,WAAW,SAAS,KAErC,CAAC;AAER,UAAM,cAAc,MAAM,KAAK,eAG5B,QAAQ;AACX,UAAM,aAAc,MAAM,KAAK;AAAA,MAC9B,wCAAoB;AAAA,MACpB;AAAA,IACD;AAEA,UAAM,YAAwB;AAAA,MAC7B,QAAQ,YAAY;AAAA,MACpB,QAAQ,YAAY;AAAA,MACpB;AAAA,MACA,qBAAqB,QAAQ,uBAAuB;AAAA,MACpD,cAAU,yCAAyB,MAAM,SAAS;AAAA,IACnD;AAEA,UAAM,cAAc,IAAI,0BAAe,YAAY,SAAS;AAE5D,WAAO;AAAA,MACN,cAAU,8BAAW,aAAa,IAAI;AAAA,IACvC;AAAA,EACD;AACD;","names":[]}
package/dist/nodes/vector_store/shared/MemoryManager/MemoryCalculator.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../nodes/vector_store/shared/MemoryManager/MemoryCalculator.ts"],"sourcesContent":["import type { Document } from '@langchain/core/documents';\nimport type { MemoryVectorStore } from 'langchain/vectorstores/memory';\n\nimport type { IMemoryCalculator } from './types';\n\n// Memory estimation constants\nconst FLOAT_SIZE_BYTES = 8; // Size of a float64 in bytes\nconst CHAR_SIZE_BYTES = 2; // Size of a JavaScript character in bytes(2 bytes per character in UTF-16)\nconst VECTOR_OVERHEAD_BYTES = 200; // Estimated overhead per vector\nconst EMBEDDING_DIMENSIONS = 1536; // Fixed embedding dimensions\nconst EMBEDDING_SIZE_BYTES = EMBEDDING_DIMENSIONS * FLOAT_SIZE_BYTES;\nconst AVG_METADATA_SIZE_BYTES = 100; // Average size for simple metadata\n\n/**\n * Calculates memory usage for vector stores and documents\n */\nexport class MemoryCalculator implements IMemoryCalculator {\n\t/**\n\t * Fast batch size estimation for multiple documents\n\t */\n\testimateBatchSize(documents: Document[]): number {\n\t\tif (documents.length === 0) return 0;\n\n\t\tlet totalContentSize = 0;\n\t\tlet totalMetadataSize = 0;\n\n\t\t// Single pass through documents for content and metadata estimation\n\t\tfor (const doc of documents) {\n\t\t\tif (doc.pageContent) {\n\t\t\t\ttotalContentSize += doc.pageContent.length * CHAR_SIZE_BYTES;\n\t\t\t}\n\n\t\t\t// Metadata size estimation\n\t\t\tif (doc.metadata) {\n\t\t\t\t// For simple objects, estimate based on key count\n\t\t\t\tconst metadataKeys = Object.keys(doc.metadata).length;\n\t\t\t\tif (metadataKeys > 0) {\n\t\t\t\t\t// For each key, estimate the key name plus a typical value\n\t\t\t\t\t// plus some overhead for object structure\n\t\t\t\t\ttotalMetadataSize += metadataKeys * AVG_METADATA_SIZE_BYTES;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Fixed size components (embedding vectors and overhead)\n\t\t// Each embedding is a fixed-size array of floating point numbers\n\t\tconst embeddingSize = documents.length * EMBEDDING_SIZE_BYTES;\n\n\t\t// Object overhead, each vector is stored with additional JS object structure\n\t\tconst overhead = documents.length * VECTOR_OVERHEAD_BYTES;\n\n\t\t// Calculate total batch size with a safety factor to avoid underestimation\n\t\tconst calculatedSize = totalContentSize + totalMetadataSize + embeddingSize + overhead;\n\n\t\treturn Math.ceil(calculatedSize);\n\t}\n\n\t/**\n\t * Calculate the size of a vector store by examining its contents\n\t */\n\tcalculateVectorStoreSize(vectorStore: MemoryVectorStore): number {\n\t\tif (!vectorStore.memoryVectors || vectorStore.memoryVectors.length === 0) {\n\t\t\treturn 0;\n\t\t}\n\n\t\tlet storeSize = 0;\n\n\t\t// Calculate size of each vector\n\t\tfor (const vector of vectorStore.memoryVectors) {\n\t\t\t// Size of embedding (float64 array)\n\t\t\tstoreSize += vector.embedding.length * FLOAT_SIZE_BYTES;\n\n\t\t\t// Size of content string (2 bytes per character in JS)\n\t\t\tstoreSize += vector.content ? 
vector.content.length * CHAR_SIZE_BYTES : 0;\n\n\t\t\t// Estimate metadata size\n\t\t\tif (vector.metadata) {\n\t\t\t\t// Use a more accurate calculation for metadata\n\t\t\t\tconst metadataStr = JSON.stringify(vector.metadata);\n\t\t\t\tstoreSize += metadataStr.length * CHAR_SIZE_BYTES;\n\t\t\t}\n\n\t\t\t// Add overhead for object structure\n\t\t\tstoreSize += VECTOR_OVERHEAD_BYTES;\n\t\t}\n\n\t\treturn Math.ceil(storeSize);\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAMA,MAAM,mBAAmB;AACzB,MAAM,kBAAkB;AACxB,MAAM,wBAAwB;AAC9B,MAAM,uBAAuB;AAC7B,MAAM,uBAAuB,uBAAuB;AACpD,MAAM,0BAA0B;AAKzB,MAAM,iBAA8C;AAAA;AAAA;AAAA;AAAA,EAI1D,kBAAkB,WAA+B;AAChD,QAAI,UAAU,WAAW,EAAG,QAAO;AAEnC,QAAI,mBAAmB;AACvB,QAAI,oBAAoB;AAGxB,eAAW,OAAO,WAAW;AAC5B,UAAI,IAAI,aAAa;AACpB,4BAAoB,IAAI,YAAY,SAAS;AAAA,MAC9C;AAGA,UAAI,IAAI,UAAU;AAEjB,cAAM,eAAe,OAAO,KAAK,IAAI,QAAQ,EAAE;AAC/C,YAAI,eAAe,GAAG;AAGrB,+BAAqB,eAAe;AAAA,QACrC;AAAA,MACD;AAAA,IACD;AAIA,UAAM,gBAAgB,UAAU,SAAS;AAGzC,UAAM,WAAW,UAAU,SAAS;AAGpC,UAAM,iBAAiB,mBAAmB,oBAAoB,gBAAgB;AAE9E,WAAO,KAAK,KAAK,cAAc;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,yBAAyB,aAAwC;AAChE,QAAI,CAAC,YAAY,iBAAiB,YAAY,cAAc,WAAW,GAAG;AACzE,aAAO;AAAA,IACR;AAEA,QAAI,YAAY;AAGhB,eAAW,UAAU,YAAY,eAAe;AAE/C,mBAAa,OAAO,UAAU,SAAS;AAGvC,mBAAa,OAAO,UAAU,OAAO,QAAQ,SAAS,kBAAkB;AAGxE,UAAI,OAAO,UAAU;AAEpB,cAAM,cAAc,KAAK,UAAU,OAAO,QAAQ;AAClD,qBAAa,YAAY,SAAS;AAAA,MACnC;AAGA,mBAAa;AAAA,IACd;AAEA,WAAO,KAAK,KAAK,SAAS;AAAA,EAC3B;AACD;","names":[]}
package/dist/nodes/vector_store/shared/MemoryManager/MemoryVectorStoreManager.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../nodes/vector_store/shared/MemoryManager/MemoryVectorStoreManager.ts"],"sourcesContent":["import type { Document } from '@langchain/core/documents';\nimport type { Embeddings } from '@langchain/core/embeddings';\nimport { MemoryVectorStore } from 'langchain/vectorstores/memory';\nimport type { Logger } from 'n8n-workflow';\n\nimport { getConfig, mbToBytes, hoursToMs } from './config';\nimport { MemoryCalculator } from './MemoryCalculator';\nimport { StoreCleanupService } from './StoreCleanupService';\nimport type { VectorStoreMetadata, VectorStoreStats } from './types';\n\n/**\n * Manages in-memory vector stores with memory limits and auto-cleanup\n */\nexport class MemoryVectorStoreManager {\n\tprivate static instance: MemoryVectorStoreManager | null = null;\n\n\t// Storage\n\tprotected vectorStoreBuffer: Map<string, MemoryVectorStore>;\n\n\tprotected storeMetadata: Map<string, VectorStoreMetadata>;\n\n\tprotected memoryUsageBytes: number = 0;\n\n\t// Dependencies\n\tprotected memoryCalculator: MemoryCalculator;\n\n\tprotected cleanupService: StoreCleanupService;\n\n\tprotected static logger: Logger;\n\n\t// Config values\n\tprotected maxMemorySizeBytes: number;\n\n\tprotected inactiveTtlMs: number;\n\n\t// Inactive TTL cleanup timer\n\tprotected ttlCleanupIntervalId: NodeJS.Timeout | null = null;\n\n\tprotected constructor(\n\t\tprotected embeddings: Embeddings,\n\t\tprotected logger: Logger,\n\t) {\n\t\t// Initialize storage\n\t\tthis.vectorStoreBuffer = new Map();\n\t\tthis.storeMetadata = new Map();\n\t\tthis.logger = logger;\n\n\t\tconst config = getConfig();\n\t\tthis.maxMemorySizeBytes = mbToBytes(config.maxMemoryMB);\n\t\tthis.inactiveTtlMs = hoursToMs(config.ttlHours);\n\n\t\t// Initialize services\n\t\tthis.memoryCalculator = new MemoryCalculator();\n\t\tthis.cleanupService = new StoreCleanupService(\n\t\t\tthis.maxMemorySizeBytes,\n\t\t\tthis.inactiveTtlMs,\n\t\t\tthis.vectorStoreBuffer,\n\t\t\tthis.storeMetadata,\n\t\t\tthis.handleCleanup.bind(this),\n\t\t);\n\n\t\tthis.setupTtlCleanup();\n\t}\n\n\t/**\n\t * Get singleton instance\n\t */\n\tstatic getInstance(embeddings: Embeddings, logger: Logger): MemoryVectorStoreManager {\n\t\tif (!MemoryVectorStoreManager.instance) {\n\t\t\tMemoryVectorStoreManager.instance = new MemoryVectorStoreManager(embeddings, logger);\n\t\t} else {\n\t\t\t// We need to update the embeddings in the existing instance.\n\t\t\t// This is important as embeddings instance is wrapped in a logWrapper,\n\t\t\t// which relies on supplyDataFunctions context which changes on each workflow run\n\t\t\tMemoryVectorStoreManager.instance.embeddings = embeddings;\n\t\t\tMemoryVectorStoreManager.instance.vectorStoreBuffer.forEach((vectorStoreInstance) => {\n\t\t\t\tvectorStoreInstance.embeddings = embeddings;\n\t\t\t});\n\t\t}\n\n\t\treturn MemoryVectorStoreManager.instance;\n\t}\n\n\t/**\n\t * Set up timer for TTL-based cleanup\n\t */\n\tprivate setupTtlCleanup(): void {\n\t\t// Skip setup if TTL is disabled\n\t\tif (this.inactiveTtlMs <= 0) {\n\t\t\treturn;\n\t\t}\n\n\t\t// Cleanup check interval (run every hour)\n\t\tconst CLEANUP_INTERVAL_MS = 60 * 60 * 1000;\n\n\t\t// Clear any existing interval\n\t\tif (this.ttlCleanupIntervalId) {\n\t\t\tclearInterval(this.ttlCleanupIntervalId);\n\t\t}\n\n\t\t// Setup new interval for TTL cleanup\n\t\tthis.ttlCleanupIntervalId = setInterval(() => {\n\t\t\tthis.cleanupService.cleanupInactiveStores();\n\t\t}, CLEANUP_INTERVAL_MS);\n\t}\n\n\t/**\n\t * Handle cleanup events from the cleanup 
service\n\t */\n\tprivate handleCleanup(removedKeys: string[], freedBytes: number, reason: 'ttl' | 'memory'): void {\n\t\t// Update total memory usage\n\t\tthis.memoryUsageBytes -= freedBytes;\n\n\t\t// Log cleanup event\n\t\tif (reason === 'ttl') {\n\t\t\tconst ttlHours = Math.round(this.inactiveTtlMs / (60 * 60 * 1000));\n\t\t\tthis.logger.info(\n\t\t\t\t`TTL cleanup: removed ${removedKeys.length} inactive vector stores (${ttlHours}h TTL) to free ${Math.round(freedBytes / (1024 * 1024))}MB of memory`,\n\t\t\t);\n\t\t} else {\n\t\t\tthis.logger.info(\n\t\t\t\t`Memory cleanup: removed ${removedKeys.length} oldest vector stores to free ${Math.round(freedBytes / (1024 * 1024))}MB of memory`,\n\t\t\t);\n\t\t}\n\t}\n\n\t/**\n\t * Get or create a vector store by key\n\t */\n\tasync getVectorStore(memoryKey: string): Promise<MemoryVectorStore> {\n\t\tlet vectorStoreInstance = this.vectorStoreBuffer.get(memoryKey);\n\n\t\tif (!vectorStoreInstance) {\n\t\t\tvectorStoreInstance = await MemoryVectorStore.fromExistingIndex(this.embeddings);\n\t\t\tthis.vectorStoreBuffer.set(memoryKey, vectorStoreInstance);\n\n\t\t\tthis.storeMetadata.set(memoryKey, {\n\t\t\t\tsize: 0,\n\t\t\t\tcreatedAt: new Date(),\n\t\t\t\tlastAccessed: new Date(),\n\t\t\t});\n\t\t} else {\n\t\t\tconst metadata = this.storeMetadata.get(memoryKey);\n\t\t\tif (metadata) {\n\t\t\t\tmetadata.lastAccessed = new Date();\n\t\t\t}\n\t\t}\n\n\t\treturn vectorStoreInstance;\n\t}\n\n\t/**\n\t * Reset a store's metadata when it's cleared\n\t */\n\tprotected clearStoreMetadata(memoryKey: string): void {\n\t\tconst metadata = this.storeMetadata.get(memoryKey);\n\t\tif (metadata) {\n\t\t\tthis.memoryUsageBytes -= metadata.size;\n\t\t\tmetadata.size = 0;\n\t\t\tmetadata.lastAccessed = new Date();\n\t\t}\n\t}\n\n\t/**\n\t * Get memory usage in bytes\n\t */\n\tgetMemoryUsage(): number {\n\t\treturn this.memoryUsageBytes;\n\t}\n\n\t/**\n\t * Get memory usage as a formatted string (MB)\n\t */\n\tgetMemoryUsageFormatted(): string {\n\t\treturn `${Math.round(this.memoryUsageBytes / (1024 * 1024))}MB`;\n\t}\n\n\t/**\n\t * Recalculate memory usage from actual vector store contents\n\t * This ensures tracking accuracy for large stores\n\t */\n\trecalculateMemoryUsage(): void {\n\t\tthis.memoryUsageBytes = 0;\n\n\t\t// Recalculate for each store\n\t\tfor (const [key, vectorStore] of this.vectorStoreBuffer.entries()) {\n\t\t\tconst storeSize = this.memoryCalculator.calculateVectorStoreSize(vectorStore);\n\n\t\t\t// Update metadata\n\t\t\tconst metadata = this.storeMetadata.get(key);\n\t\t\tif (metadata) {\n\t\t\t\tmetadata.size = storeSize;\n\t\t\t\tthis.memoryUsageBytes += storeSize;\n\t\t\t}\n\t\t}\n\n\t\tthis.logger.debug(`Recalculated vector store memory: ${this.getMemoryUsageFormatted()}`);\n\t}\n\n\t/**\n\t * Add documents to a vector store\n\t */\n\tasync addDocuments(\n\t\tmemoryKey: string,\n\t\tdocuments: Document[],\n\t\tclearStore?: boolean,\n\t): Promise<void> {\n\t\tif (clearStore) {\n\t\t\tthis.clearStoreMetadata(memoryKey);\n\t\t\tthis.vectorStoreBuffer.delete(memoryKey);\n\t\t}\n\n\t\t// Fast batch estimation instead of per-document calculation\n\t\tconst estimatedAddedSize = this.memoryCalculator.estimateBatchSize(documents);\n\n\t\t// Clean up old stores if necessary\n\t\tthis.cleanupService.cleanupOldestStores(estimatedAddedSize);\n\n\t\tconst vectorStoreInstance = await this.getVectorStore(memoryKey);\n\n\t\t// Get vector count before adding documents\n\t\tconst vectorCountBefore = vectorStoreInstance.memoryVectors?.length || 
0;\n\n\t\tawait vectorStoreInstance.addDocuments(documents);\n\n\t\t// Update store metadata and memory tracking\n\t\tconst metadata = this.storeMetadata.get(memoryKey);\n\t\tif (metadata) {\n\t\t\tmetadata.size += estimatedAddedSize;\n\t\t\tmetadata.lastAccessed = new Date();\n\t\t\tthis.memoryUsageBytes += estimatedAddedSize;\n\t\t}\n\n\t\t// Get updated vector count\n\t\tconst vectorCount = vectorStoreInstance.memoryVectors?.length || 0;\n\n\t\t// Periodically recalculate actual memory usage to avoid drift\n\t\tif (\n\t\t\t(vectorCount > 0 && vectorCount % 100 === 0) ||\n\t\t\tdocuments.length > 20 ||\n\t\t\t(vectorCountBefore === 0 && vectorCount > 0)\n\t\t) {\n\t\t\tthis.recalculateMemoryUsage();\n\t\t}\n\n\t\t// Logging memory usage\n\t\tconst maxMemoryMB =\n\t\t\tthis.maxMemorySizeBytes > 0\n\t\t\t\t? (this.maxMemorySizeBytes / (1024 * 1024)).toFixed(0)\n\t\t\t\t: 'unlimited';\n\n\t\tthis.logger.debug(\n\t\t\t`Vector store memory: ${this.getMemoryUsageFormatted()}/${maxMemoryMB}MB (${vectorCount} vectors in ${this.vectorStoreBuffer.size} stores)`,\n\t\t);\n\t}\n\n\t/**\n\t * Get statistics about the vector store memory usage\n\t */\n\tgetStats(): VectorStoreStats {\n\t\tconst now = Date.now();\n\t\tlet inactiveStoreCount = 0;\n\n\t\t// Always recalculate when getting stats to ensure accuracy\n\t\tthis.recalculateMemoryUsage();\n\n\t\tconst stats: VectorStoreStats = {\n\t\t\ttotalSizeBytes: this.memoryUsageBytes,\n\t\t\ttotalSizeMB: Math.round((this.memoryUsageBytes / (1024 * 1024)) * 100) / 100,\n\t\t\tpercentOfLimit:\n\t\t\t\tthis.maxMemorySizeBytes > 0\n\t\t\t\t\t? Math.round((this.memoryUsageBytes / this.maxMemorySizeBytes) * 100)\n\t\t\t\t\t: 0,\n\t\t\tmaxMemoryMB: this.maxMemorySizeBytes > 0 ? this.maxMemorySizeBytes / (1024 * 1024) : -1, // -1 indicates unlimited\n\t\t\tstoreCount: this.vectorStoreBuffer.size,\n\t\t\tinactiveStoreCount: 0,\n\t\t\tttlHours: this.inactiveTtlMs > 0 ? 
this.inactiveTtlMs / (60 * 60 * 1000) : -1, // -1 indicates disabled\n\t\t\tstores: {},\n\t\t};\n\n\t\t// Add stats for each store\n\t\tfor (const [key, metadata] of this.storeMetadata.entries()) {\n\t\t\tconst store = this.vectorStoreBuffer.get(key);\n\n\t\t\tif (store) {\n\t\t\t\tconst lastAccessedTime = metadata.lastAccessed.getTime();\n\t\t\t\tconst inactiveTimeMs = now - lastAccessedTime;\n\t\t\t\tconst isInactive = this.cleanupService.isStoreInactive(metadata);\n\n\t\t\t\tif (isInactive) {\n\t\t\t\t\tinactiveStoreCount++;\n\t\t\t\t}\n\n\t\t\t\tstats.stores[key] = {\n\t\t\t\t\tsizeBytes: metadata.size,\n\t\t\t\t\tsizeMB: Math.round((metadata.size / (1024 * 1024)) * 100) / 100,\n\t\t\t\t\tpercentOfTotal: Math.round((metadata.size / this.memoryUsageBytes) * 100) || 0,\n\t\t\t\t\tvectors: store.memoryVectors?.length || 0,\n\t\t\t\t\tcreatedAt: metadata.createdAt.toISOString(),\n\t\t\t\t\tlastAccessed: metadata.lastAccessed.toISOString(),\n\t\t\t\t\tinactive: isInactive,\n\t\t\t\t\tinactiveForHours: Math.round(inactiveTimeMs / (60 * 60 * 1000)),\n\t\t\t\t};\n\t\t\t}\n\t\t}\n\n\t\tstats.inactiveStoreCount = inactiveStoreCount;\n\n\t\treturn stats;\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAAkC;AAGlC,oBAAgD;AAChD,8BAAiC;AACjC,iCAAoC;AAM7B,MAAM,4BAAN,MAAM,0BAAyB;AAAA,EAyB3B,YACC,YACA,QACT;AAFS;AACA;AAnBX,SAAU,mBAA2B;AAerC;AAAA,SAAU,uBAA8C;AAOvD,SAAK,oBAAoB,oBAAI,IAAI;AACjC,SAAK,gBAAgB,oBAAI,IAAI;AAC7B,SAAK,SAAS;AAEd,UAAM,aAAS,yBAAU;AACzB,SAAK,yBAAqB,yBAAU,OAAO,WAAW;AACtD,SAAK,oBAAgB,yBAAU,OAAO,QAAQ;AAG9C,SAAK,mBAAmB,IAAI,yCAAiB;AAC7C,SAAK,iBAAiB,IAAI;AAAA,MACzB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK,cAAc,KAAK,IAAI;AAAA,IAC7B;AAEA,SAAK,gBAAgB;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,YAAY,YAAwB,QAA0C;AACpF,QAAI,CAAC,0BAAyB,UAAU;AACvC,gCAAyB,WAAW,IAAI,0BAAyB,YAAY,MAAM;AAAA,IACpF,OAAO;AAIN,gCAAyB,SAAS,aAAa;AAC/C,gCAAyB,SAAS,kBAAkB,QAAQ,CAAC,wBAAwB;AACpF,4BAAoB,aAAa;AAAA,MAClC,CAAC;AAAA,IACF;AAEA,WAAO,0BAAyB;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAwB;AAE/B,QAAI,KAAK,iBAAiB,GAAG;AAC5B;AAAA,IACD;AAGA,UAAM,sBAAsB,KAAK,KAAK;AAGtC,QAAI,KAAK,sBAAsB;AAC9B,oBAAc,KAAK,oBAAoB;AAAA,IACxC;AAGA,SAAK,uBAAuB,YAAY,MAAM;AAC7C,WAAK,eAAe,sBAAsB;AAAA,IAC3C,GAAG,mBAAmB;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,aAAuB,YAAoB,QAAgC;AAEhG,SAAK,oBAAoB;AAGzB,QAAI,WAAW,OAAO;AACrB,YAAM,WAAW,KAAK,MAAM,KAAK,iBAAiB,KAAK,KAAK,IAAK;AACjE,WAAK,OAAO;AAAA,QACX,wBAAwB,YAAY,MAAM,4BAA4B,QAAQ,kBAAkB,KAAK,MAAM,cAAc,OAAO,KAAK,CAAC;AAAA,MACvI;AAAA,IACD,OAAO;AACN,WAAK,OAAO;AAAA,QACX,2BAA2B,YAAY,MAAM,iCAAiC,KAAK,MAAM,cAAc,OAAO,KAAK,CAAC;AAAA,MACrH;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAAe,WAA+C;AACnE,QAAI,sBAAsB,KAAK,kBAAkB,IAAI,SAAS;AAE9D,QAAI,CAAC,qBAAqB;AACzB,4BAAsB,MAAM,gCAAkB,kBAAkB,KAAK,UAAU;AAC/E,WAAK,kBAAkB,IAAI,WAAW,mBAAmB;AAEzD,WAAK,cAAc,IAAI,WAAW;AAAA,QACjC,MAAM;AAAA,QACN,WAAW,oBAAI,KAAK;AAAA,QACpB,cAAc,oBAAI,KAAK;AAAA,MACxB,CAAC;AAAA,IACF,OAAO;AACN,YAAM,WAAW,KAAK,cAAc,IAAI,SAAS;AACjD,UAAI,UAAU;AACb,iBAAS,eAAe,oBAAI,KAAK;AAAA,MAClC;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKU,mBAAmB,WAAyB;AACrD,UAAM,WAAW,KAAK,cAAc,IAAI,SAAS;AACjD,QAAI,UAAU;AACb,WAAK,oBAAoB,SAAS;AAClC,eAAS,OAAO;AAChB,eAAS,eAAe,oBAAI,KAAK;AAAA,IAClC;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAyB;AACxB,WAAO,KAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA,EAKA,0BAAkC;AACjC,WAAO,GAAG,KAAK,MAAM,KAAK,oBAAoB,OAAO,KAAK,CAAC;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,yBAA+B;AAC9B,SAAK,mBAAmB;AAGxB,eAAW,CAAC,KAAK,WAAW,KAAK,KAAK,kBAAkB,QAAQ,GAAG;AAClE,YAAM,YAAY,KAAK,iBAAiB,yBAAyB,WAAW;AAG5E,YAAM,WAAW,KAAK,cAAc,IAAI,GAA
G;AAC3C,UAAI,UAAU;AACb,iBAAS,OAAO;AAChB,aAAK,oBAAoB;AAAA,MAC1B;AAAA,IACD;AAEA,SAAK,OAAO,MAAM,qCAAqC,KAAK,wBAAwB,CAAC,EAAE;AAAA,EACxF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aACL,WACA,WACA,YACgB;AAChB,QAAI,YAAY;AACf,WAAK,mBAAmB,SAAS;AACjC,WAAK,kBAAkB,OAAO,SAAS;AAAA,IACxC;AAGA,UAAM,qBAAqB,KAAK,iBAAiB,kBAAkB,SAAS;AAG5E,SAAK,eAAe,oBAAoB,kBAAkB;AAE1D,UAAM,sBAAsB,MAAM,KAAK,eAAe,SAAS;AAG/D,UAAM,oBAAoB,oBAAoB,eAAe,UAAU;AAEvE,UAAM,oBAAoB,aAAa,SAAS;AAGhD,UAAM,WAAW,KAAK,cAAc,IAAI,SAAS;AACjD,QAAI,UAAU;AACb,eAAS,QAAQ;AACjB,eAAS,eAAe,oBAAI,KAAK;AACjC,WAAK,oBAAoB;AAAA,IAC1B;AAGA,UAAM,cAAc,oBAAoB,eAAe,UAAU;AAGjE,QACE,cAAc,KAAK,cAAc,QAAQ,KAC1C,UAAU,SAAS,MAClB,sBAAsB,KAAK,cAAc,GACzC;AACD,WAAK,uBAAuB;AAAA,IAC7B;AAGA,UAAM,cACL,KAAK,qBAAqB,KACtB,KAAK,sBAAsB,OAAO,OAAO,QAAQ,CAAC,IACnD;AAEJ,SAAK,OAAO;AAAA,MACX,wBAAwB,KAAK,wBAAwB,CAAC,IAAI,WAAW,OAAO,WAAW,eAAe,KAAK,kBAAkB,IAAI;AAAA,IAClI;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,WAA6B;AAC5B,UAAM,MAAM,KAAK,IAAI;AACrB,QAAI,qBAAqB;AAGzB,SAAK,uBAAuB;AAE5B,UAAM,QAA0B;AAAA,MAC/B,gBAAgB,KAAK;AAAA,MACrB,aAAa,KAAK,MAAO,KAAK,oBAAoB,OAAO,QAAS,GAAG,IAAI;AAAA,MACzE,gBACC,KAAK,qBAAqB,IACvB,KAAK,MAAO,KAAK,mBAAmB,KAAK,qBAAsB,GAAG,IAClE;AAAA,MACJ,aAAa,KAAK,qBAAqB,IAAI,KAAK,sBAAsB,OAAO,QAAQ;AAAA;AAAA,MACrF,YAAY,KAAK,kBAAkB;AAAA,MACnC,oBAAoB;AAAA,MACpB,UAAU,KAAK,gBAAgB,IAAI,KAAK,iBAAiB,KAAK,KAAK,OAAQ;AAAA;AAAA,MAC3E,QAAQ,CAAC;AAAA,IACV;AAGA,eAAW,CAAC,KAAK,QAAQ,KAAK,KAAK,cAAc,QAAQ,GAAG;AAC3D,YAAM,QAAQ,KAAK,kBAAkB,IAAI,GAAG;AAE5C,UAAI,OAAO;AACV,cAAM,mBAAmB,SAAS,aAAa,QAAQ;AACvD,cAAM,iBAAiB,MAAM;AAC7B,cAAM,aAAa,KAAK,eAAe,gBAAgB,QAAQ;AAE/D,YAAI,YAAY;AACf;AAAA,QACD;AAEA,cAAM,OAAO,GAAG,IAAI;AAAA,UACnB,WAAW,SAAS;AAAA,UACpB,QAAQ,KAAK,MAAO,SAAS,QAAQ,OAAO,QAAS,GAAG,IAAI;AAAA,UAC5D,gBAAgB,KAAK,MAAO,SAAS,OAAO,KAAK,mBAAoB,GAAG,KAAK;AAAA,UAC7E,SAAS,MAAM,eAAe,UAAU;AAAA,UACxC,WAAW,SAAS,UAAU,YAAY;AAAA,UAC1C,cAAc,SAAS,aAAa,YAAY;AAAA,UAChD,UAAU;AAAA,UACV,kBAAkB,KAAK,MAAM,kBAAkB,KAAK,KAAK,IAAK;AAAA,QAC/D;AAAA,MACD;AAAA,IACD;AAEA,UAAM,qBAAqB;AAE3B,WAAO;AAAA,EACR;AACD;AAzSa,0BACG,WAA4C;AADrD,IAAM,2BAAN;","names":[]}
package/dist/nodes/vector_store/shared/MemoryManager/StoreCleanupService.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../nodes/vector_store/shared/MemoryManager/StoreCleanupService.ts"],"sourcesContent":["import type { MemoryVectorStore } from 'langchain/vectorstores/memory';\n\nimport type { VectorStoreMetadata, IStoreCleanupService } from './types';\n\n/**\n * Service for cleaning up vector stores based on inactivity or memory pressure\n */\nexport class StoreCleanupService implements IStoreCleanupService {\n\t// Cache for oldest stores sorted by creation time\n\tprivate oldestStoreKeys: string[] = [];\n\n\tprivate lastSortTime = 0;\n\n\tprivate readonly CACHE_TTL_MS = 5000; // 5 seconds\n\n\tconstructor(\n\t\tprivate readonly maxMemorySizeBytes: number,\n\t\tprivate readonly inactiveTtlMs: number,\n\t\tprivate readonly vectorStores: Map<string, MemoryVectorStore>,\n\t\tprivate readonly storeMetadata: Map<string, VectorStoreMetadata>,\n\t\tprivate readonly onCleanup: (\n\t\t\tremovedKeys: string[],\n\t\t\tfreedBytes: number,\n\t\t\treason: 'ttl' | 'memory',\n\t\t) => void,\n\t) {}\n\n\t/**\n\t * Check if a store has been inactive for longer than the TTL\n\t */\n\tisStoreInactive(metadata: VectorStoreMetadata): boolean {\n\t\t// If TTL is disabled, nothing is considered inactive\n\t\tif (this.inactiveTtlMs <= 0) {\n\t\t\treturn false;\n\t\t}\n\n\t\tconst now = Date.now();\n\t\tconst lastAccessedTime = metadata.lastAccessed.getTime();\n\t\treturn now - lastAccessedTime > this.inactiveTtlMs;\n\t}\n\n\t/**\n\t * Remove vector stores that haven't been accessed for longer than TTL\n\t */\n\tcleanupInactiveStores(): void {\n\t\t// Skip if TTL is disabled\n\t\tif (this.inactiveTtlMs <= 0) {\n\t\t\treturn;\n\t\t}\n\n\t\tlet freedBytes = 0;\n\t\tconst removedStores: string[] = [];\n\n\t\t// Find and remove inactive stores\n\t\tfor (const [key, metadata] of this.storeMetadata.entries()) {\n\t\t\tif (this.isStoreInactive(metadata)) {\n\t\t\t\t// Remove this inactive store\n\t\t\t\tthis.vectorStores.delete(key);\n\t\t\t\tfreedBytes += metadata.size;\n\t\t\t\tremovedStores.push(key);\n\t\t\t}\n\t\t}\n\n\t\t// Remove from metadata after iteration to avoid concurrent modification\n\t\tfor (const key of removedStores) {\n\t\t\tthis.storeMetadata.delete(key);\n\t\t}\n\n\t\t// Invalidate cache if we removed any stores\n\t\tif (removedStores.length > 0) {\n\t\t\tthis.oldestStoreKeys = [];\n\t\t\tthis.onCleanup(removedStores, freedBytes, 'ttl');\n\t\t}\n\t}\n\n\t/**\n\t * Remove the oldest vector stores to free up memory\n\t */\n\tcleanupOldestStores(requiredBytes: number): void {\n\t\t// Skip if memory limit is disabled\n\t\tif (this.maxMemorySizeBytes <= 0) {\n\t\t\treturn;\n\t\t}\n\n\t\t// Calculate current total memory usage\n\t\tlet currentMemoryUsage = 0;\n\t\tfor (const metadata of this.storeMetadata.values()) {\n\t\t\tcurrentMemoryUsage += metadata.size;\n\t\t}\n\n\t\t// First, try to clean up inactive stores\n\t\tthis.cleanupInactiveStores();\n\n\t\t// Recalculate memory usage after inactive cleanup\n\t\tcurrentMemoryUsage = 0;\n\t\tfor (const metadata of this.storeMetadata.values()) {\n\t\t\tcurrentMemoryUsage += metadata.size;\n\t\t}\n\n\t\t// If no more cleanup needed, return early\n\t\tif (currentMemoryUsage + requiredBytes <= this.maxMemorySizeBytes) {\n\t\t\treturn;\n\t\t}\n\n\t\tconst now = Date.now();\n\n\t\t// Reuse cached ordering if available and not stale\n\t\tif (this.oldestStoreKeys.length === 0 || now - this.lastSortTime > this.CACHE_TTL_MS) {\n\t\t\t// Collect and sort store keys by age\n\t\t\tconst stores: Array<[string, number]> = [];\n\n\t\t\tfor (const [key, 
metadata] of this.storeMetadata.entries()) {\n\t\t\t\tstores.push([key, metadata.createdAt.getTime()]);\n\t\t\t}\n\n\t\t\t// Sort by creation time (oldest first)\n\t\t\tstores.sort((a, b) => a[1] - b[1]);\n\n\t\t\t// Extract just the keys\n\t\t\tthis.oldestStoreKeys = stores.map(([key]) => key);\n\t\t\tthis.lastSortTime = now;\n\t\t}\n\n\t\tlet freedBytes = 0;\n\t\tconst removedStores: string[] = [];\n\n\t\t// Remove stores in order until we have enough space\n\t\tfor (const key of this.oldestStoreKeys) {\n\t\t\t// Skip if store no longer exists\n\t\t\tif (!this.storeMetadata.has(key)) continue;\n\n\t\t\t// Stop if we've freed enough space\n\t\t\tif (currentMemoryUsage - freedBytes + requiredBytes <= this.maxMemorySizeBytes) {\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tconst metadata = this.storeMetadata.get(key);\n\t\t\tif (metadata) {\n\t\t\t\tthis.vectorStores.delete(key);\n\t\t\t\tfreedBytes += metadata.size;\n\t\t\t\tremovedStores.push(key);\n\t\t\t}\n\t\t}\n\n\t\t// Remove from metadata after iteration to avoid concurrent modification\n\t\tfor (const key of removedStores) {\n\t\t\tthis.storeMetadata.delete(key);\n\t\t}\n\n\t\t// Update our cache if we removed stores\n\t\tif (removedStores.length > 0) {\n\t\t\t// Filter out removed stores from cached keys\n\t\t\tthis.oldestStoreKeys = this.oldestStoreKeys.filter((key) => !removedStores.includes(key));\n\t\t\tthis.onCleanup(removedStores, freedBytes, 'memory');\n\t\t}\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAOO,MAAM,oBAAoD;AAAA;AAAA,EAQhE,YACkB,oBACA,eACA,cACA,eACA,WAKhB;AATgB;AACA;AACA;AACA;AACA;AAXlB;AAAA,SAAQ,kBAA4B,CAAC;AAErC,SAAQ,eAAe;AAEvB,SAAiB,eAAe;AAAA,EAY7B;AAAA;AAAA;AAAA;AAAA,EAKH,gBAAgB,UAAwC;AAEvD,QAAI,KAAK,iBAAiB,GAAG;AAC5B,aAAO;AAAA,IACR;AAEA,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,mBAAmB,SAAS,aAAa,QAAQ;AACvD,WAAO,MAAM,mBAAmB,KAAK;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,wBAA8B;AAE7B,QAAI,KAAK,iBAAiB,GAAG;AAC5B;AAAA,IACD;AAEA,QAAI,aAAa;AACjB,UAAM,gBAA0B,CAAC;AAGjC,eAAW,CAAC,KAAK,QAAQ,KAAK,KAAK,cAAc,QAAQ,GAAG;AAC3D,UAAI,KAAK,gBAAgB,QAAQ,GAAG;AAEnC,aAAK,aAAa,OAAO,GAAG;AAC5B,sBAAc,SAAS;AACvB,sBAAc,KAAK,GAAG;AAAA,MACvB;AAAA,IACD;AAGA,eAAW,OAAO,eAAe;AAChC,WAAK,cAAc,OAAO,GAAG;AAAA,IAC9B;AAGA,QAAI,cAAc,SAAS,GAAG;AAC7B,WAAK,kBAAkB,CAAC;AACxB,WAAK,UAAU,eAAe,YAAY,KAAK;AAAA,IAChD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAoB,eAA6B;AAEhD,QAAI,KAAK,sBAAsB,GAAG;AACjC;AAAA,IACD;AAGA,QAAI,qBAAqB;AACzB,eAAW,YAAY,KAAK,cAAc,OAAO,GAAG;AACnD,4BAAsB,SAAS;AAAA,IAChC;AAGA,SAAK,sBAAsB;AAG3B,yBAAqB;AACrB,eAAW,YAAY,KAAK,cAAc,OAAO,GAAG;AACnD,4BAAsB,SAAS;AAAA,IAChC;AAGA,QAAI,qBAAqB,iBAAiB,KAAK,oBAAoB;AAClE;AAAA,IACD;AAEA,UAAM,MAAM,KAAK,IAAI;AAGrB,QAAI,KAAK,gBAAgB,WAAW,KAAK,MAAM,KAAK,eAAe,KAAK,cAAc;AAErF,YAAM,SAAkC,CAAC;AAEzC,iBAAW,CAAC,KAAK,QAAQ,KAAK,KAAK,cAAc,QAAQ,GAAG;AAC3D,eAAO,KAAK,CAAC,KAAK,SAAS,UAAU,QAAQ,CAAC,CAAC;AAAA,MAChD;AAGA,aAAO,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AAGjC,WAAK,kBAAkB,OAAO,IAAI,CAAC,CAAC,GAAG,MAAM,GAAG;AAChD,WAAK,eAAe;AAAA,IACrB;AAEA,QAAI,aAAa;AACjB,UAAM,gBAA0B,CAAC;AAGjC,eAAW,OAAO,KAAK,iBAAiB;AAEvC,UAAI,CAAC,KAAK,cAAc,IAAI,GAAG,EAAG;AAGlC,UAAI,qBAAqB,aAAa,iBAAiB,KAAK,oBAAoB;AAC/E;AAAA,MACD;AAEA,YAAM,WAAW,KAAK,cAAc,IAAI,GAAG;AAC3C,UAAI,UAAU;AACb,aAAK,aAAa,OAAO,GAAG;AAC5B,sBAAc,SAAS;AACvB,sBAAc,KAAK,GAAG;AAAA,MACvB;AAAA,IACD;AAGA,eAAW,OAAO,eAAe;AAChC,WAAK,cAAc,OAAO,GAAG;AAAA,IAC9B;AAGA,QAAI,cAAc,SAAS,GAAG;AAE7B,WAAK,kBAAkB,KAAK,gBAAgB,OAAO,CAAC,QAAQ,CAAC,cAAc,SAAS,GAAG,CAAC;AACxF,WAAK,UAAU,eAAe,YAAY,QAAQ;AAAA,IACnD;AAAA,EACD;AACD;","names":[]}
package/dist/nodes/vector_store/shared/MemoryManager/config.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../nodes/vector_store/shared/MemoryManager/config.ts"],"sourcesContent":["import type { MemoryVectorStoreConfig } from './types';\n\n// Defaults\nconst DEFAULT_MAX_MEMORY_MB = -1;\nconst DEFAULT_INACTIVE_TTL_HOURS = -1;\n\n/**\n * Helper function to get the configuration from environment variables\n */\nexport function getConfig(): MemoryVectorStoreConfig {\n\t// Get memory limit from env var or use default\n\tlet maxMemoryMB = DEFAULT_MAX_MEMORY_MB;\n\tif (process.env.N8N_VECTOR_STORE_MAX_MEMORY) {\n\t\tconst parsed = parseInt(process.env.N8N_VECTOR_STORE_MAX_MEMORY, 10);\n\t\tif (!isNaN(parsed)) {\n\t\t\tmaxMemoryMB = parsed;\n\t\t}\n\t}\n\n\t// Get TTL from env var or use default\n\tlet ttlHours = DEFAULT_INACTIVE_TTL_HOURS;\n\tif (process.env.N8N_VECTOR_STORE_TTL_HOURS) {\n\t\tconst parsed = parseInt(process.env.N8N_VECTOR_STORE_TTL_HOURS, 10);\n\t\tif (!isNaN(parsed)) {\n\t\t\tttlHours = parsed;\n\t\t}\n\t}\n\n\treturn {\n\t\tmaxMemoryMB,\n\t\tttlHours,\n\t};\n}\n\n/**\n * Convert memory size from MB to bytes\n */\nexport function mbToBytes(mb: number): number {\n\t// -1 - \"unlimited\"\n\tif (mb <= 0) return -1;\n\treturn mb * 1024 * 1024;\n}\n\n/**\n * Convert TTL from hours to milliseconds\n */\nexport function hoursToMs(hours: number): number {\n\t// -1 - \"disabled\"\n\tif (hours <= 0) return -1;\n\treturn hours * 60 * 60 * 1000;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,MAAM,wBAAwB;AAC9B,MAAM,6BAA6B;AAK5B,SAAS,YAAqC;AAEpD,MAAI,cAAc;AAClB,MAAI,QAAQ,IAAI,6BAA6B;AAC5C,UAAM,SAAS,SAAS,QAAQ,IAAI,6BAA6B,EAAE;AACnE,QAAI,CAAC,MAAM,MAAM,GAAG;AACnB,oBAAc;AAAA,IACf;AAAA,EACD;AAGA,MAAI,WAAW;AACf,MAAI,QAAQ,IAAI,4BAA4B;AAC3C,UAAM,SAAS,SAAS,QAAQ,IAAI,4BAA4B,EAAE;AAClE,QAAI,CAAC,MAAM,MAAM,GAAG;AACnB,iBAAW;AAAA,IACZ;AAAA,EACD;AAEA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;AAKO,SAAS,UAAU,IAAoB;AAE7C,MAAI,MAAM,EAAG,QAAO;AACpB,SAAO,KAAK,OAAO;AACpB;AAKO,SAAS,UAAU,OAAuB;AAEhD,MAAI,SAAS,EAAG,QAAO;AACvB,SAAO,QAAQ,KAAK,KAAK;AAC1B;","names":[]}
package/dist/nodes/vector_store/shared/MemoryManager/types.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../nodes/vector_store/shared/MemoryManager/types.ts"],"sourcesContent":["import type { Document } from '@langchain/core/documents';\nimport type { MemoryVectorStore } from 'langchain/vectorstores/memory';\n\n/**\n * Configuration options for the memory vector store\n */\nexport interface MemoryVectorStoreConfig {\n\t/**\n\t * Maximum memory size in MB, -1 to disable\n\t */\n\tmaxMemoryMB: number;\n\n\t/**\n\t * TTL for inactive stores in hours, -1 to disable\n\t */\n\tttlHours: number;\n}\n\n/**\n * Vector store metadata for tracking usage\n */\nexport interface VectorStoreMetadata {\n\tsize: number;\n\tcreatedAt: Date;\n\tlastAccessed: Date;\n}\n\n/**\n * Per-store statistics for reporting\n */\nexport interface StoreStats {\n\tsizeBytes: number;\n\tsizeMB: number;\n\tpercentOfTotal: number;\n\tvectors: number;\n\tcreatedAt: string;\n\tlastAccessed: string;\n\tinactive?: boolean;\n\tinactiveForHours?: number;\n}\n\n/**\n * Overall vector store statistics\n */\nexport interface VectorStoreStats {\n\ttotalSizeBytes: number;\n\ttotalSizeMB: number;\n\tpercentOfLimit: number;\n\tmaxMemoryMB: number;\n\tstoreCount: number;\n\tinactiveStoreCount: number;\n\tttlHours: number;\n\tstores: Record<string, StoreStats>;\n}\n\n/**\n * Service for calculating memory usage\n */\nexport interface IMemoryCalculator {\n\testimateBatchSize(documents: Document[]): number;\n\tcalculateVectorStoreSize(vectorStore: MemoryVectorStore): number;\n}\n\n/**\n * Service for cleaning up vector stores\n */\nexport interface IStoreCleanupService {\n\tcleanupInactiveStores(): void;\n\tcleanupOldestStores(requiredBytes: number): void;\n}\n"],"mappings":";;;;;;;;;;;;;;AAAA;AAAA;","names":[]}
package/dist/nodes/vector_store/shared/createVectorStoreNode/constants.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../nodes/vector_store/shared/createVectorStoreNode/constants.ts"],"sourcesContent":["import { NodeConnectionTypes } from 'n8n-workflow';\nimport type { INodePropertyOptions } from 'n8n-workflow';\n\nimport type { NodeOperationMode } from './types';\n\nexport const DEFAULT_OPERATION_MODES: NodeOperationMode[] = [\n\t'load',\n\t'insert',\n\t'retrieve',\n\t'retrieve-as-tool',\n];\n\nexport const OPERATION_MODE_DESCRIPTIONS: INodePropertyOptions[] = [\n\t{\n\t\tname: 'Get Many',\n\t\tvalue: 'load',\n\t\tdescription: 'Get many ranked documents from vector store for query',\n\t\taction: 'Get ranked documents from vector store',\n\t},\n\t{\n\t\tname: 'Insert Documents',\n\t\tvalue: 'insert',\n\t\tdescription: 'Insert documents into vector store',\n\t\taction: 'Add documents to vector store',\n\t},\n\t{\n\t\tname: 'Retrieve Documents (As Vector Store for Chain/Tool)',\n\t\tvalue: 'retrieve',\n\t\tdescription: 'Retrieve documents from vector store to be used as vector store with AI nodes',\n\t\taction: 'Retrieve documents for Chain/Tool as Vector Store',\n\t\toutputConnectionType: NodeConnectionTypes.AiVectorStore,\n\t},\n\t{\n\t\tname: 'Retrieve Documents (As Tool for AI Agent)',\n\t\tvalue: 'retrieve-as-tool',\n\t\tdescription: 'Retrieve documents from vector store to be used as tool with AI nodes',\n\t\taction: 'Retrieve documents for AI Agent as Tool',\n\t\toutputConnectionType: NodeConnectionTypes.AiTool,\n\t},\n\t{\n\t\tname: 'Update Documents',\n\t\tvalue: 'update',\n\t\tdescription: 'Update documents in vector store by ID',\n\t\taction: 'Update vector store documents',\n\t},\n];\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAAoC;AAK7B,MAAM,0BAA+C;AAAA,EAC3D;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACD;AAEO,MAAM,8BAAsD;AAAA,EAClE;AAAA,IACC,MAAM;AAAA,IACN,OAAO;AAAA,IACP,aAAa;AAAA,IACb,QAAQ;AAAA,EACT;AAAA,EACA;AAAA,IACC,MAAM;AAAA,IACN,OAAO;AAAA,IACP,aAAa;AAAA,IACb,QAAQ;AAAA,EACT;AAAA,EACA;AAAA,IACC,MAAM;AAAA,IACN,OAAO;AAAA,IACP,aAAa;AAAA,IACb,QAAQ;AAAA,IACR,sBAAsB,wCAAoB;AAAA,EAC3C;AAAA,EACA;AAAA,IACC,MAAM;AAAA,IACN,OAAO;AAAA,IACP,aAAa;AAAA,IACb,QAAQ;AAAA,IACR,sBAAsB,wCAAoB;AAAA,EAC3C;AAAA,EACA;AAAA,IACC,MAAM;AAAA,IACN,OAAO;AAAA,IACP,aAAa;AAAA,IACb,QAAQ;AAAA,EACT;AACD;","names":[]}
package/dist/nodes/vector_store/shared/createVectorStoreNode/createVectorStoreNode.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../nodes/vector_store/shared/createVectorStoreNode/createVectorStoreNode.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-filename-against-convention */\n/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\nimport type { Embeddings } from '@langchain/core/embeddings';\nimport type { VectorStore } from '@langchain/core/vectorstores';\nimport { NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';\nimport type {\n\tIExecuteFunctions,\n\tINodeExecutionData,\n\tINodeTypeDescription,\n\tSupplyData,\n\tISupplyDataFunctions,\n\tINodeType,\n} from 'n8n-workflow';\n\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\n// Import custom types\nimport {\n\thandleLoadOperation,\n\thandleInsertOperation,\n\thandleUpdateOperation,\n\thandleRetrieveOperation,\n\thandleRetrieveAsToolOperation,\n} from './operations';\nimport type { NodeOperationMode, VectorStoreNodeConstructorArgs } from './types';\n// Import utility functions\nimport { transformDescriptionForOperationMode, getOperationModeOptions } from './utils';\n\n// Import operation handlers\n\n/**\n * Creates a vector store node with the given configuration\n * This factory function produces a complete node class that implements all vector store operations\n */\nexport const createVectorStoreNode = <T extends VectorStore = VectorStore>(\n\targs: VectorStoreNodeConstructorArgs<T>,\n) =>\n\tclass VectorStoreNodeType implements INodeType {\n\t\tdescription: INodeTypeDescription = {\n\t\t\tdisplayName: args.meta.displayName,\n\t\t\tname: args.meta.name,\n\t\t\tdescription: args.meta.description,\n\t\t\ticon: args.meta.icon,\n\t\t\ticonColor: args.meta.iconColor,\n\t\t\tgroup: ['transform'],\n\t\t\tversion: [1, 1.1],\n\t\t\tdefaults: {\n\t\t\t\tname: args.meta.displayName,\n\t\t\t},\n\t\t\tcodex: {\n\t\t\t\tcategories: ['AI'],\n\t\t\t\tsubcategories: {\n\t\t\t\t\tAI: ['Vector Stores', 'Tools', 'Root Nodes'],\n\t\t\t\t\tTools: ['Other Tools'],\n\t\t\t\t},\n\t\t\t\tresources: {\n\t\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\turl: args.meta.docsUrl,\n\t\t\t\t\t\t},\n\t\t\t\t\t],\n\t\t\t\t},\n\t\t\t},\n\t\t\tcredentials: args.meta.credentials,\n\t\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\t\tinputs: `={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"${NodeConnectionTypes.AiEmbedding}\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"${NodeConnectionTypes.Main}\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"${NodeConnectionTypes.AiDocument}\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}`,\n\t\t\toutputs: `={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode ?? 
'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"${NodeConnectionTypes.AiTool}\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"${NodeConnectionTypes.AiVectorStore}\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"${NodeConnectionTypes.Main}\"}]\n\t\t\t})($parameter)\n\t\t}}`,\n\t\t\tproperties: [\n\t\t\t\t{\n\t\t\t\t\tdisplayName: 'Operation Mode',\n\t\t\t\t\tname: 'mode',\n\t\t\t\t\ttype: 'options',\n\t\t\t\t\tnoDataExpression: true,\n\t\t\t\t\tdefault: 'retrieve',\n\t\t\t\t\toptions: getOperationModeOptions(args),\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\t...getConnectionHintNoticeField([NodeConnectionTypes.AiRetriever]),\n\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\tmode: ['retrieve'],\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tdisplayName: 'Name',\n\t\t\t\t\tname: 'toolName',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t\tdefault: '',\n\t\t\t\t\trequired: true,\n\t\t\t\t\tdescription: 'Name of the vector store',\n\t\t\t\t\tplaceholder: 'e.g. company_knowledge_base',\n\t\t\t\t\tvalidateType: 'string-alphanumeric',\n\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\tmode: ['retrieve-as-tool'],\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tdisplayName: 'Description',\n\t\t\t\t\tname: 'toolDescription',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t\tdefault: '',\n\t\t\t\t\trequired: true,\n\t\t\t\t\ttypeOptions: { rows: 2 },\n\t\t\t\t\tdescription:\n\t\t\t\t\t\t'Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often',\n\t\t\t\t\tplaceholder: `e.g. ${args.meta.description}`,\n\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\tmode: ['retrieve-as-tool'],\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t...args.sharedFields,\n\t\t\t\t{\n\t\t\t\t\tdisplayName: 'Embedding Batch Size',\n\t\t\t\t\tname: 'embeddingBatchSize',\n\t\t\t\t\ttype: 'number',\n\t\t\t\t\tdefault: 200,\n\t\t\t\t\tdescription: 'Number of documents to embed in a single batch',\n\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\tmode: ['insert'],\n\t\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.1 } }],\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t...transformDescriptionForOperationMode(args.insertFields ?? 
[], 'insert'),\n\t\t\t\t// Prompt and topK are always used for the load operation\n\t\t\t\t{\n\t\t\t\t\tdisplayName: 'Prompt',\n\t\t\t\t\tname: 'prompt',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t\tdefault: '',\n\t\t\t\t\trequired: true,\n\t\t\t\t\tdescription:\n\t\t\t\t\t\t'Search prompt to retrieve matching documents from the vector store using similarity-based ranking',\n\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\tmode: ['load'],\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tdisplayName: 'Limit',\n\t\t\t\t\tname: 'topK',\n\t\t\t\t\ttype: 'number',\n\t\t\t\t\tdefault: 4,\n\t\t\t\t\tdescription: 'Number of top results to fetch from vector store',\n\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\tmode: ['load', 'retrieve-as-tool'],\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tdisplayName: 'Include Metadata',\n\t\t\t\t\tname: 'includeDocumentMetadata',\n\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\tdefault: true,\n\t\t\t\t\tdescription: 'Whether or not to include document metadata',\n\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\tmode: ['load', 'retrieve-as-tool'],\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t// ID is always used for update operation\n\t\t\t\t{\n\t\t\t\t\tdisplayName: 'ID',\n\t\t\t\t\tname: 'id',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t\tdefault: '',\n\t\t\t\t\trequired: true,\n\t\t\t\t\tdescription: 'ID of an embedding entry',\n\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\tmode: ['update'],\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t...transformDescriptionForOperationMode(args.loadFields ?? [], [\n\t\t\t\t\t'load',\n\t\t\t\t\t'retrieve-as-tool',\n\t\t\t\t]),\n\t\t\t\t...transformDescriptionForOperationMode(args.retrieveFields ?? [], 'retrieve'),\n\t\t\t\t...transformDescriptionForOperationMode(args.updateFields ?? 
[], 'update'),\n\t\t\t],\n\t\t};\n\n\t\tmethods = args.methods;\n\n\t\t/**\n\t\t * Method to execute the node in regular workflow mode\n\t\t * Supports 'load', 'insert', and 'update' operation modes\n\t\t */\n\t\tasync execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {\n\t\t\tconst mode = this.getNodeParameter('mode', 0) as NodeOperationMode;\n\n\t\t\t// Get the embeddings model connected to this node\n\t\t\tconst embeddings = (await this.getInputConnectionData(\n\t\t\t\tNodeConnectionTypes.AiEmbedding,\n\t\t\t\t0,\n\t\t\t)) as Embeddings;\n\n\t\t\t// Handle each operation mode with dedicated modules\n\t\t\tif (mode === 'load') {\n\t\t\t\tconst items = this.getInputData(0);\n\t\t\t\tconst resultData = [];\n\n\t\t\t\tfor (let itemIndex = 0; itemIndex < items.length; itemIndex++) {\n\t\t\t\t\tconst docs = await handleLoadOperation(this, args, embeddings, itemIndex);\n\t\t\t\t\tresultData.push(...docs);\n\t\t\t\t}\n\n\t\t\t\treturn [resultData];\n\t\t\t}\n\n\t\t\tif (mode === 'insert') {\n\t\t\t\tconst resultData = await handleInsertOperation(this, args, embeddings);\n\t\t\t\treturn [resultData];\n\t\t\t}\n\n\t\t\tif (mode === 'update') {\n\t\t\t\tconst resultData = await handleUpdateOperation(this, args, embeddings);\n\t\t\t\treturn [resultData];\n\t\t\t}\n\n\t\t\tthrow new NodeOperationError(\n\t\t\t\tthis.getNode(),\n\t\t\t\t'Only the \"load\", \"update\" and \"insert\" operation modes are supported with execute',\n\t\t\t);\n\t\t}\n\n\t\t/**\n\t\t * Method to supply data to AI nodes\n\t\t * Supports 'retrieve' and 'retrieve-as-tool' operation modes\n\t\t */\n\t\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\t\tconst mode = this.getNodeParameter('mode', 0) as NodeOperationMode;\n\n\t\t\t// Get the embeddings model connected to this node\n\t\t\tconst embeddings = (await this.getInputConnectionData(\n\t\t\t\tNodeConnectionTypes.AiEmbedding,\n\t\t\t\t0,\n\t\t\t)) as Embeddings;\n\n\t\t\t// Handle each supply data operation mode with dedicated modules\n\t\t\tif (mode === 'retrieve') {\n\t\t\t\treturn await handleRetrieveOperation(this, args, embeddings, itemIndex);\n\t\t\t}\n\n\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\treturn await handleRetrieveAsToolOperation(this, args, embeddings, itemIndex);\n\t\t\t}\n\n\t\t\tthrow new NodeOperationError(\n\t\t\t\tthis.getNode(),\n\t\t\t\t'Only the \"retrieve\" and \"retrieve-as-tool\" operation mode is supported to supply 
data',\n\t\t\t);\n\t\t}\n\t};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,0BAAwD;AAUxD,0BAA6C;AAG7C,wBAMO;AAGP,mBAA8E;AAQvE,MAAM,wBAAwB,CACpC,SAEA,MAAM,oBAAyC;AAAA,EAA/C;AACC,uBAAoC;AAAA,MACnC,aAAa,KAAK,KAAK;AAAA,MACvB,MAAM,KAAK,KAAK;AAAA,MAChB,aAAa,KAAK,KAAK;AAAA,MACvB,MAAM,KAAK,KAAK;AAAA,MAChB,WAAW,KAAK,KAAK;AAAA,MACrB,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,GAAG;AAAA,MAChB,UAAU;AAAA,QACT,MAAM,KAAK,KAAK;AAAA,MACjB;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,iBAAiB,SAAS,YAAY;AAAA,UAC3C,OAAO,CAAC,aAAa;AAAA,QACtB;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK,KAAK,KAAK;AAAA,YAChB;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,MACA,aAAa,KAAK,KAAK;AAAA;AAAA,MAEvB,QAAQ;AAAA;AAAA;AAAA,yDAG8C,wCAAoB,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,6CAO3C,wCAAoB,IAAI;AAAA;AAAA;AAAA;AAAA,qDAIhB,wCAAoB,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,MAKhF,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,6CAKiC,wCAAoB,MAAM;AAAA;AAAA;AAAA;AAAA,qDAIlB,wCAAoB,aAAa;AAAA;AAAA,wCAE9C,wCAAoB,IAAI;AAAA;AAAA;AAAA,MAG7D,YAAY;AAAA,QACX;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,kBAAkB;AAAA,UAClB,SAAS;AAAA,UACT,aAAS,sCAAwB,IAAI;AAAA,QACtC;AAAA,QACA;AAAA,UACC,OAAG,kDAA6B,CAAC,wCAAoB,WAAW,CAAC;AAAA,UACjE,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,MAAM,CAAC,UAAU;AAAA,YAClB;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,UACV,aAAa;AAAA,UACb,aAAa;AAAA,UACb,cAAc;AAAA,UACd,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,MAAM,CAAC,kBAAkB;AAAA,YAC1B;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,UACV,aAAa,EAAE,MAAM,EAAE;AAAA,UACvB,aACC;AAAA,UACD,aAAa,QAAQ,KAAK,KAAK,WAAW;AAAA,UAC1C,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,MAAM,CAAC,kBAAkB;AAAA,YAC1B;AAAA,UACD;AAAA,QACD;AAAA,QACA,GAAG,KAAK;AAAA,QACR;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,aAAa;AAAA,UACb,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,MAAM,CAAC,QAAQ;AAAA,cACf,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,YACpC;AAAA,UACD;AAAA,QACD;AAAA,QACA,OAAG,mDAAqC,KAAK,gBAAgB,CAAC,GAAG,QAAQ;AAAA;AAAA,QAEzE;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,UACV,aACC;AAAA,UACD,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,MAAM,CAAC,MAAM;AAAA,YACd;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,aAAa;AAAA,UACb,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,MAAM,CAAC,QAAQ,kBAAkB;AAAA,YAClC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,aAAa;AAAA,UACb,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,MAAM,CAAC,QAAQ,kBAAkB;AAAA,YAClC;AAAA,UACD;AAAA,QACD;AAAA;AAAA,QAEA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,UACV,aAAa;AAAA,UACb,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,MAAM,CAAC,QAAQ;AAAA,YAChB;AAAA,UACD;AAAA,QACD;AAAA,QACA,OAAG,mDAAqC,KAAK,cAAc,CAAC,GAAG;AAAA,UAC9D;AAAA,UACA;AAAA,QACD,CAAC;AAAA,QACD,OAAG,mDAAqC,KAAK,kBAAkB,CAAC,GAAG,UAAU;AAAA,QAC7E,OAAG,mDAAqC,KAAK,gBAAgB,CAAC,GAAG,QAAQ;AAAA,MAC1E;AAAA,IACD;AAEA,mBAAU,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMf,MAAM,UAAkE;AACvE,UAAM,OAAO,KAAK,iBAAiB,QAAQ,CAAC;AAG5C,UAAM,aAAc,MAAM,KAAK;AAAA,MAC9B,wCAAoB;AAAA,MACpB;AAAA,IACD;AAGA,QAAI,SAAS,QAAQ;AACpB,YAAM,QAAQ,KAAK,aAAa,CAAC;AACjC,YAAM,aAAa,CAAC;AAEpB,eAAS,YAAY,GAAG,YAAY,MAAM,QAAQ,aAAa;AAC9D,cAAM,OAAO,UAAM,uCAAoB,MAAM,MAAM,YAAY,SAAS;AACxE,mBAAW,KAAK,GAAG,IAAI;AAAA,MACxB;AAEA,aAAO,CAAC,UAAU;AAAA,IACnB;AAEA,QAAI,SAAS,UAAU;AACtB,YAAM,aAAa,UAAM,yCAAsB,MAAM,MAAM,UAAU;AACrE,aAAO,CAAC,UAAU;AAAA,IACnB;AAEA,QAAI,SAAS,UAAU;AACtB,YAAM,aAAa,UAAM,yCAAsB,MAA
M,MAAM,UAAU;AACrE,aAAO,CAAC,UAAU;AAAA,IACnB;AAEA,UAAM,IAAI;AAAA,MACT,KAAK,QAAQ;AAAA,MACb;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAuC,WAAwC;AACpF,UAAM,OAAO,KAAK,iBAAiB,QAAQ,CAAC;AAG5C,UAAM,aAAc,MAAM,KAAK;AAAA,MAC9B,wCAAoB;AAAA,MACpB;AAAA,IACD;AAGA,QAAI,SAAS,YAAY;AACxB,aAAO,UAAM,2CAAwB,MAAM,MAAM,YAAY,SAAS;AAAA,IACvE;AAEA,QAAI,SAAS,oBAAoB;AAChC,aAAO,UAAM,iDAA8B,MAAM,MAAM,YAAY,SAAS;AAAA,IAC7E;AAEA,UAAM,IAAI;AAAA,MACT,KAAK,QAAQ;AAAA,MACb;AAAA,IACD;AAAA,EACD;AACD;","names":[]}
package/dist/nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../../nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.ts"],"sourcesContent":["import { Pinecone } from '@pinecone-database/pinecone';\nimport { QdrantClient } from '@qdrant/js-client-rest';\nimport { ApplicationError, type IDataObject, type ILoadOptionsFunctions } from 'n8n-workflow';\n\nexport async function pineconeIndexSearch(this: ILoadOptionsFunctions) {\n\tconst credentials = await this.getCredentials('pineconeApi');\n\n\tconst client = new Pinecone({\n\t\tapiKey: credentials.apiKey as string,\n\t});\n\n\tconst indexes = await client.listIndexes();\n\n\tconst results = (indexes.indexes ?? []).map((index) => ({\n\t\tname: index.name,\n\t\tvalue: index.name,\n\t}));\n\n\treturn { results };\n}\n\nexport async function supabaseTableNameSearch(this: ILoadOptionsFunctions) {\n\tconst credentials = await this.getCredentials('supabaseApi');\n\n\tconst results = [];\n\n\tif (typeof credentials.host !== 'string') {\n\t\tthrow new ApplicationError('Expected Supabase credentials host to be a string');\n\t}\n\n\tconst { paths } = (await this.helpers.requestWithAuthentication.call(this, 'supabaseApi', {\n\t\theaders: {\n\t\t\tPrefer: 'return=representation',\n\t\t},\n\t\tmethod: 'GET',\n\t\turi: `${credentials.host}/rest/v1/`,\n\t\tjson: true,\n\t})) as { paths: IDataObject };\n\n\tfor (const path of Object.keys(paths)) {\n\t\t//omit introspection path\n\t\tif (path === '/') continue;\n\n\t\tresults.push({\n\t\t\tname: path.replace('/', ''),\n\t\t\tvalue: path.replace('/', ''),\n\t\t});\n\t}\n\n\treturn { results };\n}\n\nexport async function qdrantCollectionsSearch(this: ILoadOptionsFunctions) {\n\tconst credentials = await this.getCredentials('qdrantApi');\n\n\tconst client = new QdrantClient({\n\t\turl: credentials.qdrantUrl as string,\n\t\tapiKey: credentials.apiKey as string,\n\t});\n\n\tconst response = await client.getCollections();\n\n\tconst results = response.collections.map((collection) => ({\n\t\tname: collection.name,\n\t\tvalue: collection.name,\n\t}));\n\n\treturn { results };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAyB;AACzB,4BAA6B;AAC7B,0BAA+E;AAE/E,eAAsB,sBAAiD;AACtE,QAAM,cAAc,MAAM,KAAK,eAAe,aAAa;AAE3D,QAAM,SAAS,IAAI,yBAAS;AAAA,IAC3B,QAAQ,YAAY;AAAA,EACrB,CAAC;AAED,QAAM,UAAU,MAAM,OAAO,YAAY;AAEzC,QAAM,WAAW,QAAQ,WAAW,CAAC,GAAG,IAAI,CAAC,WAAW;AAAA,IACvD,MAAM,MAAM;AAAA,IACZ,OAAO,MAAM;AAAA,EACd,EAAE;AAEF,SAAO,EAAE,QAAQ;AAClB;AAEA,eAAsB,0BAAqD;AAC1E,QAAM,cAAc,MAAM,KAAK,eAAe,aAAa;AAE3D,QAAM,UAAU,CAAC;AAEjB,MAAI,OAAO,YAAY,SAAS,UAAU;AACzC,UAAM,IAAI,qCAAiB,mDAAmD;AAAA,EAC/E;AAEA,QAAM,EAAE,MAAM,IAAK,MAAM,KAAK,QAAQ,0BAA0B,KAAK,MAAM,eAAe;AAAA,IACzF,SAAS;AAAA,MACR,QAAQ;AAAA,IACT;AAAA,IACA,QAAQ;AAAA,IACR,KAAK,GAAG,YAAY,IAAI;AAAA,IACxB,MAAM;AAAA,EACP,CAAC;AAED,aAAW,QAAQ,OAAO,KAAK,KAAK,GAAG;AAEtC,QAAI,SAAS,IAAK;AAElB,YAAQ,KAAK;AAAA,MACZ,MAAM,KAAK,QAAQ,KAAK,EAAE;AAAA,MAC1B,OAAO,KAAK,QAAQ,KAAK,EAAE;AAAA,IAC5B,CAAC;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ;AAClB;AAEA,eAAsB,0BAAqD;AAC1E,QAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,QAAM,SAAS,IAAI,mCAAa;AAAA,IAC/B,KAAK,YAAY;AAAA,IACjB,QAAQ,YAAY;AAAA,EACrB,CAAC;AAED,QAAM,WAAW,MAAM,OAAO,eAAe;AAE7C,QAAM,UAAU,SAAS,YAAY,IAAI,CAAC,gBAAgB;AAAA,IACzD,MAAM,WAAW;AAAA,IACjB,OAAO,WAAW;AAAA,EACnB,EAAE;AAEF,SAAO,EAAE,QAAQ;AAClB;","names":[]}
package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/index.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../../nodes/vector_store/shared/createVectorStoreNode/operations/index.ts"],"sourcesContent":["export * from './loadOperation';\nexport * from './insertOperation';\nexport * from './updateOperation';\nexport * from './retrieveOperation';\nexport * from './retrieveAsToolOperation';\n"],"mappings":";;;;;;;;;;;;;;;AAAA;AAAA;AAAA,+BAAc,4BAAd;AACA,+BAAc,8BADd;AAEA,+BAAc,8BAFd;AAGA,+BAAc,gCAHd;AAIA,+BAAc,sCAJd;","names":[]}
package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/insertOperation.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../../nodes/vector_store/shared/createVectorStoreNode/operations/insertOperation.ts"],"sourcesContent":["import type { Document } from '@langchain/core/documents';\nimport type { Embeddings } from '@langchain/core/embeddings';\nimport type { VectorStore } from '@langchain/core/vectorstores';\nimport type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';\nimport { NodeConnectionTypes } from 'n8n-workflow';\n\nimport { logAiEvent } from '@utils/helpers';\nimport type { N8nBinaryLoader } from '@utils/N8nBinaryLoader';\nimport type { N8nJsonLoader } from '@utils/N8nJsonLoader';\n\nimport { processDocument } from '../../processDocuments';\nimport type { VectorStoreNodeConstructorArgs } from '../types';\n\n/**\n * Handles the 'insert' operation mode\n * Inserts documents from the input into the vector store\n */\nexport async function handleInsertOperation<T extends VectorStore = VectorStore>(\n\tcontext: IExecuteFunctions,\n\targs: VectorStoreNodeConstructorArgs<T>,\n\tembeddings: Embeddings,\n): Promise<INodeExecutionData[]> {\n\tconst nodeVersion = context.getNode().typeVersion;\n\t// Get the input items and document data\n\tconst items = context.getInputData();\n\tconst documentInput = (await context.getInputConnectionData(NodeConnectionTypes.AiDocument, 0)) as\n\t\t| N8nJsonLoader\n\t\t| N8nBinaryLoader\n\t\t| Array<Document<Record<string, unknown>>>;\n\n\tconst resultData: INodeExecutionData[] = [];\n\tconst documentsForEmbedding: Array<Document<Record<string, unknown>>> = [];\n\n\t// Process each input item\n\tfor (let itemIndex = 0; itemIndex < items.length; itemIndex++) {\n\t\t// Check if execution is being cancelled\n\t\tif (context.getExecutionCancelSignal()?.aborted) {\n\t\t\tbreak;\n\t\t}\n\n\t\tconst itemData = items[itemIndex];\n\n\t\t// Process the document from the input\n\t\tconst processedDocuments = await processDocument(documentInput, itemData, itemIndex);\n\n\t\t// Add the serialized documents to the result\n\t\tresultData.push(...processedDocuments.serializedDocuments);\n\n\t\t// Add the processed documents to the documents to embedd\n\t\tdocumentsForEmbedding.push(...processedDocuments.processedDocuments);\n\n\t\t// For the version 1, we run the populateVectorStore(embedding and insert) function for each item\n\t\tif (nodeVersion === 1) {\n\t\t\tawait args.populateVectorStore(\n\t\t\t\tcontext,\n\t\t\t\tembeddings,\n\t\t\t\tprocessedDocuments.processedDocuments,\n\t\t\t\titemIndex,\n\t\t\t);\n\t\t}\n\t\t// Log the AI event for analytics\n\t\tlogAiEvent(context, 'ai-vector-store-populated');\n\t}\n\n\t// For the version 1.1, we run the populateVectorStore in batches\n\tif (nodeVersion >= 1.1) {\n\t\tconst embeddingBatchSize =\n\t\t\t(context.getNodeParameter('embeddingBatchSize', 0, 200) as number) ?? 
200;\n\n\t\t// Populate the vector store with the processed documents in batches\n\t\tfor (let i = 0; i < documentsForEmbedding.length; i += embeddingBatchSize) {\n\t\t\tconst nextBatch = documentsForEmbedding.slice(i, i + embeddingBatchSize);\n\t\t\tawait args.populateVectorStore(context, embeddings, nextBatch, 0);\n\t\t}\n\t}\n\n\treturn resultData;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,0BAAoC;AAEpC,qBAA2B;AAI3B,8BAAgC;AAOhC,eAAsB,sBACrB,SACA,MACA,YACgC;AAChC,QAAM,cAAc,QAAQ,QAAQ,EAAE;AAEtC,QAAM,QAAQ,QAAQ,aAAa;AACnC,QAAM,gBAAiB,MAAM,QAAQ,uBAAuB,wCAAoB,YAAY,CAAC;AAK7F,QAAM,aAAmC,CAAC;AAC1C,QAAM,wBAAkE,CAAC;AAGzE,WAAS,YAAY,GAAG,YAAY,MAAM,QAAQ,aAAa;AAE9D,QAAI,QAAQ,yBAAyB,GAAG,SAAS;AAChD;AAAA,IACD;AAEA,UAAM,WAAW,MAAM,SAAS;AAGhC,UAAM,qBAAqB,UAAM,yCAAgB,eAAe,UAAU,SAAS;AAGnF,eAAW,KAAK,GAAG,mBAAmB,mBAAmB;AAGzD,0BAAsB,KAAK,GAAG,mBAAmB,kBAAkB;AAGnE,QAAI,gBAAgB,GAAG;AACtB,YAAM,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA,mBAAmB;AAAA,QACnB;AAAA,MACD;AAAA,IACD;AAEA,mCAAW,SAAS,2BAA2B;AAAA,EAChD;AAGA,MAAI,eAAe,KAAK;AACvB,UAAM,qBACJ,QAAQ,iBAAiB,sBAAsB,GAAG,GAAG,KAAgB;AAGvE,aAAS,IAAI,GAAG,IAAI,sBAAsB,QAAQ,KAAK,oBAAoB;AAC1E,YAAM,YAAY,sBAAsB,MAAM,GAAG,IAAI,kBAAkB;AACvE,YAAM,KAAK,oBAAoB,SAAS,YAAY,WAAW,CAAC;AAAA,IACjE;AAAA,EACD;AAEA,SAAO;AACR;","names":[]}
package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/loadOperation.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../../nodes/vector_store/shared/createVectorStoreNode/operations/loadOperation.ts"],"sourcesContent":["import type { Embeddings } from '@langchain/core/embeddings';\nimport type { VectorStore } from '@langchain/core/vectorstores';\nimport type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';\n\nimport { getMetadataFiltersValues, logAiEvent } from '@utils/helpers';\n\nimport type { VectorStoreNodeConstructorArgs } from '../types';\n\n/**\n * Handles the 'load' operation mode\n * Searches the vector store for documents similar to a query\n */\nexport async function handleLoadOperation<T extends VectorStore = VectorStore>(\n\tcontext: IExecuteFunctions,\n\targs: VectorStoreNodeConstructorArgs<T>,\n\tembeddings: Embeddings,\n\titemIndex: number,\n): Promise<INodeExecutionData[]> {\n\tconst filter = getMetadataFiltersValues(context, itemIndex);\n\tconst vectorStore = await args.getVectorStoreClient(\n\t\tcontext,\n\t\t// We'll pass filter to similaritySearchVectorWithScore instead of getVectorStoreClient\n\t\tundefined,\n\t\tembeddings,\n\t\titemIndex,\n\t);\n\n\ttry {\n\t\t// Get the search parameters from the node\n\t\tconst prompt = context.getNodeParameter('prompt', itemIndex) as string;\n\t\tconst topK = context.getNodeParameter('topK', itemIndex, 4) as number;\n\t\tconst includeDocumentMetadata = context.getNodeParameter(\n\t\t\t'includeDocumentMetadata',\n\t\t\titemIndex,\n\t\t\ttrue,\n\t\t) as boolean;\n\n\t\t// Embed the prompt to prepare for vector similarity search\n\t\tconst embeddedPrompt = await embeddings.embedQuery(prompt);\n\n\t\t// Get the most similar documents to the embedded prompt\n\t\tconst docs = await vectorStore.similaritySearchVectorWithScore(embeddedPrompt, topK, filter);\n\n\t\t// Format the documents for the output\n\t\tconst serializedDocs = docs.map(([doc, score]) => {\n\t\t\tconst document = {\n\t\t\t\tpageContent: doc.pageContent,\n\t\t\t\t...(includeDocumentMetadata ? { metadata: doc.metadata } : {}),\n\t\t\t};\n\n\t\t\treturn {\n\t\t\t\tjson: { document, score },\n\t\t\t\tpairedItem: {\n\t\t\t\t\titem: itemIndex,\n\t\t\t\t},\n\t\t\t};\n\t\t});\n\n\t\t// Log the AI event for analytics\n\t\tlogAiEvent(context, 'ai-vector-store-searched', { query: prompt });\n\n\t\treturn serializedDocs;\n\t} finally {\n\t\t// Release the vector store client if a release method was provided\n\t\targs.releaseVectorStoreClient?.(vectorStore);\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,qBAAqD;AAQrD,eAAsB,oBACrB,SACA,MACA,YACA,WACgC;AAChC,QAAM,aAAS,yCAAyB,SAAS,SAAS;AAC1D,QAAM,cAAc,MAAM,KAAK;AAAA,IAC9B;AAAA;AAAA,IAEA;AAAA,IACA;AAAA,IACA;AAAA,EACD;AAEA,MAAI;AAEH,UAAM,SAAS,QAAQ,iBAAiB,UAAU,SAAS;AAC3D,UAAM,OAAO,QAAQ,iBAAiB,QAAQ,WAAW,CAAC;AAC1D,UAAM,0BAA0B,QAAQ;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAGA,UAAM,iBAAiB,MAAM,WAAW,WAAW,MAAM;AAGzD,UAAM,OAAO,MAAM,YAAY,gCAAgC,gBAAgB,MAAM,MAAM;AAG3F,UAAM,iBAAiB,KAAK,IAAI,CAAC,CAAC,KAAK,KAAK,MAAM;AACjD,YAAM,WAAW;AAAA,QAChB,aAAa,IAAI;AAAA,QACjB,GAAI,0BAA0B,EAAE,UAAU,IAAI,SAAS,IAAI,CAAC;AAAA,MAC7D;AAEA,aAAO;AAAA,QACN,MAAM,EAAE,UAAU,MAAM;AAAA,QACxB,YAAY;AAAA,UACX,MAAM;AAAA,QACP;AAAA,MACD;AAAA,IACD,CAAC;AAGD,mCAAW,SAAS,4BAA4B,EAAE,OAAO,OAAO,CAAC;AAEjE,WAAO;AAAA,EACR,UAAE;AAED,SAAK,2BAA2B,WAAW;AAAA,EAC5C;AACD;","names":[]}
package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveAsToolOperation.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../../nodes/vector_store/shared/createVectorStoreNode/operations/retrieveAsToolOperation.ts"],"sourcesContent":["import type { Embeddings } from '@langchain/core/embeddings';\nimport type { VectorStore } from '@langchain/core/vectorstores';\nimport { DynamicTool } from 'langchain/tools';\nimport type { ISupplyDataFunctions, SupplyData } from 'n8n-workflow';\n\nimport { getMetadataFiltersValues } from '@utils/helpers';\nimport { logWrapper } from '@utils/logWrapper';\n\nimport type { VectorStoreNodeConstructorArgs } from '../types';\n\n/**\n * Handles the 'retrieve-as-tool' operation mode\n * Returns a tool that can be used with AI Agent nodes\n */\nexport async function handleRetrieveAsToolOperation<T extends VectorStore = VectorStore>(\n\tcontext: ISupplyDataFunctions,\n\targs: VectorStoreNodeConstructorArgs<T>,\n\tembeddings: Embeddings,\n\titemIndex: number,\n): Promise<SupplyData> {\n\t// Get the tool configuration parameters\n\tconst toolDescription = context.getNodeParameter('toolDescription', itemIndex) as string;\n\tconst toolName = context.getNodeParameter('toolName', itemIndex) as string;\n\tconst topK = context.getNodeParameter('topK', itemIndex, 4) as number;\n\tconst includeDocumentMetadata = context.getNodeParameter(\n\t\t'includeDocumentMetadata',\n\t\titemIndex,\n\t\ttrue,\n\t) as boolean;\n\n\t// Get metadata filters\n\tconst filter = getMetadataFiltersValues(context, itemIndex);\n\n\t// Create a Dynamic Tool that wraps vector store search functionality\n\tconst vectorStoreTool = new DynamicTool({\n\t\tname: toolName,\n\t\tdescription: toolDescription,\n\t\tfunc: async (input) => {\n\t\t\t// For each tool use, get a fresh vector store client.\n\t\t\t// We don't pass in a filter here only later in the similaritySearchVectorWithScore\n\t\t\t// method to avoid an exception with some vector stores like Supabase or Pinecone(#AI-740)\n\t\t\tconst vectorStore = await args.getVectorStoreClient(\n\t\t\t\tcontext,\n\t\t\t\tundefined,\n\t\t\t\tembeddings,\n\t\t\t\titemIndex,\n\t\t\t);\n\n\t\t\ttry {\n\t\t\t\t// Embed the input query\n\t\t\t\tconst embeddedPrompt = await embeddings.embedQuery(input);\n\n\t\t\t\t// Search for similar documents\n\t\t\t\tconst documents = await vectorStore.similaritySearchVectorWithScore(\n\t\t\t\t\tembeddedPrompt,\n\t\t\t\t\ttopK,\n\t\t\t\t\tfilter,\n\t\t\t\t);\n\n\t\t\t\t// Format the documents for the tool output\n\t\t\t\treturn documents\n\t\t\t\t\t.map((document) => {\n\t\t\t\t\t\tif (includeDocumentMetadata) {\n\t\t\t\t\t\t\treturn { type: 'text', text: JSON.stringify(document[0]) };\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: 'text',\n\t\t\t\t\t\t\ttext: JSON.stringify({ pageContent: document[0].pageContent }),\n\t\t\t\t\t\t};\n\t\t\t\t\t})\n\t\t\t\t\t.filter((document) => !!document);\n\t\t\t} finally {\n\t\t\t\t// Release the vector store client if a release method was provided\n\t\t\t\targs.releaseVectorStoreClient?.(vectorStore);\n\t\t\t}\n\t\t},\n\t});\n\n\t// Return the vector store tool with logging wrapper\n\treturn {\n\t\tresponse: logWrapper(vectorStoreTool, 
context),\n\t};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,mBAA4B;AAG5B,qBAAyC;AACzC,wBAA2B;AAQ3B,eAAsB,8BACrB,SACA,MACA,YACA,WACsB;AAEtB,QAAM,kBAAkB,QAAQ,iBAAiB,mBAAmB,SAAS;AAC7E,QAAM,WAAW,QAAQ,iBAAiB,YAAY,SAAS;AAC/D,QAAM,OAAO,QAAQ,iBAAiB,QAAQ,WAAW,CAAC;AAC1D,QAAM,0BAA0B,QAAQ;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,EACD;AAGA,QAAM,aAAS,yCAAyB,SAAS,SAAS;AAG1D,QAAM,kBAAkB,IAAI,yBAAY;AAAA,IACvC,MAAM;AAAA,IACN,aAAa;AAAA,IACb,MAAM,OAAO,UAAU;AAItB,YAAM,cAAc,MAAM,KAAK;AAAA,QAC9B;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACD;AAEA,UAAI;AAEH,cAAM,iBAAiB,MAAM,WAAW,WAAW,KAAK;AAGxD,cAAM,YAAY,MAAM,YAAY;AAAA,UACnC;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAGA,eAAO,UACL,IAAI,CAAC,aAAa;AAClB,cAAI,yBAAyB;AAC5B,mBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,UAAU,SAAS,CAAC,CAAC,EAAE;AAAA,UAC1D;AACA,iBAAO;AAAA,YACN,MAAM;AAAA,YACN,MAAM,KAAK,UAAU,EAAE,aAAa,SAAS,CAAC,EAAE,YAAY,CAAC;AAAA,UAC9D;AAAA,QACD,CAAC,EACA,OAAO,CAAC,aAAa,CAAC,CAAC,QAAQ;AAAA,MAClC,UAAE;AAED,aAAK,2BAA2B,WAAW;AAAA,MAC5C;AAAA,IACD;AAAA,EACD,CAAC;AAGD,SAAO;AAAA,IACN,cAAU,8BAAW,iBAAiB,OAAO;AAAA,EAC9C;AACD;","names":[]}
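retrieveAsToolOperation.ts wraps the same similarity search in a LangChain DynamicTool so an agent can invoke it with a plain string. A minimal sketch of that wrapping; searchStore is an illustrative callback, and this sketch returns a JSON string (the standard DynamicTool contract) rather than the source's object array:

import { DynamicTool } from 'langchain/tools';

// Sketch: expose a vector-store search as a tool an agent can call with a query string.
// `searchStore` is an illustrative callback, not part of the package's API.
function makeVectorSearchTool(
	searchStore: (query: string) => Promise<Array<{ pageContent: string }>>,
) {
	return new DynamicTool({
		name: 'knowledge_base',
		description: 'Searches the knowledge base for passages relevant to the input query',
		func: async (input: string) => {
			const documents = await searchStore(input);
			// DynamicTool expects a string result; serialize the matched passages.
			return JSON.stringify(documents.map((d) => ({ pageContent: d.pageContent })));
		},
	});
}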
package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveOperation.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../../nodes/vector_store/shared/createVectorStoreNode/operations/retrieveOperation.ts"],"sourcesContent":["import type { Embeddings } from '@langchain/core/embeddings';\nimport type { VectorStore } from '@langchain/core/vectorstores';\nimport type { ISupplyDataFunctions, SupplyData } from 'n8n-workflow';\n\nimport { getMetadataFiltersValues } from '@utils/helpers';\nimport { logWrapper } from '@utils/logWrapper';\n\nimport type { VectorStoreNodeConstructorArgs } from '../types';\n\n/**\n * Handles the 'retrieve' operation mode\n * Returns the vector store to be used with AI nodes\n */\nexport async function handleRetrieveOperation<T extends VectorStore = VectorStore>(\n\tcontext: ISupplyDataFunctions,\n\targs: VectorStoreNodeConstructorArgs<T>,\n\tembeddings: Embeddings,\n\titemIndex: number,\n): Promise<SupplyData> {\n\t// Get metadata filters\n\tconst filter = getMetadataFiltersValues(context, itemIndex);\n\n\t// Get the vector store client\n\tconst vectorStore = await args.getVectorStoreClient(context, filter, embeddings, itemIndex);\n\n\t// Return the vector store with logging wrapper and cleanup function\n\treturn {\n\t\tresponse: logWrapper(vectorStore, context),\n\t\tcloseFunction: async () => {\n\t\t\t// Release the vector store client if a release method was provided\n\t\t\targs.releaseVectorStoreClient?.(vectorStore);\n\t\t},\n\t};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,qBAAyC;AACzC,wBAA2B;AAQ3B,eAAsB,wBACrB,SACA,MACA,YACA,WACsB;AAEtB,QAAM,aAAS,yCAAyB,SAAS,SAAS;AAG1D,QAAM,cAAc,MAAM,KAAK,qBAAqB,SAAS,QAAQ,YAAY,SAAS;AAG1F,SAAO;AAAA,IACN,cAAU,8BAAW,aAAa,OAAO;AAAA,IACzC,eAAe,YAAY;AAE1B,WAAK,2BAA2B,WAAW;AAAA,IAC5C;AAAA,EACD;AACD;","names":[]}
package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/updateOperation.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../../nodes/vector_store/shared/createVectorStoreNode/operations/updateOperation.ts"],"sourcesContent":["import type { Embeddings } from '@langchain/core/embeddings';\nimport type { VectorStore } from '@langchain/core/vectorstores';\nimport type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';\nimport { NodeOperationError } from 'n8n-workflow';\n\nimport { logAiEvent } from '@utils/helpers';\nimport { N8nJsonLoader } from '@utils/N8nJsonLoader';\n\nimport { processDocument } from '../../processDocuments';\nimport type { VectorStoreNodeConstructorArgs } from '../types';\nimport { isUpdateSupported } from '../utils';\n\n/**\n * Handles the 'update' operation mode\n * Updates existing documents in the vector store by ID\n */\nexport async function handleUpdateOperation<T extends VectorStore = VectorStore>(\n\tcontext: IExecuteFunctions,\n\targs: VectorStoreNodeConstructorArgs<T>,\n\tembeddings: Embeddings,\n): Promise<INodeExecutionData[]> {\n\t// First check if update operation is supported by this vector store\n\tif (!isUpdateSupported(args)) {\n\t\tthrow new NodeOperationError(\n\t\t\tcontext.getNode(),\n\t\t\t'Update operation is not implemented for this Vector Store',\n\t\t);\n\t}\n\n\t// Get input items\n\tconst items = context.getInputData();\n\t// Create a loader for processing document data\n\tconst loader = new N8nJsonLoader(context);\n\n\tconst resultData: INodeExecutionData[] = [];\n\n\t// Process each input item\n\tfor (let itemIndex = 0; itemIndex < items.length; itemIndex++) {\n\t\tconst itemData = items[itemIndex];\n\n\t\t// Get the document ID to update\n\t\tconst documentId = context.getNodeParameter('id', itemIndex, '', {\n\t\t\textractValue: true,\n\t\t}) as string;\n\n\t\t// Get the vector store client\n\t\tconst vectorStore = await args.getVectorStoreClient(context, undefined, embeddings, itemIndex);\n\n\t\ttry {\n\t\t\t// Process the document from the input\n\t\t\tconst { processedDocuments, serializedDocuments } = await processDocument(\n\t\t\t\tloader,\n\t\t\t\titemData,\n\t\t\t\titemIndex,\n\t\t\t);\n\n\t\t\t// Validate that we have exactly one document to update\n\t\t\tif (processedDocuments?.length !== 1) {\n\t\t\t\tthrow new NodeOperationError(context.getNode(), 'Single document per item expected');\n\t\t\t}\n\n\t\t\t// Add the serialized document to the result\n\t\t\tresultData.push(...serializedDocuments);\n\n\t\t\t// Use document ID to update the existing document\n\t\t\tawait vectorStore.addDocuments(processedDocuments, {\n\t\t\t\tids: [documentId],\n\t\t\t});\n\n\t\t\t// Log the AI event for analytics\n\t\t\tlogAiEvent(context, 'ai-vector-store-updated');\n\t\t} finally {\n\t\t\t// Release the vector store client if a release method was provided\n\t\t\targs.releaseVectorStoreClient?.(vectorStore);\n\t\t}\n\t}\n\n\treturn 
resultData;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,0BAAmC;AAEnC,qBAA2B;AAC3B,2BAA8B;AAE9B,8BAAgC;AAEhC,mBAAkC;AAMlC,eAAsB,sBACrB,SACA,MACA,YACgC;AAEhC,MAAI,KAAC,gCAAkB,IAAI,GAAG;AAC7B,UAAM,IAAI;AAAA,MACT,QAAQ,QAAQ;AAAA,MAChB;AAAA,IACD;AAAA,EACD;AAGA,QAAM,QAAQ,QAAQ,aAAa;AAEnC,QAAM,SAAS,IAAI,mCAAc,OAAO;AAExC,QAAM,aAAmC,CAAC;AAG1C,WAAS,YAAY,GAAG,YAAY,MAAM,QAAQ,aAAa;AAC9D,UAAM,WAAW,MAAM,SAAS;AAGhC,UAAM,aAAa,QAAQ,iBAAiB,MAAM,WAAW,IAAI;AAAA,MAChE,cAAc;AAAA,IACf,CAAC;AAGD,UAAM,cAAc,MAAM,KAAK,qBAAqB,SAAS,QAAW,YAAY,SAAS;AAE7F,QAAI;AAEH,YAAM,EAAE,oBAAoB,oBAAoB,IAAI,UAAM;AAAA,QACzD;AAAA,QACA;AAAA,QACA;AAAA,MACD;AAGA,UAAI,oBAAoB,WAAW,GAAG;AACrC,cAAM,IAAI,uCAAmB,QAAQ,QAAQ,GAAG,mCAAmC;AAAA,MACpF;AAGA,iBAAW,KAAK,GAAG,mBAAmB;AAGtC,YAAM,YAAY,aAAa,oBAAoB;AAAA,QAClD,KAAK,CAAC,UAAU;AAAA,MACjB,CAAC;AAGD,qCAAW,SAAS,yBAAyB;AAAA,IAC9C,UAAE;AAED,WAAK,2BAA2B,WAAW;AAAA,IAC5C;AAAA,EACD;AAEA,SAAO;AACR;","names":[]}
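updateOperation.ts overwrites an existing entry by re-adding a single document with an explicit id. A hedged sketch of that call against a simplified store interface (not the real LangChain class):

// Sketch: update a document by passing its id to addDocuments, as in the source above.
interface VectorStoreWithIds {
	addDocuments(
		docs: Array<{ pageContent: string; metadata: Record<string, unknown> }>,
		options?: { ids: string[] },
	): Promise<void>;
}

async function updateDocumentById(
	store: VectorStoreWithIds,
	documentId: string,
	doc: { pageContent: string; metadata: Record<string, unknown> },
): Promise<void> {
	// Exactly one document per id, matching the single-document check in updateOperation.
	await store.addDocuments([doc], { ids: [documentId] });
}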
package/dist/nodes/vector_store/shared/createVectorStoreNode/types.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../nodes/vector_store/shared/createVectorStoreNode/types.ts"],"sourcesContent":["import type { Document } from '@langchain/core/documents';\nimport type { Embeddings } from '@langchain/core/embeddings';\nimport type { VectorStore } from '@langchain/core/vectorstores';\nimport type {\n\tIExecuteFunctions,\n\tINodeCredentialDescription,\n\tINodeProperties,\n\tILoadOptionsFunctions,\n\tINodeListSearchResult,\n\tIcon,\n\tISupplyDataFunctions,\n\tThemeIconColor,\n} from 'n8n-workflow';\n\nexport type NodeOperationMode = 'insert' | 'load' | 'retrieve' | 'update' | 'retrieve-as-tool';\n\nexport interface NodeMeta {\n\tdisplayName: string;\n\tname: string;\n\tdescription: string;\n\tdocsUrl: string;\n\ticon: Icon;\n\ticonColor?: ThemeIconColor;\n\tcredentials?: INodeCredentialDescription[];\n\toperationModes?: NodeOperationMode[];\n}\n\nexport interface VectorStoreNodeConstructorArgs<T extends VectorStore = VectorStore> {\n\tmeta: NodeMeta;\n\tmethods?: {\n\t\tlistSearch?: {\n\t\t\t[key: string]: (\n\t\t\t\tthis: ILoadOptionsFunctions,\n\t\t\t\tfilter?: string,\n\t\t\t\tpaginationToken?: string,\n\t\t\t) => Promise<INodeListSearchResult>;\n\t\t};\n\t};\n\n\tsharedFields: INodeProperties[];\n\tinsertFields?: INodeProperties[];\n\tloadFields?: INodeProperties[];\n\tretrieveFields?: INodeProperties[];\n\tupdateFields?: INodeProperties[];\n\n\t/**\n\t * Function to populate the vector store with documents\n\t * Used during the 'insert' operation mode\n\t */\n\tpopulateVectorStore: (\n\t\tcontext: IExecuteFunctions | ISupplyDataFunctions,\n\t\tembeddings: Embeddings,\n\t\tdocuments: Array<Document<Record<string, unknown>>>,\n\t\titemIndex: number,\n\t) => Promise<void>;\n\n\t/**\n\t * Function to get the vector store client\n\t * This function is called for all operation modes\n\t */\n\tgetVectorStoreClient: (\n\t\tcontext: IExecuteFunctions | ISupplyDataFunctions,\n\t\tfilter: Record<string, never> | undefined,\n\t\tembeddings: Embeddings,\n\t\titemIndex: number,\n\t) => Promise<T>;\n\n\t/**\n\t * Optional function to release resources associated with the vector store client\n\t * Called after the vector store operations are complete\n\t */\n\treleaseVectorStoreClient?: (vectorStore: T) => void;\n}\n"],"mappings":";;;;;;;;;;;;;;AAAA;AAAA;","names":[]}
package/dist/nodes/vector_store/shared/createVectorStoreNode/utils.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../nodes/vector_store/shared/createVectorStoreNode/utils.ts"],"sourcesContent":["import type { VectorStore } from '@langchain/core/vectorstores';\nimport type { INodeProperties, INodePropertyOptions } from 'n8n-workflow';\n\nimport { DEFAULT_OPERATION_MODES, OPERATION_MODE_DESCRIPTIONS } from './constants';\nimport type { NodeOperationMode, VectorStoreNodeConstructorArgs } from './types';\n\n/**\n * Transforms field descriptions to show only for specific operation modes\n * This function adds displayOptions to each field to make it appear only for specified modes\n */\nexport function transformDescriptionForOperationMode(\n\tfields: INodeProperties[],\n\tmode: NodeOperationMode | NodeOperationMode[],\n): INodeProperties[] {\n\treturn fields.map((field) => ({\n\t\t...field,\n\t\tdisplayOptions: { show: { mode: Array.isArray(mode) ? mode : [mode] } },\n\t}));\n}\n\n/**\n * Checks if the update operation is supported for a specific vector store\n * A vector store supports updates if it explicitly includes 'update' in its operationModes\n */\nexport function isUpdateSupported<T extends VectorStore>(\n\targs: VectorStoreNodeConstructorArgs<T>,\n): boolean {\n\treturn args.meta.operationModes?.includes('update') ?? false;\n}\n\n/**\n * Returns the operation mode options enabled for a specific vector store\n * Filters the full list of operation modes based on what's enabled for this vector store\n */\nexport function getOperationModeOptions<T extends VectorStore>(\n\targs: VectorStoreNodeConstructorArgs<T>,\n): INodePropertyOptions[] {\n\tconst enabledOperationModes = args.meta.operationModes ?? DEFAULT_OPERATION_MODES;\n\n\treturn OPERATION_MODE_DESCRIPTIONS.filter(({ value }) =>\n\t\tenabledOperationModes.includes(value as NodeOperationMode),\n\t);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,uBAAqE;AAO9D,SAAS,qCACf,QACA,MACoB;AACpB,SAAO,OAAO,IAAI,CAAC,WAAW;AAAA,IAC7B,GAAG;AAAA,IACH,gBAAgB,EAAE,MAAM,EAAE,MAAM,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,IAAI,EAAE,EAAE;AAAA,EACvE,EAAE;AACH;AAMO,SAAS,kBACf,MACU;AACV,SAAO,KAAK,KAAK,gBAAgB,SAAS,QAAQ,KAAK;AACxD;AAMO,SAAS,wBACf,MACyB;AACzB,QAAM,wBAAwB,KAAK,KAAK,kBAAkB;AAE1D,SAAO,6CAA4B;AAAA,IAAO,CAAC,EAAE,MAAM,MAClD,sBAAsB,SAAS,KAA0B;AAAA,EAC1D;AACD;","names":[]}
package/dist/nodes/vector_store/shared/descriptions.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../nodes/vector_store/shared/descriptions.ts"],"sourcesContent":["import type { INodeProperties } from 'n8n-workflow';\n\nexport const pineconeIndexRLC: INodeProperties = {\n\tdisplayName: 'Pinecone Index',\n\tname: 'pineconeIndex',\n\ttype: 'resourceLocator',\n\tdefault: { mode: 'list', value: '' },\n\trequired: true,\n\tmodes: [\n\t\t{\n\t\t\tdisplayName: 'From List',\n\t\t\tname: 'list',\n\t\t\ttype: 'list',\n\t\t\ttypeOptions: {\n\t\t\t\tsearchListMethod: 'pineconeIndexSearch',\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'ID',\n\t\t\tname: 'id',\n\t\t\ttype: 'string',\n\t\t},\n\t],\n};\n\nexport const supabaseTableNameRLC: INodeProperties = {\n\tdisplayName: 'Table Name',\n\tname: 'tableName',\n\ttype: 'resourceLocator',\n\tdefault: { mode: 'list', value: '' },\n\trequired: true,\n\tmodes: [\n\t\t{\n\t\t\tdisplayName: 'From List',\n\t\t\tname: 'list',\n\t\t\ttype: 'list',\n\t\t\ttypeOptions: {\n\t\t\t\tsearchListMethod: 'supabaseTableNameSearch',\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'ID',\n\t\t\tname: 'id',\n\t\t\ttype: 'string',\n\t\t},\n\t],\n};\n\nexport const qdrantCollectionRLC: INodeProperties = {\n\tdisplayName: 'Qdrant Collection',\n\tname: 'qdrantCollection',\n\ttype: 'resourceLocator',\n\tdefault: { mode: 'list', value: '' },\n\trequired: true,\n\tmodes: [\n\t\t{\n\t\t\tdisplayName: 'From List',\n\t\t\tname: 'list',\n\t\t\ttype: 'list',\n\t\t\ttypeOptions: {\n\t\t\t\tsearchListMethod: 'qdrantCollectionsSearch',\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'ID',\n\t\t\tname: 'id',\n\t\t\ttype: 'string',\n\t\t},\n\t],\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,mBAAoC;AAAA,EAChD,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,EACnC,UAAU;AAAA,EACV,OAAO;AAAA,IACN;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,uBAAwC;AAAA,EACpD,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,EACnC,UAAU;AAAA,EACV,OAAO;AAAA,IACN;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,sBAAuC;AAAA,EACnD,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,EACnC,UAAU;AAAA,EACV,OAAO;AAAA,IACN;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,IACP;AAAA,EACD;AACD;","names":[]}
package/dist/nodes/vector_store/shared/processDocuments.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../nodes/vector_store/shared/processDocuments.ts"],"sourcesContent":["import type { Document } from '@langchain/core/documents';\nimport type { INodeExecutionData } from 'n8n-workflow';\n\nimport { N8nBinaryLoader } from '@utils/N8nBinaryLoader';\nimport { N8nJsonLoader } from '@utils/N8nJsonLoader';\n\nexport async function processDocuments(\n\tdocumentInput: N8nJsonLoader | N8nBinaryLoader | Array<Document<Record<string, unknown>>>,\n\tinputItems: INodeExecutionData[],\n) {\n\tlet processedDocuments: Document[];\n\n\tif (documentInput instanceof N8nJsonLoader || documentInput instanceof N8nBinaryLoader) {\n\t\tprocessedDocuments = await documentInput.processAll(inputItems);\n\t} else {\n\t\tprocessedDocuments = documentInput;\n\t}\n\n\tconst serializedDocuments = processedDocuments.map(({ metadata, pageContent }) => ({\n\t\tjson: { metadata, pageContent },\n\t}));\n\n\treturn {\n\t\tprocessedDocuments,\n\t\tserializedDocuments,\n\t};\n}\nexport async function processDocument(\n\tdocumentInput: N8nJsonLoader | N8nBinaryLoader | Array<Document<Record<string, unknown>>>,\n\tinputItem: INodeExecutionData,\n\titemIndex: number,\n) {\n\tlet processedDocuments: Document[];\n\n\tif (documentInput instanceof N8nJsonLoader || documentInput instanceof N8nBinaryLoader) {\n\t\tprocessedDocuments = await documentInput.processItem(inputItem, itemIndex);\n\t} else {\n\t\tprocessedDocuments = documentInput;\n\t}\n\n\tconst serializedDocuments = processedDocuments.map(({ metadata, pageContent }) => ({\n\t\tjson: { metadata, pageContent },\n\t\tpairedItem: {\n\t\t\titem: itemIndex,\n\t\t},\n\t}));\n\n\treturn {\n\t\tprocessedDocuments,\n\t\tserializedDocuments,\n\t};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,6BAAgC;AAChC,2BAA8B;AAE9B,eAAsB,iBACrB,eACA,YACC;AACD,MAAI;AAEJ,MAAI,yBAAyB,sCAAiB,yBAAyB,wCAAiB;AACvF,yBAAqB,MAAM,cAAc,WAAW,UAAU;AAAA,EAC/D,OAAO;AACN,yBAAqB;AAAA,EACtB;AAEA,QAAM,sBAAsB,mBAAmB,IAAI,CAAC,EAAE,UAAU,YAAY,OAAO;AAAA,IAClF,MAAM,EAAE,UAAU,YAAY;AAAA,EAC/B,EAAE;AAEF,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;AACA,eAAsB,gBACrB,eACA,WACA,WACC;AACD,MAAI;AAEJ,MAAI,yBAAyB,sCAAiB,yBAAyB,wCAAiB;AACvF,yBAAqB,MAAM,cAAc,YAAY,WAAW,SAAS;AAAA,EAC1E,OAAO;AACN,yBAAqB;AAAA,EACtB;AAEA,QAAM,sBAAsB,mBAAmB,IAAI,CAAC,EAAE,UAAU,YAAY,OAAO;AAAA,IAClF,MAAM,EAAE,UAAU,YAAY;AAAA,IAC9B,YAAY;AAAA,MACX,MAAM;AAAA,IACP;AAAA,EACD,EAAE;AAEF,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;","names":[]}
package/dist/nodes/vendors/OpenAi/OpenAi.node.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../nodes/vendors/OpenAi/OpenAi.node.ts"],"sourcesContent":["import type { IExecuteFunctions, INodeType } from 'n8n-workflow';\n\nimport { router } from './actions/router';\nimport { versionDescription } from './actions/versionDescription';\nimport { listSearch, loadOptions } from './methods';\n\nexport class OpenAi implements INodeType {\n\tdescription = versionDescription;\n\n\tmethods = {\n\t\tlistSearch,\n\t\tloadOptions,\n\t};\n\n\tasync execute(this: IExecuteFunctions) {\n\t\treturn await router.call(this);\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAAuB;AACvB,gCAAmC;AACnC,qBAAwC;AAEjC,MAAM,OAA4B;AAAA,EAAlC;AACN,uBAAc;AAEd,mBAAU;AAAA,MACT;AAAA,MACA;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,UAAiC;AACtC,WAAO,MAAM,qBAAO,KAAK,IAAI;AAAA,EAC9B;AACD;","names":[]}
package/dist/nodes/vendors/OpenAi/actions/assistant/create.operation.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../../nodes/vendors/OpenAi/actions/assistant/create.operation.ts"],"sourcesContent":["import type {\n\tINodeProperties,\n\tIExecuteFunctions,\n\tINodeExecutionData,\n\tIDataObject,\n} from 'n8n-workflow';\nimport { NodeOperationError, updateDisplayOptions } from 'n8n-workflow';\n\nimport { apiRequest } from '../../transport';\nimport { modelRLC } from '../descriptions';\n\nconst properties: INodeProperties[] = [\n\tmodelRLC('modelSearch'),\n\t{\n\t\tdisplayName: 'Name',\n\t\tname: 'name',\n\t\ttype: 'string',\n\t\tdefault: '',\n\t\tdescription: 'The name of the assistant. The maximum length is 256 characters.',\n\t\tplaceholder: 'e.g. My Assistant',\n\t\trequired: true,\n\t},\n\t{\n\t\tdisplayName: 'Description',\n\t\tname: 'description',\n\t\ttype: 'string',\n\t\tdefault: '',\n\t\tdescription: 'The description of the assistant. The maximum length is 512 characters.',\n\t\tplaceholder: 'e.g. My personal assistant',\n\t},\n\t{\n\t\tdisplayName: 'Instructions',\n\t\tname: 'instructions',\n\t\ttype: 'string',\n\t\tdescription:\n\t\t\t'The system instructions that the assistant uses. The maximum length is 32768 characters.',\n\t\tdefault: '',\n\t\ttypeOptions: {\n\t\t\trows: 2,\n\t\t},\n\t},\n\t{\n\t\tdisplayName: 'Code Interpreter',\n\t\tname: 'codeInterpreter',\n\t\ttype: 'boolean',\n\t\tdefault: false,\n\t\tdescription:\n\t\t\t'Whether to enable the code interpreter that allows the assistants to write and run Python code in a sandboxed execution environment, find more <a href=\"https://platform.openai.com/docs/assistants/tools/code-interpreter\" target=\"_blank\">here</a>',\n\t},\n\t{\n\t\tdisplayName: 'Knowledge Retrieval',\n\t\tname: 'knowledgeRetrieval',\n\t\ttype: 'boolean',\n\t\tdefault: false,\n\t\tdescription:\n\t\t\t'Whether to augments the assistant with knowledge from outside its model, such as proprietary product information or documents, find more <a href=\"https://platform.openai.com/docs/assistants/tools/knowledge-retrieval\" target=\"_blank\">here</a>',\n\t},\n\t//we want to display Files selector only when codeInterpreter true or knowledgeRetrieval true or both\n\t{\n\t\t// eslint-disable-next-line n8n-nodes-base/node-param-display-name-wrong-for-dynamic-multi-options\n\t\tdisplayName: 'Files',\n\t\tname: 'file_ids',\n\t\ttype: 'multiOptions',\n\t\t// eslint-disable-next-line n8n-nodes-base/node-param-description-wrong-for-dynamic-multi-options\n\t\tdescription:\n\t\t\t'The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant. 
You can use expression to pass file IDs as an array or comma-separated string.',\n\t\ttypeOptions: {\n\t\t\tloadOptionsMethod: 'getFiles',\n\t\t},\n\t\tdefault: [],\n\t\thint: \"Add more files by using the 'Upload a File' operation\",\n\t\tdisplayOptions: {\n\t\t\tshow: {\n\t\t\t\tcodeInterpreter: [true],\n\t\t\t},\n\t\t\thide: {\n\t\t\t\tknowledgeRetrieval: [true],\n\t\t\t},\n\t\t},\n\t},\n\t{\n\t\t// eslint-disable-next-line n8n-nodes-base/node-param-display-name-wrong-for-dynamic-multi-options\n\t\tdisplayName: 'Files',\n\t\tname: 'file_ids',\n\t\ttype: 'multiOptions',\n\t\t// eslint-disable-next-line n8n-nodes-base/node-param-description-wrong-for-dynamic-multi-options\n\t\tdescription:\n\t\t\t'The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant',\n\t\ttypeOptions: {\n\t\t\tloadOptionsMethod: 'getFiles',\n\t\t},\n\t\tdefault: [],\n\t\thint: \"Add more files by using the 'Upload a File' operation\",\n\t\tdisplayOptions: {\n\t\t\tshow: {\n\t\t\t\tknowledgeRetrieval: [true],\n\t\t\t},\n\t\t\thide: {\n\t\t\t\tcodeInterpreter: [true],\n\t\t\t},\n\t\t},\n\t},\n\t{\n\t\t// eslint-disable-next-line n8n-nodes-base/node-param-display-name-wrong-for-dynamic-multi-options\n\t\tdisplayName: 'Files',\n\t\tname: 'file_ids',\n\t\ttype: 'multiOptions',\n\t\t// eslint-disable-next-line n8n-nodes-base/node-param-description-wrong-for-dynamic-multi-options\n\t\tdescription:\n\t\t\t'The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant',\n\t\ttypeOptions: {\n\t\t\tloadOptionsMethod: 'getFiles',\n\t\t},\n\t\tdefault: [],\n\t\thint: \"Add more files by using the 'Upload a File' operation\",\n\t\tdisplayOptions: {\n\t\t\tshow: {\n\t\t\t\tknowledgeRetrieval: [true],\n\t\t\t\tcodeInterpreter: [true],\n\t\t\t},\n\t\t},\n\t},\n\t{\n\t\tdisplayName:\n\t\t\t'Add custom n8n tools when you <i>message</i> your assistant (rather than when creating it)',\n\t\tname: 'noticeTools',\n\t\ttype: 'notice',\n\t\tdefault: '',\n\t},\n\t{\n\t\tdisplayName: 'Options',\n\t\tname: 'options',\n\t\tplaceholder: 'Add Option',\n\t\ttype: 'collection',\n\t\tdefault: {},\n\t\toptions: [\n\t\t\t{\n\t\t\t\tdisplayName: 'Output Randomness (Temperature)',\n\t\t\t\tname: 'temperature',\n\t\t\t\tdefault: 1,\n\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\tdescription:\n\t\t\t\t\t'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. We generally recommend altering this or temperature but not both.',\n\t\t\t\ttype: 'number',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Output Randomness (Top P)',\n\t\t\t\tname: 'topP',\n\t\t\t\tdefault: 1,\n\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\tdescription:\n\t\t\t\t\t'An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.',\n\t\t\t\ttype: 'number',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Fail if Assistant Already Exists',\n\t\t\t\tname: 'failIfExists',\n\t\t\t\ttype: 'boolean',\n\t\t\t\tdefault: false,\n\t\t\t\tdescription:\n\t\t\t\t\t'Whether to fail an operation if the assistant with the same name already exists',\n\t\t\t},\n\t\t],\n\t},\n];\n\nconst displayOptions = {\n\tshow: {\n\t\toperation: ['create'],\n\t\tresource: ['assistant'],\n\t},\n};\n\nexport const description = updateDisplayOptions(displayOptions, properties);\n\nexport async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {\n\tconst model = this.getNodeParameter('modelId', i, '', { extractValue: true }) as string;\n\tconst name = this.getNodeParameter('name', i) as string;\n\tconst assistantDescription = this.getNodeParameter('description', i) as string;\n\tconst instructions = this.getNodeParameter('instructions', i) as string;\n\tconst codeInterpreter = this.getNodeParameter('codeInterpreter', i) as boolean;\n\tconst knowledgeRetrieval = this.getNodeParameter('knowledgeRetrieval', i) as boolean;\n\tlet file_ids = this.getNodeParameter('file_ids', i, []) as string[] | string;\n\tif (typeof file_ids === 'string') {\n\t\tfile_ids = file_ids.split(',').map((file_id) => file_id.trim());\n\t}\n\tconst options = this.getNodeParameter('options', i, {});\n\n\tif (options.failIfExists) {\n\t\tconst assistants: string[] = [];\n\n\t\tlet has_more = true;\n\t\tlet after: string | undefined;\n\n\t\tdo {\n\t\t\tconst response = (await apiRequest.call(this, 'GET', '/assistants', {\n\t\t\t\theaders: {\n\t\t\t\t\t'OpenAI-Beta': 'assistants=v2',\n\t\t\t\t},\n\t\t\t\tqs: {\n\t\t\t\t\tlimit: 100,\n\t\t\t\t\tafter,\n\t\t\t\t},\n\t\t\t})) as { data: IDataObject[]; has_more: boolean; last_id: string };\n\n\t\t\tfor (const assistant of response.data || []) {\n\t\t\t\tassistants.push(assistant.name as string);\n\t\t\t}\n\n\t\t\thas_more = response.has_more;\n\n\t\t\tif (has_more) {\n\t\t\t\tafter = response.last_id;\n\t\t\t} else {\n\t\t\t\tbreak;\n\t\t\t}\n\t\t} while (has_more);\n\n\t\tif (assistants.includes(name)) {\n\t\t\tthrow new NodeOperationError(\n\t\t\t\tthis.getNode(),\n\t\t\t\t`An assistant with the same name '${name}' already exists`,\n\t\t\t\t{ itemIndex: i },\n\t\t\t);\n\t\t}\n\t}\n\n\tif (file_ids.length > 20) {\n\t\tthrow new NodeOperationError(\n\t\t\tthis.getNode(),\n\t\t\t'The maximum number of files that can be attached to the assistant is 20',\n\t\t\t{ itemIndex: i },\n\t\t);\n\t}\n\n\tconst body: IDataObject = {\n\t\tmodel,\n\t\tname,\n\t\tdescription: assistantDescription,\n\t\tinstructions,\n\t};\n\n\tconst tools = [];\n\n\tif (codeInterpreter) {\n\t\ttools.push({\n\t\t\ttype: 'code_interpreter',\n\t\t});\n\t\tbody.tool_resources = {\n\t\t\t...((body.tool_resources as object) ?? {}),\n\t\t\tcode_interpreter: {\n\t\t\t\tfile_ids,\n\t\t\t},\n\t\t};\n\t}\n\n\tif (knowledgeRetrieval) {\n\t\ttools.push({\n\t\t\ttype: 'file_search',\n\t\t});\n\t\tbody.tool_resources = {\n\t\t\t...((body.tool_resources as object) ?? 
{}),\n\t\t\tfile_search: {\n\t\t\t\tvector_stores: [\n\t\t\t\t\t{\n\t\t\t\t\t\tfile_ids,\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t};\n\t}\n\n\tif (tools.length) {\n\t\tbody.tools = tools;\n\t}\n\n\tconst response = await apiRequest.call(this, 'POST', '/assistants', {\n\t\tbody,\n\t\theaders: {\n\t\t\t'OpenAI-Beta': 'assistants=v2',\n\t\t},\n\t});\n\n\treturn [\n\t\t{\n\t\t\tjson: response,\n\t\t\tpairedItem: { item: i },\n\t\t},\n\t];\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAMA,0BAAyD;AAEzD,uBAA2B;AAC3B,0BAAyB;AAEzB,MAAM,aAAgC;AAAA,MACrC,8BAAS,aAAa;AAAA,EACtB;AAAA,IACC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,IACb,aAAa;AAAA,IACb,UAAU;AAAA,EACX;AAAA,EACA;AAAA,IACC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,IACb,aAAa;AAAA,EACd;AAAA,EACA;AAAA,IACC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA,IACN,aACC;AAAA,IACD,SAAS;AAAA,IACT,aAAa;AAAA,MACZ,MAAM;AAAA,IACP;AAAA,EACD;AAAA,EACA;AAAA,IACC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aACC;AAAA,EACF;AAAA,EACA;AAAA,IACC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aACC;AAAA,EACF;AAAA;AAAA,EAEA;AAAA;AAAA,IAEC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA;AAAA,IAEN,aACC;AAAA,IACD,aAAa;AAAA,MACZ,mBAAmB;AAAA,IACpB;AAAA,IACA,SAAS,CAAC;AAAA,IACV,MAAM;AAAA,IACN,gBAAgB;AAAA,MACf,MAAM;AAAA,QACL,iBAAiB,CAAC,IAAI;AAAA,MACvB;AAAA,MACA,MAAM;AAAA,QACL,oBAAoB,CAAC,IAAI;AAAA,MAC1B;AAAA,IACD;AAAA,EACD;AAAA,EACA;AAAA;AAAA,IAEC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA;AAAA,IAEN,aACC;AAAA,IACD,aAAa;AAAA,MACZ,mBAAmB;AAAA,IACpB;AAAA,IACA,SAAS,CAAC;AAAA,IACV,MAAM;AAAA,IACN,gBAAgB;AAAA,MACf,MAAM;AAAA,QACL,oBAAoB,CAAC,IAAI;AAAA,MAC1B;AAAA,MACA,MAAM;AAAA,QACL,iBAAiB,CAAC,IAAI;AAAA,MACvB;AAAA,IACD;AAAA,EACD;AAAA,EACA;AAAA;AAAA,IAEC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA;AAAA,IAEN,aACC;AAAA,IACD,aAAa;AAAA,MACZ,mBAAmB;AAAA,IACpB;AAAA,IACA,SAAS,CAAC;AAAA,IACV,MAAM;AAAA,IACN,gBAAgB;AAAA,MACf,MAAM;AAAA,QACL,oBAAoB,CAAC,IAAI;AAAA,QACzB,iBAAiB,CAAC,IAAI;AAAA,MACvB;AAAA,IACD;AAAA,EACD;AAAA,EACA;AAAA,IACC,aACC;AAAA,IACD,MAAM;AAAA,IACN,MAAM;AAAA,IACN,SAAS;AAAA,EACV;AAAA,EACA;AAAA,IACC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,aAAa;AAAA,IACb,MAAM;AAAA,IACN,SAAS,CAAC;AAAA,IACV,SAAS;AAAA,MACR;AAAA,QACC,aAAa;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,QACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,QAC5D,aACC;AAAA,QACD,MAAM;AAAA,MACP;AAAA,MACA;AAAA,QACC,aAAa;AAAA,QACb,MAAM;AAAA,QACN,SAAS;AAAA,QACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,QAC5D,aACC;AAAA,QACD,MAAM;AAAA,MACP;AAAA,MACA;AAAA,QACC,aAAa;AAAA,QACb,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS;AAAA,QACT,aACC;AAAA,MACF;AAAA,IACD;AAAA,EACD;AACD;AAEA,MAAM,iBAAiB;AAAA,EACtB,MAAM;AAAA,IACL,WAAW,CAAC,QAAQ;AAAA,IACpB,UAAU,CAAC,WAAW;AAAA,EACvB;AACD;AAEO,MAAM,kBAAc,0CAAqB,gBAAgB,UAAU;AAE1E,eAAsB,QAAiC,GAA0C;AAChG,QAAM,QAAQ,KAAK,iBAAiB,WAAW,GAAG,IAAI,EAAE,cAAc,KAAK,CAAC;AAC5E,QAAM,OAAO,KAAK,iBAAiB,QAAQ,CAAC;AAC5C,QAAM,uBAAuB,KAAK,iBAAiB,eAAe,CAAC;AACnE,QAAM,eAAe,KAAK,iBAAiB,gBAAgB,CAAC;AAC5D,QAAM,kBAAkB,KAAK,iBAAiB,mBAAmB,CAAC;AAClE,QAAM,qBAAqB,KAAK,iBAAiB,sBAAsB,CAAC;AACxE,MAAI,WAAW,KAAK,iBAAiB,YAAY,GAAG,CAAC,CAAC;AACtD,MAAI,OAAO,aAAa,UAAU;AACjC,eAAW,SAAS,MAAM,GAAG,EAAE,IAAI,CAAC,YAAY,QAAQ,KAAK,CAAC;AAAA,EAC/D;AACA,QAAM,UAAU,KAAK,iBAAiB,WAAW,GAAG,CAAC,CAAC;AAEtD,MAAI,QAAQ,cAAc;AACzB,UAAM,aAAuB,CAAC;AAE9B,QAAI,WAAW;AACf,QAAI;AAEJ,OAAG;AACF,YAAMA,YAAY,MAAM,4BAAW,KAAK,MAAM,OAAO,eAAe;AAAA,QACnE,SAAS;AAAA,UACR,eAAe;AAAA,QAChB;AAAA,QACA,IAAI;AAAA,UACH,OAAO;AAAA,UACP;AAAA,QACD;AAAA,MACD,CAAC;AAED,iBAAW,aAAaA,UAAS,QAAQ,CAAC,GAAG;AAC5C,mBAAW,KAAK,UAAU,IAAc;AAAA,MACzC;AA
EA,iBAAWA,UAAS;AAEpB,UAAI,UAAU;AACb,gBAAQA,UAAS;AAAA,MAClB,OAAO;AACN;AAAA,MACD;AAAA,IACD,SAAS;AAET,QAAI,WAAW,SAAS,IAAI,GAAG;AAC9B,YAAM,IAAI;AAAA,QACT,KAAK,QAAQ;AAAA,QACb,oCAAoC,IAAI;AAAA,QACxC,EAAE,WAAW,EAAE;AAAA,MAChB;AAAA,IACD;AAAA,EACD;AAEA,MAAI,SAAS,SAAS,IAAI;AACzB,UAAM,IAAI;AAAA,MACT,KAAK,QAAQ;AAAA,MACb;AAAA,MACA,EAAE,WAAW,EAAE;AAAA,IAChB;AAAA,EACD;AAEA,QAAM,OAAoB;AAAA,IACzB;AAAA,IACA;AAAA,IACA,aAAa;AAAA,IACb;AAAA,EACD;AAEA,QAAM,QAAQ,CAAC;AAEf,MAAI,iBAAiB;AACpB,UAAM,KAAK;AAAA,MACV,MAAM;AAAA,IACP,CAAC;AACD,SAAK,iBAAiB;AAAA,MACrB,GAAK,KAAK,kBAA6B,CAAC;AAAA,MACxC,kBAAkB;AAAA,QACjB;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAEA,MAAI,oBAAoB;AACvB,UAAM,KAAK;AAAA,MACV,MAAM;AAAA,IACP,CAAC;AACD,SAAK,iBAAiB;AAAA,MACrB,GAAK,KAAK,kBAA6B,CAAC;AAAA,MACxC,aAAa;AAAA,QACZ,eAAe;AAAA,UACd;AAAA,YACC;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAEA,MAAI,MAAM,QAAQ;AACjB,SAAK,QAAQ;AAAA,EACd;AAEA,QAAM,WAAW,MAAM,4BAAW,KAAK,MAAM,QAAQ,eAAe;AAAA,IACnE;AAAA,IACA,SAAS;AAAA,MACR,eAAe;AAAA,IAChB;AAAA,EACD,CAAC;AAED,SAAO;AAAA,IACN;AAAA,MACC,MAAM;AAAA,MACN,YAAY,EAAE,MAAM,EAAE;AAAA,IACvB;AAAA,EACD;AACD;","names":["response"]}
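create.operation.ts pages through GET /assistants with a cursor (after, has_more, last_id) before checking for a name collision. A minimal sketch of that cursor-pagination loop; fetchPage is an illustrative callback, not the node's apiRequest helper:

// Sketch: cursor pagination as used when checking for an existing assistant name.
interface Page {
	data: Array<{ name: string }>;
	has_more: boolean;
	last_id: string;
}

async function collectNames(fetchPage: (after?: string) => Promise<Page>): Promise<string[]> {
	const names: string[] = [];
	let after: string | undefined;
	let hasMore = true;

	while (hasMore) {
		const page = await fetchPage(after);
		names.push(...page.data.map((a) => a.name));
		hasMore = page.has_more;
		after = page.last_id; // cursor for the next request; only relevant while has_more is true
	}

	return names;
}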
package/dist/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../../nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.ts"],"sourcesContent":["import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';\nimport { updateDisplayOptions } from 'n8n-workflow';\n\nimport { apiRequest } from '../../transport';\nimport { assistantRLC } from '../descriptions';\n\nconst properties: INodeProperties[] = [assistantRLC];\n\nconst displayOptions = {\n\tshow: {\n\t\toperation: ['deleteAssistant'],\n\t\tresource: ['assistant'],\n\t},\n};\n\nexport const description = updateDisplayOptions(displayOptions, properties);\n\nexport async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {\n\tconst assistantId = this.getNodeParameter('assistantId', i, '', { extractValue: true }) as string;\n\n\tconst response = await apiRequest.call(this, 'DELETE', `/assistants/${assistantId}`, {\n\t\theaders: {\n\t\t\t'OpenAI-Beta': 'assistants=v2',\n\t\t},\n\t});\n\n\treturn [\n\t\t{\n\t\t\tjson: response,\n\t\t\tpairedItem: { item: i },\n\t\t},\n\t];\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,0BAAqC;AAErC,uBAA2B;AAC3B,0BAA6B;AAE7B,MAAM,aAAgC,CAAC,gCAAY;AAEnD,MAAM,iBAAiB;AAAA,EACtB,MAAM;AAAA,IACL,WAAW,CAAC,iBAAiB;AAAA,IAC7B,UAAU,CAAC,WAAW;AAAA,EACvB;AACD;AAEO,MAAM,kBAAc,0CAAqB,gBAAgB,UAAU;AAE1E,eAAsB,QAAiC,GAA0C;AAChG,QAAM,cAAc,KAAK,iBAAiB,eAAe,GAAG,IAAI,EAAE,cAAc,KAAK,CAAC;AAEtF,QAAM,WAAW,MAAM,4BAAW,KAAK,MAAM,UAAU,eAAe,WAAW,IAAI;AAAA,IACpF,SAAS;AAAA,MACR,eAAe;AAAA,IAChB;AAAA,EACD,CAAC;AAED,SAAO;AAAA,IACN;AAAA,MACC,MAAM;AAAA,MACN,YAAY,EAAE,MAAM,EAAE;AAAA,IACvB;AAAA,EACD;AACD;","names":[]}
package/dist/nodes/vendors/OpenAi/actions/assistant/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../../nodes/vendors/OpenAi/actions/assistant/index.ts"],"sourcesContent":["import type { INodeProperties } from 'n8n-workflow';\n\nimport * as create from './create.operation';\nimport * as deleteAssistant from './deleteAssistant.operation';\nimport * as list from './list.operation';\nimport * as message from './message.operation';\nimport * as update from './update.operation';\n\nexport { create, deleteAssistant, message, list, update };\n\nexport const description: INodeProperties[] = [\n\t{\n\t\tdisplayName: 'Operation',\n\t\tname: 'operation',\n\t\ttype: 'options',\n\t\tnoDataExpression: true,\n\t\toptions: [\n\t\t\t{\n\t\t\t\tname: 'Create an Assistant',\n\t\t\t\tvalue: 'create',\n\t\t\t\taction: 'Create an assistant',\n\t\t\t\tdescription: 'Create a new assistant',\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: 'Delete an Assistant',\n\t\t\t\tvalue: 'deleteAssistant',\n\t\t\t\taction: 'Delete an assistant',\n\t\t\t\tdescription: 'Delete an assistant from the account',\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: 'List Assistants',\n\t\t\t\tvalue: 'list',\n\t\t\t\taction: 'List assistants',\n\t\t\t\tdescription: 'List assistants in the organization',\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: 'Message an Assistant',\n\t\t\t\tvalue: 'message',\n\t\t\t\taction: 'Message an assistant',\n\t\t\t\tdescription: 'Send messages to an assistant',\n\t\t\t},\n\t\t\t{\n\t\t\t\tname: 'Update an Assistant',\n\t\t\t\tvalue: 'update',\n\t\t\t\taction: 'Update an assistant',\n\t\t\t\tdescription: 'Update an existing assistant',\n\t\t\t},\n\t\t],\n\t\tdefault: 'message',\n\t\tdisplayOptions: {\n\t\t\tshow: {\n\t\t\t\tresource: ['assistant'],\n\t\t\t},\n\t\t},\n\t},\n\n\t...create.description,\n\t...deleteAssistant.description,\n\t...message.description,\n\t...list.description,\n\t...update.description,\n];\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,aAAwB;AACxB,sBAAiC;AACjC,WAAsB;AACtB,cAAyB;AACzB,aAAwB;AAIjB,MAAM,cAAiC;AAAA,EAC7C;AAAA,IACC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA,IACN,kBAAkB;AAAA,IAClB,SAAS;AAAA,MACR;AAAA,QACC,MAAM;AAAA,QACN,OAAO;AAAA,QACP,QAAQ;AAAA,QACR,aAAa;AAAA,MACd;AAAA,MACA;AAAA,QACC,MAAM;AAAA,QACN,OAAO;AAAA,QACP,QAAQ;AAAA,QACR,aAAa;AAAA,MACd;AAAA,MACA;AAAA,QACC,MAAM;AAAA,QACN,OAAO;AAAA,QACP,QAAQ;AAAA,QACR,aAAa;AAAA,MACd;AAAA,MACA;AAAA,QACC,MAAM;AAAA,QACN,OAAO;AAAA,QACP,QAAQ;AAAA,QACR,aAAa;AAAA,MACd;AAAA,MACA;AAAA,QACC,MAAM;AAAA,QACN,OAAO;AAAA,QACP,QAAQ;AAAA,QACR,aAAa;AAAA,MACd;AAAA,IACD;AAAA,IACA,SAAS;AAAA,IACT,gBAAgB;AAAA,MACf,MAAM;AAAA,QACL,UAAU,CAAC,WAAW;AAAA,MACvB;AAAA,IACD;AAAA,EACD;AAAA,EAEA,GAAG,OAAO;AAAA,EACV,GAAG,gBAAgB;AAAA,EACnB,GAAG,QAAQ;AAAA,EACX,GAAG,KAAK;AAAA,EACR,GAAG,OAAO;AACX;","names":[]}
package/dist/nodes/vendors/OpenAi/actions/assistant/list.operation.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../../nodes/vendors/OpenAi/actions/assistant/list.operation.ts"],"sourcesContent":["import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';\nimport { updateDisplayOptions } from 'n8n-workflow';\n\nimport { apiRequest } from '../../transport';\n\nconst properties: INodeProperties[] = [\n\t{\n\t\tdisplayName: 'Simplify Output',\n\t\tname: 'simplify',\n\t\ttype: 'boolean',\n\t\tdefault: true,\n\t\tdescription: 'Whether to return a simplified version of the response instead of the raw data',\n\t},\n];\n\nconst displayOptions = {\n\tshow: {\n\t\toperation: ['list'],\n\t\tresource: ['assistant'],\n\t},\n};\n\nexport const description = updateDisplayOptions(displayOptions, properties);\n\nexport async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {\n\tconst returnData: INodeExecutionData[] = [];\n\n\tlet has_more = true;\n\tlet after: string | undefined;\n\n\tdo {\n\t\tconst response = await apiRequest.call(this, 'GET', '/assistants', {\n\t\t\theaders: {\n\t\t\t\t'OpenAI-Beta': 'assistants=v2',\n\t\t\t},\n\t\t\tqs: {\n\t\t\t\tlimit: 100,\n\t\t\t\tafter,\n\t\t\t},\n\t\t});\n\n\t\tfor (const assistant of response.data || []) {\n\t\t\ttry {\n\t\t\t\tassistant.created_at = new Date(assistant.created_at * 1000).toISOString();\n\t\t\t} catch (error) {}\n\n\t\t\treturnData.push({ json: assistant, pairedItem: { item: i } });\n\t\t}\n\n\t\thas_more = response.has_more;\n\n\t\tif (has_more) {\n\t\t\tafter = response.last_id as string;\n\t\t} else {\n\t\t\tbreak;\n\t\t}\n\t} while (has_more);\n\n\tconst simplify = this.getNodeParameter('simplify', i) as boolean;\n\n\tif (simplify) {\n\t\treturn returnData.map((item) => {\n\t\t\tconst { id, name, model } = item.json;\n\t\t\treturn {\n\t\t\t\tjson: {\n\t\t\t\t\tid,\n\t\t\t\t\tname,\n\t\t\t\t\tmodel,\n\t\t\t\t},\n\t\t\t\tpairedItem: { item: i },\n\t\t\t};\n\t\t});\n\t}\n\n\treturn returnData;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,0BAAqC;AAErC,uBAA2B;AAE3B,MAAM,aAAgC;AAAA,EACrC;AAAA,IACC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,EACd;AACD;AAEA,MAAM,iBAAiB;AAAA,EACtB,MAAM;AAAA,IACL,WAAW,CAAC,MAAM;AAAA,IAClB,UAAU,CAAC,WAAW;AAAA,EACvB;AACD;AAEO,MAAM,kBAAc,0CAAqB,gBAAgB,UAAU;AAE1E,eAAsB,QAAiC,GAA0C;AAChG,QAAM,aAAmC,CAAC;AAE1C,MAAI,WAAW;AACf,MAAI;AAEJ,KAAG;AACF,UAAM,WAAW,MAAM,4BAAW,KAAK,MAAM,OAAO,eAAe;AAAA,MAClE,SAAS;AAAA,QACR,eAAe;AAAA,MAChB;AAAA,MACA,IAAI;AAAA,QACH,OAAO;AAAA,QACP;AAAA,MACD;AAAA,IACD,CAAC;AAED,eAAW,aAAa,SAAS,QAAQ,CAAC,GAAG;AAC5C,UAAI;AACH,kBAAU,aAAa,IAAI,KAAK,UAAU,aAAa,GAAI,EAAE,YAAY;AAAA,MAC1E,SAAS,OAAO;AAAA,MAAC;AAEjB,iBAAW,KAAK,EAAE,MAAM,WAAW,YAAY,EAAE,MAAM,EAAE,EAAE,CAAC;AAAA,IAC7D;AAEA,eAAW,SAAS;AAEpB,QAAI,UAAU;AACb,cAAQ,SAAS;AAAA,IAClB,OAAO;AACN;AAAA,IACD;AAAA,EACD,SAAS;AAET,QAAM,WAAW,KAAK,iBAAiB,YAAY,CAAC;AAEpD,MAAI,UAAU;AACb,WAAO,WAAW,IAAI,CAAC,SAAS;AAC/B,YAAM,EAAE,IAAI,MAAM,MAAM,IAAI,KAAK;AACjC,aAAO;AAAA,QACN,MAAM;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,QACA,YAAY,EAAE,MAAM,EAAE;AAAA,MACvB;AAAA,IACD,CAAC;AAAA,EACF;AAEA,SAAO;AACR;","names":[]}