@n8n/n8n-nodes-langchain 1.86.0 → 1.87.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/credentials/AnthropicApi.credentials.js +1 -0
- package/dist/credentials/AnthropicApi.credentials.js.map +1 -0
- package/dist/credentials/AzureOpenAiApi.credentials.js +1 -0
- package/dist/credentials/AzureOpenAiApi.credentials.js.map +1 -0
- package/dist/credentials/CohereApi.credentials.js +1 -0
- package/dist/credentials/CohereApi.credentials.js.map +1 -0
- package/dist/credentials/DeepSeekApi.credentials.js +1 -0
- package/dist/credentials/DeepSeekApi.credentials.js.map +1 -0
- package/dist/credentials/GooglePalmApi.credentials.js +1 -0
- package/dist/credentials/GooglePalmApi.credentials.js.map +1 -0
- package/dist/credentials/GroqApi.credentials.js +1 -0
- package/dist/credentials/GroqApi.credentials.js.map +1 -0
- package/dist/credentials/HuggingFaceApi.credentials.js +1 -0
- package/dist/credentials/HuggingFaceApi.credentials.js.map +1 -0
- package/dist/credentials/MistralCloudApi.credentials.js +1 -0
- package/dist/credentials/MistralCloudApi.credentials.js.map +1 -0
- package/dist/credentials/MotorheadApi.credentials.js +1 -0
- package/dist/credentials/MotorheadApi.credentials.js.map +1 -0
- package/dist/credentials/OllamaApi.credentials.js +1 -0
- package/dist/credentials/OllamaApi.credentials.js.map +1 -0
- package/dist/credentials/OpenRouterApi.credentials.js +1 -0
- package/dist/credentials/OpenRouterApi.credentials.js.map +1 -0
- package/dist/credentials/PineconeApi.credentials.js +1 -0
- package/dist/credentials/PineconeApi.credentials.js.map +1 -0
- package/dist/credentials/QdrantApi.credentials.js +1 -0
- package/dist/credentials/QdrantApi.credentials.js.map +1 -0
- package/dist/credentials/SerpApi.credentials.js +1 -0
- package/dist/credentials/SerpApi.credentials.js.map +1 -0
- package/dist/credentials/WolframAlphaApi.credentials.js +1 -0
- package/dist/credentials/WolframAlphaApi.credentials.js.map +1 -0
- package/dist/credentials/XAiApi.credentials.js +1 -0
- package/dist/credentials/XAiApi.credentials.js.map +1 -0
- package/dist/credentials/XataApi.credentials.js +1 -0
- package/dist/credentials/XataApi.credentials.js.map +1 -0
- package/dist/credentials/ZepApi.credentials.js +1 -0
- package/dist/credentials/ZepApi.credentials.js.map +1 -0
- package/dist/known/nodes.json +12 -0
- package/dist/nodes/agents/Agent/Agent.node.js +1 -0
- package/dist/nodes/agents/Agent/Agent.node.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/prompt.js +1 -0
- package/dist/nodes/agents/Agent/agents/ConversationalAgent/prompt.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/prompt.js +1 -0
- package/dist/nodes/agents/Agent/agents/OpenAiFunctionsAgent/prompt.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/prompt.js +1 -0
- package/dist/nodes/agents/Agent/agents/PlanAndExecuteAgent/prompt.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/prompt.js +1 -0
- package/dist/nodes/agents/Agent/agents/ReActAgent/prompt.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/prompts.js +1 -0
- package/dist/nodes/agents/Agent/agents/SqlAgent/other/prompts.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/description.js +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/description.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/execute.js +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/execute.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/prompt.js +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/prompt.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/utils.js +1 -0
- package/dist/nodes/agents/Agent/agents/utils.js.map +1 -0
- package/dist/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.js +1 -0
- package/dist/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.js.map +1 -0
- package/dist/nodes/agents/OpenAiAssistant/utils.js +1 -0
- package/dist/nodes/agents/OpenAiAssistant/utils.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/ChainLlm.node.js +1 -0
- package/dist/nodes/chains/ChainLLM/ChainLlm.node.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/chainExecutor.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/chainExecutor.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/config.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/config.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/imageUtils.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/imageUtils.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/index.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/index.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/promptUtils.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/promptUtils.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/responseFormatter.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/responseFormatter.js.map +1 -0
- package/dist/nodes/chains/ChainLLM/methods/types.js +1 -0
- package/dist/nodes/chains/ChainLLM/methods/types.js.map +1 -0
- package/dist/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.js +1 -0
- package/dist/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.js.map +1 -0
- package/dist/nodes/chains/ChainSummarization/ChainSummarization.node.js +1 -0
- package/dist/nodes/chains/ChainSummarization/ChainSummarization.node.js.map +1 -0
- package/dist/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.js +1 -0
- package/dist/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.js.map +1 -0
- package/dist/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.js +1 -0
- package/dist/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.js.map +1 -0
- package/dist/nodes/chains/ChainSummarization/helpers.js +1 -0
- package/dist/nodes/chains/ChainSummarization/helpers.js.map +1 -0
- package/dist/nodes/chains/ChainSummarization/prompt.js +1 -0
- package/dist/nodes/chains/ChainSummarization/prompt.js.map +1 -0
- package/dist/nodes/chains/InformationExtractor/InformationExtractor.node.js +1 -0
- package/dist/nodes/chains/InformationExtractor/InformationExtractor.node.js.map +1 -0
- package/dist/nodes/chains/InformationExtractor/helpers.js +1 -0
- package/dist/nodes/chains/InformationExtractor/helpers.js.map +1 -0
- package/dist/nodes/chains/InformationExtractor/types.js +1 -0
- package/dist/nodes/chains/InformationExtractor/types.js.map +1 -0
- package/dist/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.js +1 -0
- package/dist/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.js.map +1 -0
- package/dist/nodes/chains/TextClassifier/TextClassifier.node.js +1 -0
- package/dist/nodes/chains/TextClassifier/TextClassifier.node.js.map +1 -0
- package/dist/nodes/code/Code.node.js +1 -0
- package/dist/nodes/code/Code.node.js.map +1 -0
- package/dist/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.js +1 -0
- package/dist/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.js.map +1 -0
- package/dist/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.js +1 -0
- package/dist/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.js.map +1 -0
- package/dist/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.js +1 -0
- package/dist/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.js.map +1 -0
- package/dist/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.js +1 -0
- package/dist/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsGoogleVertex/EmbeddingsGoogleVertex.node.js +166 -0
- package/dist/nodes/embeddings/EmbeddingsGoogleVertex/EmbeddingsGoogleVertex.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsGoogleVertex/google.svg +1 -0
- package/dist/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.js.map +1 -0
- package/dist/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.js +1 -0
- package/dist/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.js.map +1 -0
- package/dist/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.js +1 -0
- package/dist/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.js.map +1 -0
- package/dist/nodes/llms/LMChatAnthropic/methods/searchModels.js +1 -0
- package/dist/nodes/llms/LMChatAnthropic/methods/searchModels.js.map +1 -0
- package/dist/nodes/llms/LMChatOllama/LmChatOllama.node.js +1 -0
- package/dist/nodes/llms/LMChatOllama/LmChatOllama.node.js.map +1 -0
- package/dist/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.js +1 -0
- package/dist/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.js.map +1 -0
- package/dist/nodes/llms/LMChatOpenAi/methods/loadModels.js +1 -0
- package/dist/nodes/llms/LMChatOpenAi/methods/loadModels.js.map +1 -0
- package/dist/nodes/llms/LMCohere/LmCohere.node.js +1 -0
- package/dist/nodes/llms/LMCohere/LmCohere.node.js.map +1 -0
- package/dist/nodes/llms/LMOllama/LmOllama.node.js +1 -0
- package/dist/nodes/llms/LMOllama/LmOllama.node.js.map +1 -0
- package/dist/nodes/llms/LMOllama/description.js +1 -0
- package/dist/nodes/llms/LMOllama/description.js.map +1 -0
- package/dist/nodes/llms/LMOpenAi/LmOpenAi.node.js +1 -0
- package/dist/nodes/llms/LMOpenAi/LmOpenAi.node.js.map +1 -0
- package/dist/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.js +1 -0
- package/dist/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.js.map +1 -0
- package/dist/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.js +1 -0
- package/dist/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.js.map +1 -0
- package/dist/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.js +1 -0
- package/dist/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.js.map +1 -0
- package/dist/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.js +1 -0
- package/dist/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.js.map +1 -0
- package/dist/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.js +1 -0
- package/dist/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.js.map +1 -0
- package/dist/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.js +1 -0
- package/dist/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.js.map +1 -0
- package/dist/nodes/llms/LmChatGoogleVertex/error-handling.js +1 -0
- package/dist/nodes/llms/LmChatGoogleVertex/error-handling.js.map +1 -0
- package/dist/nodes/llms/LmChatGroq/LmChatGroq.node.js +1 -0
- package/dist/nodes/llms/LmChatGroq/LmChatGroq.node.js.map +1 -0
- package/dist/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.js +1 -0
- package/dist/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.js.map +1 -0
- package/dist/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.js +1 -0
- package/dist/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.js.map +1 -0
- package/dist/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.js +1 -0
- package/dist/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.js.map +1 -0
- package/dist/nodes/llms/N8nLlmTracing.js +1 -0
- package/dist/nodes/llms/N8nLlmTracing.js.map +1 -0
- package/dist/nodes/llms/gemini-common/additional-options.js +1 -0
- package/dist/nodes/llms/gemini-common/additional-options.js.map +1 -0
- package/dist/nodes/llms/gemini-common/safety-options.js +1 -0
- package/dist/nodes/llms/gemini-common/safety-options.js.map +1 -0
- package/dist/nodes/llms/n8nDefaultFailedAttemptHandler.js +1 -0
- package/dist/nodes/llms/n8nDefaultFailedAttemptHandler.js.map +1 -0
- package/dist/nodes/llms/n8nLlmFailedAttemptHandler.js +1 -0
- package/dist/nodes/llms/n8nLlmFailedAttemptHandler.js.map +1 -0
- package/dist/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.js +1 -0
- package/dist/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.js.map +1 -0
- package/dist/nodes/memory/MemoryChatRetriever/MemoryChatRetriever.node.js +1 -0
- package/dist/nodes/memory/MemoryChatRetriever/MemoryChatRetriever.node.js.map +1 -0
- package/dist/nodes/memory/MemoryManager/MemoryManager.node.js +1 -0
- package/dist/nodes/memory/MemoryManager/MemoryManager.node.js.map +1 -0
- package/dist/nodes/memory/MemoryMongoDbChat/MemoryMongoDbChat.node.js +155 -0
- package/dist/nodes/memory/MemoryMongoDbChat/MemoryMongoDbChat.node.js.map +1 -0
- package/dist/nodes/memory/MemoryMongoDbChat/mongodb.dark.svg +3 -0
- package/dist/nodes/memory/MemoryMongoDbChat/mongodb.svg +3 -0
- package/dist/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.js +1 -0
- package/dist/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.js.map +1 -0
- package/dist/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.js +1 -0
- package/dist/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.js.map +1 -0
- package/dist/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.js +1 -0
- package/dist/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.js.map +1 -0
- package/dist/nodes/memory/MemoryXata/MemoryXata.node.js +1 -0
- package/dist/nodes/memory/MemoryXata/MemoryXata.node.js.map +1 -0
- package/dist/nodes/memory/MemoryZep/MemoryZep.node.js +1 -0
- package/dist/nodes/memory/MemoryZep/MemoryZep.node.js.map +1 -0
- package/dist/nodes/memory/descriptions.js +1 -0
- package/dist/nodes/memory/descriptions.js.map +1 -0
- package/dist/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.js +1 -0
- package/dist/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.js.map +1 -0
- package/dist/nodes/output_parser/OutputParserAutofixing/prompt.js +1 -0
- package/dist/nodes/output_parser/OutputParserAutofixing/prompt.js.map +1 -0
- package/dist/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.js +1 -0
- package/dist/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.js.map +1 -0
- package/dist/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.js +1 -0
- package/dist/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.js.map +1 -0
- package/dist/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.js +1 -0
- package/dist/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.js.map +1 -0
- package/dist/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.js +1 -0
- package/dist/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.js.map +1 -0
- package/dist/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.js +1 -0
- package/dist/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.js.map +1 -0
- package/dist/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.js +1 -0
- package/dist/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.js.map +1 -0
- package/dist/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.js +1 -0
- package/dist/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.js.map +1 -0
- package/dist/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.js +1 -0
- package/dist/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.js.map +1 -0
- package/dist/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.js +1 -0
- package/dist/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.js.map +1 -0
- package/dist/nodes/tools/ToolCalculator/ToolCalculator.node.js +1 -0
- package/dist/nodes/tools/ToolCalculator/ToolCalculator.node.js.map +1 -0
- package/dist/nodes/tools/ToolCode/ToolCode.node.js +1 -0
- package/dist/nodes/tools/ToolCode/ToolCode.node.js.map +1 -0
- package/dist/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.js +1 -0
- package/dist/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.js.map +1 -0
- package/dist/nodes/tools/ToolHttpRequest/descriptions.js +1 -0
- package/dist/nodes/tools/ToolHttpRequest/descriptions.js.map +1 -0
- package/dist/nodes/tools/ToolHttpRequest/interfaces.js +1 -0
- package/dist/nodes/tools/ToolHttpRequest/interfaces.js.map +1 -0
- package/dist/nodes/tools/ToolHttpRequest/utils.js +3 -2
- package/dist/nodes/tools/ToolHttpRequest/utils.js.map +1 -0
- package/dist/nodes/tools/ToolSerpApi/ToolSerpApi.node.js +1 -0
- package/dist/nodes/tools/ToolSerpApi/ToolSerpApi.node.js.map +1 -0
- package/dist/nodes/tools/ToolThink/ToolThink.node.js +94 -0
- package/dist/nodes/tools/ToolThink/ToolThink.node.js.map +1 -0
- package/dist/nodes/tools/ToolVectorStore/ToolVectorStore.node.js +1 -0
- package/dist/nodes/tools/ToolVectorStore/ToolVectorStore.node.js.map +1 -0
- package/dist/nodes/tools/ToolWikipedia/ToolWikipedia.node.js +1 -0
- package/dist/nodes/tools/ToolWikipedia/ToolWikipedia.node.js.map +1 -0
- package/dist/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.js +1 -0
- package/dist/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/ToolWorkflow.node.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/ToolWorkflow.node.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v1/ToolWorkflowV1.node.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v1/ToolWorkflowV1.node.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v1/versionDescription.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v1/versionDescription.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/ToolWorkflowV2.node.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/ToolWorkflowV2.node.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/methods/index.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/methods/index.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/methods/localResourceMapping.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/methods/localResourceMapping.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/utils/WorkflowToolService.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/utils/WorkflowToolService.js.map +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/versionDescription.js +1 -0
- package/dist/nodes/tools/ToolWorkflow/v2/versionDescription.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/ChatTrigger.node.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/ChatTrigger.node.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/GenericFunctions.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/GenericFunctions.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/constants.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/constants.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/error.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/error.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/templates.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/templates.js.map +1 -0
- package/dist/nodes/trigger/ChatTrigger/types.js +1 -0
- package/dist/nodes/trigger/ChatTrigger/types.js.map +1 -0
- package/dist/nodes/trigger/ManualChatTrigger/ManualChatTrigger.node.js +1 -0
- package/dist/nodes/trigger/ManualChatTrigger/ManualChatTrigger.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemoryInsert/VectorStoreInMemoryInsert.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemoryInsert/VectorStoreInMemoryInsert.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreMongoDBAtlas/VectorStoreMongoDBAtlas.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreMongoDBAtlas/VectorStoreMongoDBAtlas.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.js +1 -0
- package/dist/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.js +1 -0
- package/dist/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStorePineconeInsert/VectorStorePineconeInsert.node.js +1 -0
- package/dist/nodes/vector_store/VectorStorePineconeInsert/VectorStorePineconeInsert.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.js +1 -0
- package/dist/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabaseInsert/VectorStoreSupabaseInsert.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabaseInsert/VectorStoreSupabaseInsert.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.js +1 -0
- package/dist/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.js.map +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/MemoryCalculator.js +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/MemoryCalculator.js.map +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/MemoryVectorStoreManager.js +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/MemoryVectorStoreManager.js.map +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/StoreCleanupService.js +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/StoreCleanupService.js.map +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/config.js +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/config.js.map +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/types.js +1 -0
- package/dist/nodes/vector_store/shared/MemoryManager/types.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/constants.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/constants.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/createVectorStoreNode.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/createVectorStoreNode.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/index.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/index.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/insertOperation.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/insertOperation.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/loadOperation.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/loadOperation.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveAsToolOperation.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveAsToolOperation.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveOperation.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveOperation.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/updateOperation.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/updateOperation.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/types.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/types.js.map +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/utils.js +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/utils.js.map +1 -0
- package/dist/nodes/vector_store/shared/descriptions.js +1 -0
- package/dist/nodes/vector_store/shared/descriptions.js.map +1 -0
- package/dist/nodes/vector_store/shared/processDocuments.js +1 -0
- package/dist/nodes/vector_store/shared/processDocuments.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/OpenAi.node.js +1 -0
- package/dist/nodes/vendors/OpenAi/OpenAi.node.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/create.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/create.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/list.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/list.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/message.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/message.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/update.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/assistant/update.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/generate.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/generate.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/transcribe.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/transcribe.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/translate.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/audio/translate.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/descriptions.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/descriptions.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/deleteFile.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/deleteFile.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/list.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/list.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/upload.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/file/upload.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/analyze.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/analyze.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/generate.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/generate.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/image/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/node.type.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/node.type.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/router.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/router.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/classify.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/classify.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/message.operation.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/text/message.operation.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/versionDescription.js +1 -0
- package/dist/nodes/vendors/OpenAi/actions/versionDescription.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/constants.js +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/constants.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/error-handling.js +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/error-handling.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/interfaces.js +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/interfaces.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/utils.js +1 -0
- package/dist/nodes/vendors/OpenAi/helpers/utils.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/methods/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/methods/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/methods/listSearch.js +1 -0
- package/dist/nodes/vendors/OpenAi/methods/listSearch.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/methods/loadOptions.js +1 -0
- package/dist/nodes/vendors/OpenAi/methods/loadOptions.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/transport/index.js +1 -0
- package/dist/nodes/vendors/OpenAi/transport/index.js.map +1 -0
- package/dist/types/nodes.json +3 -0
- package/dist/types/types.js +1 -0
- package/dist/types/types.js.map +1 -0
- package/dist/types/zod.types.js +1 -0
- package/dist/types/zod.types.js.map +1 -0
- package/dist/utils/N8nBinaryLoader.js +1 -0
- package/dist/utils/N8nBinaryLoader.js.map +1 -0
- package/dist/utils/N8nJsonLoader.js +1 -0
- package/dist/utils/N8nJsonLoader.js.map +1 -0
- package/dist/utils/N8nTool.js +1 -0
- package/dist/utils/N8nTool.js.map +1 -0
- package/dist/utils/descriptions.js +1 -0
- package/dist/utils/descriptions.js.map +1 -0
- package/dist/utils/helpers.js +1 -0
- package/dist/utils/helpers.js.map +1 -0
- package/dist/utils/logWrapper.js +1 -0
- package/dist/utils/logWrapper.js.map +1 -0
- package/dist/utils/output_parsers/N8nItemListOutputParser.js +1 -0
- package/dist/utils/output_parsers/N8nItemListOutputParser.js.map +1 -0
- package/dist/utils/output_parsers/N8nOutputFixingParser.js +1 -0
- package/dist/utils/output_parsers/N8nOutputFixingParser.js.map +1 -0
- package/dist/utils/output_parsers/N8nOutputParser.js +1 -0
- package/dist/utils/output_parsers/N8nOutputParser.js.map +1 -0
- package/dist/utils/output_parsers/N8nStructuredOutputParser.js +1 -0
- package/dist/utils/output_parsers/N8nStructuredOutputParser.js.map +1 -0
- package/dist/utils/output_parsers/prompt.js +1 -0
- package/dist/utils/output_parsers/prompt.js.map +1 -0
- package/dist/utils/schemaParsing.js +1 -0
- package/dist/utils/schemaParsing.js.map +1 -0
- package/dist/utils/sharedFields.js +1 -0
- package/dist/utils/sharedFields.js.map +1 -0
- package/dist/utils/tracing.js +1 -0
- package/dist/utils/tracing.js.map +1 -0
- package/package.json +8 -5
@@ -0,0 +1,166 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var EmbeddingsGoogleVertex_node_exports = {};
+__export(EmbeddingsGoogleVertex_node_exports, {
+  EmbeddingsGoogleVertex: () => EmbeddingsGoogleVertex
+});
+module.exports = __toCommonJS(EmbeddingsGoogleVertex_node_exports);
+var import_resource_manager = require("@google-cloud/resource-manager");
+var import_google_vertexai = require("@langchain/google-vertexai");
+var import_utilities = require("n8n-nodes-base/dist/utils/utilities");
+var import_n8n_workflow = require("n8n-workflow");
+var import_logWrapper = require("../../../utils/logWrapper");
+var import_sharedFields = require("../../../utils/sharedFields");
+class EmbeddingsGoogleVertex {
+  constructor() {
+    this.methods = {
+      listSearch: {
+        async gcpProjectsList() {
+          const results = [];
+          const credentials = await this.getCredentials("googleApi");
+          const privateKey = (0, import_utilities.formatPrivateKey)(credentials.privateKey);
+          const email = credentials.email.trim();
+          const client = new import_resource_manager.ProjectsClient({
+            credentials: {
+              client_email: email,
+              private_key: privateKey
+            }
+          });
+          const [projects] = await client.searchProjects();
+          for (const project of projects) {
+            if (project.projectId) {
+              results.push({
+                name: project.displayName ?? project.projectId,
+                value: project.projectId
+              });
+            }
+          }
+          return { results };
+        }
+      }
+    };
+    this.description = {
+      displayName: "Embeddings Google Vertex",
+      name: "embeddingsGoogleVertex",
+      icon: "file:google.svg",
+      group: ["transform"],
+      version: 1,
+      description: "Use Google Vertex Embeddings",
+      defaults: {
+        name: "Embeddings Google Vertex"
+      },
+      requestDefaults: {
+        ignoreHttpStatusErrors: true,
+        baseURL: "={{ $credentials.host }}"
+      },
+      credentials: [
+        {
+          name: "googleApi",
+          required: true
+        }
+      ],
+      codex: {
+        categories: ["AI"],
+        subcategories: {
+          AI: ["Embeddings"]
+        },
+        resources: {
+          primaryDocumentation: [
+            {
+              url: "https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsgooglevertex/"
+            }
+          ]
+        }
+      },
+      // eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
+      inputs: [],
+      // eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
+      outputs: [import_n8n_workflow.NodeConnectionTypes.AiEmbedding],
+      outputNames: ["Embeddings"],
+      properties: [
+        (0, import_sharedFields.getConnectionHintNoticeField)([import_n8n_workflow.NodeConnectionTypes.AiVectorStore]),
+        {
+          displayName: 'Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. The default model is using 768-dimensional embeddings. You can find available models <a href="https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api">here</a>.',
+          name: "notice",
+          type: "notice",
+          default: ""
+        },
+        {
+          displayName: "Project ID",
+          name: "projectId",
+          type: "resourceLocator",
+          default: { mode: "list", value: "" },
+          required: true,
+          description: "Select or enter your Google Cloud project ID",
+          modes: [
+            {
+              displayName: "From List",
+              name: "list",
+              type: "list",
+              typeOptions: {
+                searchListMethod: "gcpProjectsList"
+              }
+            },
+            {
+              displayName: "ID",
+              name: "id",
+              type: "string"
+            }
+          ]
+        },
+        {
+          displayName: "Model Name",
+          name: "modelName",
+          type: "string",
+          description: 'The model which will generate the embeddings. <a href="https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api">Learn more</a>.',
+          default: "text-embedding-005"
+        }
+      ]
+    };
+  }
+  async supplyData(itemIndex) {
+    const credentials = await this.getCredentials("googleApi");
+    const privateKey = (0, import_utilities.formatPrivateKey)(credentials.privateKey);
+    const email = credentials.email.trim();
+    const region = credentials.region;
+    const modelName = this.getNodeParameter("modelName", itemIndex);
+    const projectId = this.getNodeParameter("projectId", itemIndex, "", {
+      extractValue: true
+    });
+    const embeddings = new import_google_vertexai.VertexAIEmbeddings({
+      authOptions: {
+        projectId,
+        credentials: {
+          client_email: email,
+          private_key: privateKey
+        }
+      },
+      location: region,
+      model: modelName
+    });
+    return {
+      response: (0, import_logWrapper.logWrapper)(embeddings, this)
+    };
+  }
+}
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  EmbeddingsGoogleVertex
+});
+//# sourceMappingURL=EmbeddingsGoogleVertex.node.js.map
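For reference, the options object that `supplyData` assembles in the new node above maps one-to-one onto a standalone `@langchain/google-vertexai` call. A minimal TypeScript sketch, assuming a hypothetical project ID, region, service-account email, and a `GCP_PRIVATE_KEY` environment variable in place of the node's `googleApi` credentials:

```ts
import { VertexAIEmbeddings } from '@langchain/google-vertexai';

// Hypothetical values; the node reads these from its 'googleApi' credentials
// and the 'projectId' / 'modelName' node parameters.
const embeddings = new VertexAIEmbeddings({
	authOptions: {
		projectId: 'my-gcp-project',
		credentials: {
			client_email: 'svc-embeddings@my-gcp-project.iam.gserviceaccount.com',
			private_key: process.env.GCP_PRIVATE_KEY ?? '',
		},
	},
	location: 'us-central1', // the node passes credentials.region here
	model: 'text-embedding-005', // the node's default modelName
});

async function main() {
	// embedQuery returns a single vector; the default model yields 768 dimensions.
	const vector = await embeddings.embedQuery('hello world');
	console.log(vector.length);
}

void main();
```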
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../nodes/embeddings/EmbeddingsGoogleVertex/EmbeddingsGoogleVertex.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\nimport { ProjectsClient } from '@google-cloud/resource-manager';\nimport { VertexAIEmbeddings } from '@langchain/google-vertexai';\nimport { formatPrivateKey } from 'n8n-nodes-base/dist/utils/utilities';\nimport { NodeConnectionTypes } from 'n8n-workflow';\nimport type {\n\tILoadOptionsFunctions,\n\tINodeType,\n\tINodeTypeDescription,\n\tISupplyDataFunctions,\n\tSupplyData,\n} from 'n8n-workflow';\n\nimport { logWrapper } from '@utils/logWrapper';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nexport class EmbeddingsGoogleVertex implements INodeType {\n\tmethods = {\n\t\tlistSearch: {\n\t\t\tasync gcpProjectsList(this: ILoadOptionsFunctions) {\n\t\t\t\tconst results: Array<{ name: string; value: string }> = [];\n\n\t\t\t\tconst credentials = await this.getCredentials('googleApi');\n\t\t\t\tconst privateKey = formatPrivateKey(credentials.privateKey as string);\n\t\t\t\tconst email = (credentials.email as string).trim();\n\n\t\t\t\tconst client = new ProjectsClient({\n\t\t\t\t\tcredentials: {\n\t\t\t\t\t\tclient_email: email,\n\t\t\t\t\t\tprivate_key: privateKey,\n\t\t\t\t\t},\n\t\t\t\t});\n\n\t\t\t\tconst [projects] = await client.searchProjects();\n\n\t\t\t\tfor (const project of projects) {\n\t\t\t\t\tif (project.projectId) {\n\t\t\t\t\t\tresults.push({\n\t\t\t\t\t\t\tname: project.displayName ?? project.projectId,\n\t\t\t\t\t\t\tvalue: project.projectId,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\treturn { results };\n\t\t\t},\n\t\t},\n\t};\n\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Embeddings Google Vertex',\n\t\tname: 'embeddingsGoogleVertex',\n\t\ticon: 'file:google.svg',\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'Use Google Vertex Embeddings',\n\t\tdefaults: {\n\t\t\tname: 'Embeddings Google Vertex',\n\t\t},\n\t\trequestDefaults: {\n\t\t\tignoreHttpStatusErrors: true,\n\t\t\tbaseURL: '={{ $credentials.host }}',\n\t\t},\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'googleApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Embeddings'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsgooglevertex/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiEmbedding],\n\t\toutputNames: ['Embeddings'],\n\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiVectorStore]),\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. The default model is using 768-dimensional embeddings. 
You can find available models <a href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api\">here</a>.',\n\t\t\t\tname: 'notice',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Project ID',\n\t\t\t\tname: 'projectId',\n\t\t\t\ttype: 'resourceLocator',\n\t\t\t\tdefault: { mode: 'list', value: '' },\n\t\t\t\trequired: true,\n\t\t\t\tdescription: 'Select or enter your Google Cloud project ID',\n\t\t\t\tmodes: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'From List',\n\t\t\t\t\t\tname: 'list',\n\t\t\t\t\t\ttype: 'list',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tsearchListMethod: 'gcpProjectsList',\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'ID',\n\t\t\t\t\t\tname: 'id',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Model Name',\n\t\t\t\tname: 'modelName',\n\t\t\t\ttype: 'string',\n\t\t\t\tdescription:\n\t\t\t\t\t'The model which will generate the embeddings. <a href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api\">Learn more</a>.',\n\t\t\t\tdefault: 'text-embedding-005',\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials('googleApi');\n\t\tconst privateKey = formatPrivateKey(credentials.privateKey as string);\n\t\tconst email = (credentials.email as string).trim();\n\t\tconst region = credentials.region as string;\n\n\t\tconst modelName = this.getNodeParameter('modelName', itemIndex) as string;\n\n\t\tconst projectId = this.getNodeParameter('projectId', itemIndex, '', {\n\t\t\textractValue: true,\n\t\t}) as string;\n\n\t\tconst embeddings = new VertexAIEmbeddings({\n\t\t\tauthOptions: {\n\t\t\t\tprojectId,\n\t\t\t\tcredentials: {\n\t\t\t\t\tclient_email: email,\n\t\t\t\t\tprivate_key: privateKey,\n\t\t\t\t},\n\t\t\t},\n\t\t\tlocation: region,\n\t\t\tmodel: modelName,\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: logWrapper(embeddings, 
this),\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,8BAA+B;AAC/B,6BAAmC;AACnC,uBAAiC;AACjC,0BAAoC;AASpC,wBAA2B;AAC3B,0BAA6C;AAEtC,MAAM,uBAA4C;AAAA,EAAlD;AACN,mBAAU;AAAA,MACT,YAAY;AAAA,QACX,MAAM,kBAA6C;AAClD,gBAAM,UAAkD,CAAC;AAEzD,gBAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AACzD,gBAAM,iBAAa,mCAAiB,YAAY,UAAoB;AACpE,gBAAM,QAAS,YAAY,MAAiB,KAAK;AAEjD,gBAAM,SAAS,IAAI,uCAAe;AAAA,YACjC,aAAa;AAAA,cACZ,cAAc;AAAA,cACd,aAAa;AAAA,YACd;AAAA,UACD,CAAC;AAED,gBAAM,CAAC,QAAQ,IAAI,MAAM,OAAO,eAAe;AAE/C,qBAAW,WAAW,UAAU;AAC/B,gBAAI,QAAQ,WAAW;AACtB,sBAAQ,KAAK;AAAA,gBACZ,MAAM,QAAQ,eAAe,QAAQ;AAAA,gBACrC,OAAO,QAAQ;AAAA,cAChB,CAAC;AAAA,YACF;AAAA,UACD;AAEA,iBAAO,EAAE,QAAQ;AAAA,QAClB;AAAA,MACD;AAAA,IACD;AAEA,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,iBAAiB;AAAA,QAChB,wBAAwB;AAAA,QACxB,SAAS;AAAA,MACV;AAAA,MACA,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,YAAY;AAAA,QAClB;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,WAAW;AAAA,MACzC,aAAa,CAAC,YAAY;AAAA,MAE1B,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,aAAa,CAAC;AAAA,QAChE;AAAA,UACC,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,UACnC,UAAU;AAAA,UACV,aAAa;AAAA,UACb,OAAO;AAAA,YACN;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,gBACZ,kBAAkB;AAAA,cACnB;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,aACC;AAAA,UACD,SAAS;AAAA,QACV;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AACzD,UAAM,iBAAa,mCAAiB,YAAY,UAAoB;AACpE,UAAM,QAAS,YAAY,MAAiB,KAAK;AACjD,UAAM,SAAS,YAAY;AAE3B,UAAM,YAAY,KAAK,iBAAiB,aAAa,SAAS;AAE9D,UAAM,YAAY,KAAK,iBAAiB,aAAa,WAAW,IAAI;AAAA,MACnE,cAAc;AAAA,IACf,CAAC;AAED,UAAM,aAAa,IAAI,0CAAmB;AAAA,MACzC,aAAa;AAAA,QACZ;AAAA,QACA,aAAa;AAAA,UACZ,cAAc;AAAA,UACd,aAAa;AAAA,QACd;AAAA,MACD;AAAA,MACA,UAAU;AAAA,MACV,OAAO;AAAA,IACR,CAAC;AAED,WAAO;AAAA,MACN,cAAU,8BAAW,YAAY,IAAI;AAAA,IACtC;AAAA,EACD;AACD;","names":[]}
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 48 48"><defs><path id="a" d="M44.5 20H24v8.5h11.8C34.7 33.9 30.1 37 24 37c-7.2 0-13-5.8-13-13s5.8-13 13-13c3.1 0 5.9 1.1 8.1 2.9l6.4-6.4C34.6 4.1 29.6 2 24 2 11.8 2 2 11.8 2 24s9.8 22 22 22c11 0 21-8 21-22 0-1.3-.2-2.7-.5-4"/></defs><clipPath id="b"><use xlink:href="#a" overflow="visible"/></clipPath><path fill="#FBBC05" d="M0 37V11l17 13z" clip-path="url(#b)"/><path fill="#EA4335" d="m0 11 17 13 7-6.1L48 14V0H0z" clip-path="url(#b)"/><path fill="#34A853" d="m0 37 30-23 7.9 1L48 0v48H0z" clip-path="url(#b)"/><path fill="#4285F4" d="M48 48 17 24l-4-3 35-10z" clip-path="url(#b)"/></svg>
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\nimport { HuggingFaceInferenceEmbeddings } from '@langchain/community/embeddings/hf';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { logWrapper } from '@utils/logWrapper';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nexport class EmbeddingsHuggingFaceInference implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Embeddings Hugging Face Inference',\n\t\tname: 'embeddingsHuggingFaceInference',\n\t\ticon: 'file:huggingface.svg',\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'Use HuggingFace Inference Embeddings',\n\t\tdefaults: {\n\t\t\tname: 'Embeddings HuggingFace Inference',\n\t\t},\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'huggingFaceApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Embeddings'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingshuggingfaceinference/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiEmbedding],\n\t\toutputNames: ['Embeddings'],\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiVectorStore]),\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. 
The default model is using 768-dimensional embeddings.',\n\t\t\t\tname: 'notice',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Model Name',\n\t\t\t\tname: 'modelName',\n\t\t\t\ttype: 'string',\n\t\t\t\tdefault: 'sentence-transformers/distilbert-base-nli-mean-tokens',\n\t\t\t\tdescription: 'The model name to use from HuggingFace library',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Custom Inference Endpoint',\n\t\t\t\t\t\tname: 'endpointUrl',\n\t\t\t\t\t\tdefault: '',\n\t\t\t\t\t\tdescription: 'Custom endpoint URL',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tthis.logger.debug('Supply data for embeddings HF Inference');\n\t\tconst model = this.getNodeParameter(\n\t\t\t'modelName',\n\t\t\titemIndex,\n\t\t\t'sentence-transformers/distilbert-base-nli-mean-tokens',\n\t\t) as string;\n\t\tconst credentials = await this.getCredentials('huggingFaceApi');\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as object;\n\n\t\tconst embeddings = new HuggingFaceInferenceEmbeddings({\n\t\t\tapiKey: credentials.apiKey as string,\n\t\t\tmodel,\n\t\t\t...options,\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: logWrapper(embeddings, this),\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,gBAA+C;AAC/C,0BAMO;AAEP,wBAA2B;AAC3B,0BAA6C;AAEtC,MAAM,+BAAoD;AAAA,EAA1D;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,YAAY;AAAA,QAClB;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,WAAW;AAAA,MACzC,aAAa,CAAC,YAAY;AAAA,MAC1B,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,aAAa,CAAC;AAAA,QAChE;AAAA,UACC,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,aAAa;AAAA,QACd;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,SAAK,OAAO,MAAM,yCAAyC;AAC3D,UAAM,QAAQ,KAAK;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACD;AACA,UAAM,cAAc,MAAM,KAAK,eAAe,gBAAgB;AAC9D,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAE9D,UAAM,aAAa,IAAI,yCAA+B;AAAA,MACrD,QAAQ,YAAY;AAAA,MACpB;AAAA,MACA,GAAG;AAAA,IACJ,CAAC;AAED,WAAO;AAAA,MACN,cAAU,8BAAW,YAAY,IAAI;AAAA,IACtC;AAAA,EACD;AACD;","names":[]}
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\nimport type { MistralAIEmbeddingsParams } from '@langchain/mistralai';\nimport { MistralAIEmbeddings } from '@langchain/mistralai';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { logWrapper } from '@utils/logWrapper';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nexport class EmbeddingsMistralCloud implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Embeddings Mistral Cloud',\n\t\tname: 'embeddingsMistralCloud',\n\t\ticon: 'file:mistral.svg',\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'mistralCloudApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'Use Embeddings Mistral Cloud',\n\t\tdefaults: {\n\t\t\tname: 'Embeddings Mistral Cloud',\n\t\t},\n\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Embeddings'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsmistralcloud/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiEmbedding],\n\t\toutputNames: ['Embeddings'],\n\t\trequestDefaults: {\n\t\t\tignoreHttpStatusErrors: true,\n\t\t\tbaseURL: 'https://api.mistral.ai/v1',\n\t\t},\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiVectorStore]),\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'options',\n\t\t\t\tdescription:\n\t\t\t\t\t'The model which will compute the embeddings. 
<a href=\"https://docs.mistral.ai/platform/endpoints/\">Learn more</a>.',\n\t\t\t\ttypeOptions: {\n\t\t\t\t\tloadOptions: {\n\t\t\t\t\t\trouting: {\n\t\t\t\t\t\t\trequest: {\n\t\t\t\t\t\t\t\tmethod: 'GET',\n\t\t\t\t\t\t\t\turl: '/models',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\toutput: {\n\t\t\t\t\t\t\t\tpostReceive: [\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'rootProperty',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tproperty: 'data',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'filter',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tpass: \"={{ $responseItem.id.includes('embed') }}\",\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'setKeyValue',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tname: '={{ $responseItem.id }}',\n\t\t\t\t\t\t\t\t\t\t\tvalue: '={{ $responseItem.id }}',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'sort',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tkey: 'name',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trouting: {\n\t\t\t\t\tsend: {\n\t\t\t\t\t\ttype: 'body',\n\t\t\t\t\t\tproperty: 'model',\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tdefault: 'mistral-embed',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Batch Size',\n\t\t\t\t\t\tname: 'batchSize',\n\t\t\t\t\t\tdefault: 512,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2048 },\n\t\t\t\t\t\tdescription: 'Maximum number of documents to send in each request',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Strip New Lines',\n\t\t\t\t\t\tname: 'stripNewLines',\n\t\t\t\t\t\tdefault: true,\n\t\t\t\t\t\tdescription: 'Whether to strip new lines from the input text',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials('mistralCloudApi');\n\t\tconst modelName = this.getNodeParameter('model', itemIndex) as string;\n\t\tconst options = this.getNodeParameter(\n\t\t\t'options',\n\t\t\titemIndex,\n\t\t\t{},\n\t\t) as Partial<MistralAIEmbeddingsParams>;\n\n\t\tconst embeddings = new MistralAIEmbeddings({\n\t\t\tapiKey: credentials.apiKey as string,\n\t\t\tmodelName,\n\t\t\t...options,\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: logWrapper(embeddings, 
this),\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,uBAAoC;AACpC,0BAMO;AAEP,wBAA2B;AAC3B,0BAA6C;AAEtC,MAAM,uBAA4C;AAAA,EAAlD;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MAEA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,YAAY;AAAA,QAClB;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,WAAW;AAAA,MACzC,aAAa,CAAC,YAAY;AAAA,MAC1B,iBAAiB;AAAA,QAChB,wBAAwB;AAAA,QACxB,SAAS;AAAA,MACV;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,aAAa,CAAC;AAAA,QAChE;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,aACC;AAAA,UACD,aAAa;AAAA,YACZ,aAAa;AAAA,cACZ,SAAS;AAAA,gBACR,SAAS;AAAA,kBACR,QAAQ;AAAA,kBACR,KAAK;AAAA,gBACN;AAAA,gBACA,QAAQ;AAAA,kBACP,aAAa;AAAA,oBACZ;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,UAAU;AAAA,sBACX;AAAA,oBACD;AAAA,oBACA;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,MAAM;AAAA,sBACP;AAAA,oBACD;AAAA,oBACA;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,MAAM;AAAA,wBACN,OAAO;AAAA,sBACR;AAAA,oBACD;AAAA,oBACA;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,KAAK;AAAA,sBACN;AAAA,oBACD;AAAA,kBACD;AAAA,gBACD;AAAA,cACD;AAAA,YACD;AAAA,UACD;AAAA,UACA,SAAS;AAAA,YACR,MAAM;AAAA,cACL,MAAM;AAAA,cACN,UAAU;AAAA,YACX;AAAA,UACD;AAAA,UACA,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,KAAK;AAAA,cAC9B,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAAe,iBAAiB;AAC/D,UAAM,YAAY,KAAK,iBAAiB,SAAS,SAAS;AAC1D,UAAM,UAAU,KAAK;AAAA,MACpB;AAAA,MACA;AAAA,MACA,CAAC;AAAA,IACF;AAEA,UAAM,aAAa,IAAI,qCAAoB;AAAA,MAC1C,QAAQ,YAAY;AAAA,MACpB;AAAA,MACA,GAAG;AAAA,IACJ,CAAC;AAED,WAAO;AAAA,MACN,cAAU,8BAAW,YAAY,IAAI;AAAA,IACtC;AAAA,EACD;AACD;","names":[]}
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\nimport { OllamaEmbeddings } from '@langchain/ollama';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { logWrapper } from '@utils/logWrapper';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport { ollamaDescription, ollamaModel } from '../../llms/LMOllama/description';\n\nexport class EmbeddingsOllama implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Embeddings Ollama',\n\t\tname: 'embeddingsOllama',\n\t\ticon: 'file:ollama.svg',\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'Use Ollama Embeddings',\n\t\tdefaults: {\n\t\t\tname: 'Embeddings Ollama',\n\t\t},\n\t\t...ollamaDescription,\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Embeddings'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsollama/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiEmbedding],\n\t\toutputNames: ['Embeddings'],\n\t\tproperties: [getConnectionHintNoticeField([NodeConnectionTypes.AiVectorStore]), ollamaModel],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tthis.logger.debug('Supply data for embeddings Ollama');\n\t\tconst modelName = this.getNodeParameter('model', itemIndex) as string;\n\t\tconst credentials = await this.getCredentials('ollamaApi');\n\n\t\tconst embeddings = new OllamaEmbeddings({\n\t\t\tbaseUrl: credentials.baseUrl as string,\n\t\t\tmodel: modelName,\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: logWrapper(embeddings, this),\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAiC;AACjC,0BAMO;AAEP,wBAA2B;AAC3B,0BAA6C;AAE7C,yBAA+C;AAExC,MAAM,iBAAsC;AAAA,EAA5C;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,GAAG;AAAA,MACH,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,YAAY;AAAA,QAClB;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,WAAW;AAAA,MACzC,aAAa,CAAC,YAAY;AAAA,MAC1B,YAAY,KAAC,kDAA6B,CAAC,wCAAoB,aAAa,CAAC,GAAG,8BAAW;AAAA,IAC5F;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,SAAK,OAAO,MAAM,mCAAmC;AACrD,UAAM,YAAY,KAAK,iBAAiB,SAAS,SAAS;AAC1D,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,UAAM,aAAa,IAAI,+BAAiB;AAAA,MACvC,SAAS,YAAY;AAAA,MACrB,OAAO;AAAA,IACR,CAAC;AAED,WAAO;AAAA,MACN,cAAU,8BAAW,YAAY,IAAI;AAAA,IACtC;AAAA,EACD;AACD;","names":[]}
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\nimport { OpenAIEmbeddings } from '@langchain/openai';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype SupplyData,\n\ttype ISupplyDataFunctions,\n\ttype INodeProperties,\n} from 'n8n-workflow';\nimport type { ClientOptions } from 'openai';\n\nimport { logWrapper } from '@utils/logWrapper';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nconst modelParameter: INodeProperties = {\n\tdisplayName: 'Model',\n\tname: 'model',\n\ttype: 'options',\n\tdescription:\n\t\t'The model which will generate the embeddings. <a href=\"https://platform.openai.com/docs/models/overview\">Learn more</a>.',\n\ttypeOptions: {\n\t\tloadOptions: {\n\t\t\trouting: {\n\t\t\t\trequest: {\n\t\t\t\t\tmethod: 'GET',\n\t\t\t\t\turl: '={{ $parameter.options?.baseURL?.split(\"/\").slice(-1).pop() || $credentials?.url?.split(\"/\").slice(-1).pop() || \"v1\" }}/models',\n\t\t\t\t},\n\t\t\t\toutput: {\n\t\t\t\t\tpostReceive: [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'rootProperty',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tproperty: 'data',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'filter',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tpass: \"={{ $responseItem.id.includes('embed') }}\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'setKeyValue',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tname: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t\tvalue: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'sort',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tkey: 'name',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t],\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t},\n\trouting: {\n\t\tsend: {\n\t\t\ttype: 'body',\n\t\t\tproperty: 'model',\n\t\t},\n\t},\n\tdefault: 'text-embedding-3-small',\n};\n\nexport class EmbeddingsOpenAi implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Embeddings OpenAI',\n\t\tname: 'embeddingsOpenAi',\n\t\ticon: { light: 'file:openAiLight.svg', dark: 'file:openAiLight.dark.svg' },\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'openAiApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tgroup: ['transform'],\n\t\tversion: [1, 1.1, 1.2],\n\t\tdescription: 'Use Embeddings OpenAI',\n\t\tdefaults: {\n\t\t\tname: 'Embeddings OpenAI',\n\t\t},\n\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Embeddings'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsopenai/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiEmbedding],\n\t\toutputNames: ['Embeddings'],\n\t\trequestDefaults: {\n\t\t\tignoreHttpStatusErrors: true,\n\t\t\tbaseURL:\n\t\t\t\t'={{ $parameter.options?.baseURL?.split(\"/\").slice(0,-1).join(\"/\") || $credentials.url?.split(\"/\").slice(0,-1).join(\"/\") || \"https://api.openai.com\" }}',\n\t\t},\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiVectorStore]),\n\t\t\t{\n\t\t\t\t...modelParameter,\n\t\t\t\tdefault: 
'text-embedding-ada-002',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\t...modelParameter,\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\thide: {\n\t\t\t\t\t\t'@version': [1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Dimensions',\n\t\t\t\t\t\tname: 'dimensions',\n\t\t\t\t\t\tdefault: undefined,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models.',\n\t\t\t\t\t\ttype: 'options',\n\t\t\t\t\t\toptions: [\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '256',\n\t\t\t\t\t\t\t\tvalue: 256,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '512',\n\t\t\t\t\t\t\t\tvalue: 512,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '1024',\n\t\t\t\t\t\t\t\tvalue: 1024,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '1536',\n\t\t\t\t\t\t\t\tvalue: 1536,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '3072',\n\t\t\t\t\t\t\t\tvalue: 3072,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t],\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Base URL',\n\t\t\t\t\t\tname: 'baseURL',\n\t\t\t\t\t\tdefault: 'https://api.openai.com/v1',\n\t\t\t\t\t\tdescription: 'Override the default base URL for the API',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.2 } }],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Batch Size',\n\t\t\t\t\t\tname: 'batchSize',\n\t\t\t\t\t\tdefault: 512,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2048 },\n\t\t\t\t\t\tdescription: 'Maximum number of documents to send in each request',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Strip New Lines',\n\t\t\t\t\t\tname: 'stripNewLines',\n\t\t\t\t\t\tdefault: true,\n\t\t\t\t\t\tdescription: 'Whether to strip new lines from the input text',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Timeout',\n\t\t\t\t\t\tname: 'timeout',\n\t\t\t\t\t\tdefault: -1,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Maximum amount of time a request is allowed to take in seconds. 
Set to -1 for no timeout.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tthis.logger.debug('Supply data for embeddings');\n\t\tconst credentials = await this.getCredentials('openAiApi');\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tbaseURL?: string;\n\t\t\tbatchSize?: number;\n\t\t\tstripNewLines?: boolean;\n\t\t\ttimeout?: number;\n\t\t\tdimensions?: number | undefined;\n\t\t};\n\n\t\tif (options.timeout === -1) {\n\t\t\toptions.timeout = undefined;\n\t\t}\n\n\t\tconst configuration: ClientOptions = {};\n\t\tif (options.baseURL) {\n\t\t\tconfiguration.baseURL = options.baseURL;\n\t\t} else if (credentials.url) {\n\t\t\tconfiguration.baseURL = credentials.url as string;\n\t\t}\n\n\t\tconst embeddings = new OpenAIEmbeddings(\n\t\t\t{\n\t\t\t\tmodelName: this.getNodeParameter('model', itemIndex, 'text-embedding-3-small') as string,\n\t\t\t\topenAIApiKey: credentials.apiKey as string,\n\t\t\t\t...options,\n\t\t\t},\n\t\t\tconfiguration,\n\t\t);\n\n\t\treturn {\n\t\t\tresponse: logWrapper(embeddings, this),\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAiC;AACjC,0BAOO;AAGP,wBAA2B;AAC3B,0BAA6C;AAE7C,MAAM,iBAAkC;AAAA,EACvC,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,aACC;AAAA,EACD,aAAa;AAAA,IACZ,aAAa;AAAA,MACZ,SAAS;AAAA,QACR,SAAS;AAAA,UACR,QAAQ;AAAA,UACR,KAAK;AAAA,QACN;AAAA,QACA,QAAQ;AAAA,UACP,aAAa;AAAA,YACZ;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,UAAU;AAAA,cACX;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,MAAM;AAAA,cACP;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO;AAAA,cACR;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,KAAK;AAAA,cACN;AAAA,YACD;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAAA,EACA,SAAS;AAAA,IACR,MAAM;AAAA,MACL,MAAM;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AAAA,EACA,SAAS;AACV;AAEO,MAAM,iBAAsC;AAAA,EAA5C;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM,EAAE,OAAO,wBAAwB,MAAM,4BAA4B;AAAA,MACzE,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,KAAK,GAAG;AAAA,MACrB,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MAEA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,YAAY;AAAA,QAClB;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,WAAW;AAAA,MACzC,aAAa,CAAC,YAAY;AAAA,MAC1B,iBAAiB;AAAA,QAChB,wBAAwB;AAAA,QACxB,SACC;AAAA,MACF;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,aAAa,CAAC;AAAA,QAChE;AAAA,UACC,GAAG;AAAA,UACH,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,CAAC;AAAA,YACf;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,GAAG;AAAA,UACH,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,CAAC;AAAA,YACf;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,cACN,SAAS;AAAA,gBACR;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf
,MAAM;AAAA,kBACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,gBACpC;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,KAAK;AAAA,cAC9B,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,SAAK,OAAO,MAAM,4BAA4B;AAC9C,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAQ9D,QAAI,QAAQ,YAAY,IAAI;AAC3B,cAAQ,UAAU;AAAA,IACnB;AAEA,UAAM,gBAA+B,CAAC;AACtC,QAAI,QAAQ,SAAS;AACpB,oBAAc,UAAU,QAAQ;AAAA,IACjC,WAAW,YAAY,KAAK;AAC3B,oBAAc,UAAU,YAAY;AAAA,IACrC;AAEA,UAAM,aAAa,IAAI;AAAA,MACtB;AAAA,QACC,WAAW,KAAK,iBAAiB,SAAS,WAAW,wBAAwB;AAAA,QAC7E,cAAc,YAAY;AAAA,QAC1B,GAAG;AAAA,MACJ;AAAA,MACA;AAAA,IACD;AAEA,WAAO;AAAA,MACN,cAAU,8BAAW,YAAY,IAAI;AAAA,IACtC;AAAA,EACD;AACD;","names":[]}
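The EmbeddingsOpenAi source embedded above maps the node's options onto @langchain/openai. A minimal sketch, with hypothetical option values and credential source, of its two notable conversions: a UI timeout of -1 means "no client timeout", and a custom Base URL is forwarded through the OpenAI client configuration:

import { OpenAIEmbeddings } from '@langchain/openai';
import type { ClientOptions } from 'openai';

// Sketch only: the option values and env-var credential source are illustrative.
function buildEmbeddings(opts: { baseURL?: string; dimensions?: number; timeout?: number }) {
  // In the node UI, -1 means "no timeout" for the underlying client
  const timeout = opts.timeout === -1 ? undefined : opts.timeout;

  // A custom Base URL travels via the OpenAI ClientOptions, not the embeddings params
  const configuration: ClientOptions = {};
  if (opts.baseURL) configuration.baseURL = opts.baseURL;

  return new OpenAIEmbeddings(
    {
      modelName: 'text-embedding-3-small',
      openAIApiKey: process.env.OPENAI_API_KEY,
      dimensions: opts.dimensions,
      timeout,
    },
    configuration,
  );
}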
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\n\nimport { ChatAnthropic } from '@langchain/anthropic';\nimport type { LLMResult } from '@langchain/core/outputs';\nimport {\n\tNodeConnectionTypes,\n\ttype INodePropertyOptions,\n\ttype INodeProperties,\n\ttype ISupplyDataFunctions,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport { searchModels } from './methods/searchModels';\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\n\nconst modelField: INodeProperties = {\n\tdisplayName: 'Model',\n\tname: 'model',\n\ttype: 'options',\n\t// eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items\n\toptions: [\n\t\t{\n\t\t\tname: 'Claude 3.5 Sonnet(20241022)',\n\t\t\tvalue: 'claude-3-5-sonnet-20241022',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3 Opus(20240229)',\n\t\t\tvalue: 'claude-3-opus-20240229',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3.5 Sonnet(20240620)',\n\t\t\tvalue: 'claude-3-5-sonnet-20240620',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3 Sonnet(20240229)',\n\t\t\tvalue: 'claude-3-sonnet-20240229',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3.5 Haiku(20241022)',\n\t\t\tvalue: 'claude-3-5-haiku-20241022',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3 Haiku(20240307)',\n\t\t\tvalue: 'claude-3-haiku-20240307',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude 2',\n\t\t\tvalue: 'claude-2',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude 2.1',\n\t\t\tvalue: 'claude-2.1',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude Instant 1.2',\n\t\t\tvalue: 'claude-instant-1.2',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude Instant 1',\n\t\t\tvalue: 'claude-instant-1',\n\t\t},\n\t],\n\tdescription:\n\t\t'The model which will generate the completion. 
<a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.',\n\tdefault: 'claude-2',\n};\n\nconst MIN_THINKING_BUDGET = 1024;\nconst DEFAULT_MAX_TOKENS = 4096;\nexport class LmChatAnthropic implements INodeType {\n\tmethods = {\n\t\tlistSearch: {\n\t\t\tsearchModels,\n\t\t},\n\t};\n\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Anthropic Chat Model',\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased\n\t\tname: 'lmChatAnthropic',\n\t\ticon: 'file:anthropic.svg',\n\t\tgroup: ['transform'],\n\t\tversion: [1, 1.1, 1.2, 1.3],\n\t\tdefaultVersion: 1.3,\n\t\tdescription: 'Language Model Anthropic',\n\t\tdefaults: {\n\t\t\tname: 'Anthropic Chat Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Chat Models (Recommended)'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatanthropic/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t\talias: ['claude', 'sonnet', 'opus'],\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'anthropicApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiChain]),\n\t\t\t{\n\t\t\t\t...modelField,\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\t...modelField,\n\t\t\t\tdefault: 'claude-3-sonnet-20240229',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [1.1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\t...modelField,\n\t\t\t\tdefault: 'claude-3-5-sonnet-20240620',\n\t\t\t\toptions: (modelField.options ?? []).filter(\n\t\t\t\t\t(o): o is INodePropertyOptions => 'name' in o && !o.name.toString().startsWith('LEGACY'),\n\t\t\t\t),\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [{ _cnd: { lte: 1.2 } }],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'resourceLocator',\n\t\t\t\tdefault: {\n\t\t\t\t\tmode: 'list',\n\t\t\t\t\tvalue: 'claude-3-7-sonnet-20250219',\n\t\t\t\t\tcachedResultName: 'Claude 3.7 Sonnet',\n\t\t\t\t},\n\t\t\t\trequired: true,\n\t\t\t\tmodes: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'From List',\n\t\t\t\t\t\tname: 'list',\n\t\t\t\t\t\ttype: 'list',\n\t\t\t\t\t\tplaceholder: 'Select a model...',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tsearchListMethod: 'searchModels',\n\t\t\t\t\t\t\tsearchable: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'ID',\n\t\t\t\t\t\tname: 'id',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tplaceholder: 'Claude Sonnet',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t\tdescription:\n\t\t\t\t\t'The model. Choose from the list, or specify an ID. 
<a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.3 } }],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Maximum Number of Tokens',\n\t\t\t\t\t\tname: 'maxTokensToSample',\n\t\t\t\t\t\tdefault: DEFAULT_MAX_TOKENS,\n\t\t\t\t\t\tdescription: 'The maximum number of tokens to generate in the completion',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sampling Temperature',\n\t\t\t\t\t\tname: 'temperature',\n\t\t\t\t\t\tdefault: 0.7,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top K',\n\t\t\t\t\t\tname: 'topK',\n\t\t\t\t\t\tdefault: -1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: -1, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Used to remove \"long tail\" low probability responses. Defaults to -1, which disables it.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top P',\n\t\t\t\t\t\tname: 'topP',\n\t\t\t\t\t\tdefault: 1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Enable Thinking',\n\t\t\t\t\t\tname: 'thinking',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t\tdefault: false,\n\t\t\t\t\t\tdescription: 'Whether to enable thinking mode for the model',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Thinking Budget (Tokens)',\n\t\t\t\t\t\tname: 'thinkingBudget',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdefault: MIN_THINKING_BUDGET,\n\t\t\t\t\t\tdescription: 'The maximum number of tokens to use for thinking',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials('anthropicApi');\n\n\t\tconst version = this.getNode().typeVersion;\n\t\tconst modelName =\n\t\t\tversion >= 1.3\n\t\t\t\t? 
(this.getNodeParameter('model.value', itemIndex) as string)\n\t\t\t\t: (this.getNodeParameter('model', itemIndex) as string);\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tmaxTokensToSample?: number;\n\t\t\ttemperature: number;\n\t\t\ttopK?: number;\n\t\t\ttopP?: number;\n\t\t\tthinking?: boolean;\n\t\t\tthinkingBudget?: number;\n\t\t};\n\t\tlet invocationKwargs = {};\n\n\t\tconst tokensUsageParser = (llmOutput: LLMResult['llmOutput']) => {\n\t\t\tconst usage = (llmOutput?.usage as { input_tokens: number; output_tokens: number }) ?? {\n\t\t\t\tinput_tokens: 0,\n\t\t\t\toutput_tokens: 0,\n\t\t\t};\n\t\t\treturn {\n\t\t\t\tcompletionTokens: usage.output_tokens,\n\t\t\t\tpromptTokens: usage.input_tokens,\n\t\t\t\ttotalTokens: usage.input_tokens + usage.output_tokens,\n\t\t\t};\n\t\t};\n\n\t\tif (options.thinking) {\n\t\t\tinvocationKwargs = {\n\t\t\t\tthinking: {\n\t\t\t\t\ttype: 'enabled',\n\t\t\t\t\t// If thinking is enabled, we need to set a budget.\n\t\t\t\t\t// We fallback to 1024 as that is the minimum\n\t\t\t\t\tbudget_tokens: options.thinkingBudget ?? MIN_THINKING_BUDGET,\n\t\t\t\t},\n\t\t\t\t// The default Langchain max_tokens is -1 (no limit) but Anthropic requires a number\n\t\t\t\t// higher than budget_tokens\n\t\t\t\tmax_tokens: options.maxTokensToSample ?? DEFAULT_MAX_TOKENS,\n\t\t\t\t// These need to be unset when thinking is enabled.\n\t\t\t\t// Because the invocationKwargs will override the model options\n\t\t\t\t// we can pass options to the model and then override them here\n\t\t\t\ttop_k: undefined,\n\t\t\t\ttop_p: undefined,\n\t\t\t\ttemperature: undefined,\n\t\t\t};\n\t\t}\n\n\t\tconst model = new ChatAnthropic({\n\t\t\tanthropicApiKey: credentials.apiKey as string,\n\t\t\tmodelName,\n\t\t\tmaxTokens: options.maxTokensToSample,\n\t\t\ttemperature: options.temperature,\n\t\t\ttopK: options.topK,\n\t\t\ttopP: options.topP,\n\t\t\tcallbacks: [new N8nLlmTracing(this, { tokensUsageParser })],\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this),\n\t\t\tinvocationKwargs,\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: 
model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,uBAA8B;AAE9B,0BAQO;AAEP,0BAA6C;AAE7C,0BAA6B;AAC7B,wCAA+C;AAC/C,2BAA8B;AAE9B,MAAM,aAA8B;AAAA,EACnC,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA;AAAA,EAEN,SAAS;AAAA,IACR;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,EACD;AAAA,EACA,aACC;AAAA,EACD,SAAS;AACV;AAEA,MAAM,sBAAsB;AAC5B,MAAM,qBAAqB;AACpB,MAAM,gBAAqC;AAAA,EAA3C;AACN,mBAAU;AAAA,MACT,YAAY;AAAA,QACX;AAAA,MACD;AAAA,IACD;AAEA,uBAAoC;AAAA,MACnC,aAAa;AAAA;AAAA,MAEb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,KAAK,KAAK,GAAG;AAAA,MAC1B,gBAAgB;AAAA,MAChB,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,2BAA2B;AAAA,QAChD;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,QACA,OAAO,CAAC,UAAU,UAAU,MAAM;AAAA,MACnC;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,UACC,GAAG;AAAA,UACH,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,CAAC;AAAA,YACf;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,GAAG;AAAA,UACH,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,GAAG;AAAA,YACjB;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,GAAG;AAAA,UACH,SAAS;AAAA,UACT,UAAU,WAAW,WAAW,CAAC,GAAG;AAAA,YACnC,CAAC,MAAiC,UAAU,KAAK,CAAC,EAAE,KAAK,SAAS,EAAE,WAAW,QAAQ;AAAA,UACxF;AAAA,UACA,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,YACpC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,YACP,kBAAkB;AAAA,UACnB;AAAA,UACA,UAAU;AAAA,UACV,OAAO;AAAA,YACN;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,cACb,aAAa;AAAA,gBACZ,kBAAkB;AAAA,gBAClB,YAAY;AAAA,cACb;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,YACd;AAAA,UACD;AAAA,UACA,aACC;AAAA,UACD,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,YACpC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC
,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,YACd;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAAe,cAAc;AAE5D,UAAM,UAAU,KAAK,QAAQ,EAAE;AAC/B,UAAM,YACL,WAAW,MACP,KAAK,iBAAiB,eAAe,SAAS,IAC9C,KAAK,iBAAiB,SAAS,SAAS;AAE7C,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAQ9D,QAAI,mBAAmB,CAAC;AAExB,UAAM,oBAAoB,CAAC,cAAsC;AAChE,YAAM,QAAS,WAAW,SAA6D;AAAA,QACtF,cAAc;AAAA,QACd,eAAe;AAAA,MAChB;AACA,aAAO;AAAA,QACN,kBAAkB,MAAM;AAAA,QACxB,cAAc,MAAM;AAAA,QACpB,aAAa,MAAM,eAAe,MAAM;AAAA,MACzC;AAAA,IACD;AAEA,QAAI,QAAQ,UAAU;AACrB,yBAAmB;AAAA,QAClB,UAAU;AAAA,UACT,MAAM;AAAA;AAAA;AAAA,UAGN,eAAe,QAAQ,kBAAkB;AAAA,QAC1C;AAAA;AAAA;AAAA,QAGA,YAAY,QAAQ,qBAAqB;AAAA;AAAA;AAAA;AAAA,QAIzC,OAAO;AAAA,QACP,OAAO;AAAA,QACP,aAAa;AAAA,MACd;AAAA,IACD;AAEA,UAAM,QAAQ,IAAI,+BAAc;AAAA,MAC/B,iBAAiB,YAAY;AAAA,MAC7B;AAAA,MACA,WAAW,QAAQ;AAAA,MACnB,aAAa,QAAQ;AAAA,MACrB,MAAM,QAAQ;AAAA,MACd,MAAM,QAAQ;AAAA,MACd,WAAW,CAAC,IAAI,mCAAc,MAAM,EAAE,kBAAkB,CAAC,CAAC;AAAA,MAC1D,qBAAiB,kEAA+B,IAAI;AAAA,MACpD;AAAA,IACD,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":[]}
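The LmChatAnthropic source embedded above adds a resource-locator model picker (version 1.3) and an 'Enable Thinking' option. A minimal sketch, using only constructor fields the node itself passes to @langchain/anthropic, of how the thinking budget becomes invocation kwargs; the model ID and budget values are illustrative:

import { ChatAnthropic } from '@langchain/anthropic';

const MIN_THINKING_BUDGET = 1024; // minimum budget accepted when thinking is enabled
const DEFAULT_MAX_TOKENS = 4096;  // max_tokens must exceed budget_tokens

function buildThinkingKwargs(opts: {
  thinking?: boolean;
  thinkingBudget?: number;
  maxTokensToSample?: number;
}) {
  if (!opts.thinking) return {};
  return {
    thinking: { type: 'enabled', budget_tokens: opts.thinkingBudget ?? MIN_THINKING_BUDGET },
    max_tokens: opts.maxTokensToSample ?? DEFAULT_MAX_TOKENS,
    // Sampling controls are unset because they conflict with extended thinking
    top_k: undefined,
    top_p: undefined,
    temperature: undefined,
  };
}

// Illustrative values; the node reads these from credentials and node parameters.
const model = new ChatAnthropic({
  anthropicApiKey: process.env.ANTHROPIC_API_KEY,
  modelName: 'claude-3-7-sonnet-20250219',
  invocationKwargs: buildThinkingKwargs({ thinking: true, thinkingBudget: 2048 }),
});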
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../nodes/llms/LMChatAnthropic/methods/searchModels.ts"],"sourcesContent":["import type {\n\tILoadOptionsFunctions,\n\tINodeListSearchItems,\n\tINodeListSearchResult,\n} from 'n8n-workflow';\n\nexport interface AnthropicModel {\n\tid: string;\n\tdisplay_name: string;\n\ttype: string;\n\tcreated_at: string;\n}\n\nexport async function searchModels(\n\tthis: ILoadOptionsFunctions,\n\tfilter?: string,\n): Promise<INodeListSearchResult> {\n\tconst response = (await this.helpers.httpRequestWithAuthentication.call(this, 'anthropicApi', {\n\t\turl: 'https://api.anthropic.com/v1/models',\n\t\theaders: {\n\t\t\t'anthropic-version': '2023-06-01',\n\t\t},\n\t})) as { data: AnthropicModel[] };\n\n\tconst models = response.data || [];\n\tlet results: INodeListSearchItems[] = [];\n\n\tif (filter) {\n\t\tfor (const model of models) {\n\t\t\tif (model.id.toLowerCase().includes(filter.toLowerCase())) {\n\t\t\t\tresults.push({\n\t\t\t\t\tname: model.display_name,\n\t\t\t\t\tvalue: model.id,\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t} else {\n\t\tresults = models.map((model) => ({\n\t\t\tname: model.display_name,\n\t\t\tvalue: model.id,\n\t\t}));\n\t}\n\n\t// Sort models with more recent ones first (claude-3 before claude-2)\n\tresults = results.sort((a, b) => {\n\t\tconst modelA = models.find((m) => m.id === a.value);\n\t\tconst modelB = models.find((m) => m.id === b.value);\n\n\t\tif (!modelA || !modelB) return 0;\n\n\t\t// Sort by created_at date, most recent first\n\t\tconst dateA = new Date(modelA.created_at);\n\t\tconst dateB = new Date(modelB.created_at);\n\t\treturn dateB.getTime() - dateA.getTime();\n\t});\n\n\treturn {\n\t\tresults,\n\t};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAaA,eAAsB,aAErB,QACiC;AACjC,QAAM,WAAY,MAAM,KAAK,QAAQ,8BAA8B,KAAK,MAAM,gBAAgB;AAAA,IAC7F,KAAK;AAAA,IACL,SAAS;AAAA,MACR,qBAAqB;AAAA,IACtB;AAAA,EACD,CAAC;AAED,QAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,MAAI,UAAkC,CAAC;AAEvC,MAAI,QAAQ;AACX,eAAW,SAAS,QAAQ;AAC3B,UAAI,MAAM,GAAG,YAAY,EAAE,SAAS,OAAO,YAAY,CAAC,GAAG;AAC1D,gBAAQ,KAAK;AAAA,UACZ,MAAM,MAAM;AAAA,UACZ,OAAO,MAAM;AAAA,QACd,CAAC;AAAA,MACF;AAAA,IACD;AAAA,EACD,OAAO;AACN,cAAU,OAAO,IAAI,CAAC,WAAW;AAAA,MAChC,MAAM,MAAM;AAAA,MACZ,OAAO,MAAM;AAAA,IACd,EAAE;AAAA,EACH;AAGA,YAAU,QAAQ,KAAK,CAAC,GAAG,MAAM;AAChC,UAAM,SAAS,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK;AAClD,UAAM,SAAS,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK;AAElD,QAAI,CAAC,UAAU,CAAC,OAAQ,QAAO;AAG/B,UAAM,QAAQ,IAAI,KAAK,OAAO,UAAU;AACxC,UAAM,QAAQ,IAAI,KAAK,OAAO,UAAU;AACxC,WAAO,MAAM,QAAQ,IAAI,MAAM,QAAQ;AAAA,EACxC,CAAC;AAED,SAAO;AAAA,IACN;AAAA,EACD;AACD;","names":[]}
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/llms/LMChatOllama/LmChatOllama.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\n\nimport type { ChatOllamaInput } from '@langchain/ollama';\nimport { ChatOllama } from '@langchain/ollama';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport { ollamaModel, ollamaOptions, ollamaDescription } from '../LMOllama/description';\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\n\nexport class LmChatOllama implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Ollama Chat Model',\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased\n\t\tname: 'lmChatOllama',\n\t\ticon: 'file:ollama.svg',\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'Language Model Ollama',\n\t\tdefaults: {\n\t\t\tname: 'Ollama Chat Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Chat Models (Recommended)'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatollama/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\t...ollamaDescription,\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),\n\t\t\tollamaModel,\n\t\t\tollamaOptions,\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials('ollamaApi');\n\n\t\tconst modelName = this.getNodeParameter('model', itemIndex) as string;\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as ChatOllamaInput;\n\n\t\tconst model = new ChatOllama({\n\t\t\t...options,\n\t\t\tbaseUrl: credentials.baseUrl as string,\n\t\t\tmodel: modelName,\n\t\t\tformat: options.format === 'default' ? 
undefined : options.format,\n\t\t\tcallbacks: [new N8nLlmTracing(this)],\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this),\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,oBAA2B;AAC3B,0BAMO;AAEP,0BAA6C;AAE7C,yBAA8D;AAC9D,wCAA+C;AAC/C,2BAA8B;AAEvB,MAAM,aAAkC;AAAA,EAAxC;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA;AAAA,MAEb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,2BAA2B;AAAA,QAChD;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,GAAG;AAAA,MACH,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,UAAM,YAAY,KAAK,iBAAiB,SAAS,SAAS;AAC1D,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAE9D,UAAM,QAAQ,IAAI,yBAAW;AAAA,MAC5B,GAAG;AAAA,MACH,SAAS,YAAY;AAAA,MACrB,OAAO;AAAA,MACP,QAAQ,QAAQ,WAAW,YAAY,SAAY,QAAQ;AAAA,MAC3D,WAAW,CAAC,IAAI,mCAAc,IAAI,CAAC;AAAA,MACnC,qBAAiB,kEAA+B,IAAI;AAAA,IACrD,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":[]}
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\n\nimport { ChatOpenAI, type ClientOptions } from '@langchain/openai';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport { searchModels } from './methods/loadModels';\nimport { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\n\nexport class LmChatOpenAi implements INodeType {\n\tmethods = {\n\t\tlistSearch: {\n\t\t\tsearchModels,\n\t\t},\n\t};\n\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'OpenAI Chat Model',\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased\n\t\tname: 'lmChatOpenAi',\n\t\ticon: { light: 'file:openAiLight.svg', dark: 'file:openAiLight.dark.svg' },\n\t\tgroup: ['transform'],\n\t\tversion: [1, 1.1, 1.2],\n\t\tdescription: 'For advanced usage with an AI chain',\n\t\tdefaults: {\n\t\t\tname: 'OpenAI Chat Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Chat Models (Recommended)'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenai/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'openAiApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\trequestDefaults: {\n\t\t\tignoreHttpStatusErrors: true,\n\t\t\tbaseURL:\n\t\t\t\t'={{ $parameter.options?.baseURL?.split(\"/\").slice(0,-1).join(\"/\") || $credentials?.url?.split(\"/\").slice(0,-1).join(\"/\") || \"https://api.openai.com\" }}',\n\t\t},\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.',\n\t\t\t\tname: 'notice',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'/options.responseFormat': ['json_object'],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'options',\n\t\t\t\tdescription:\n\t\t\t\t\t'The model which will generate the completion. 
<a href=\"https://beta.openai.com/docs/models/overview\">Learn more</a>.',\n\t\t\t\ttypeOptions: {\n\t\t\t\t\tloadOptions: {\n\t\t\t\t\t\trouting: {\n\t\t\t\t\t\t\trequest: {\n\t\t\t\t\t\t\t\tmethod: 'GET',\n\t\t\t\t\t\t\t\turl: '={{ $parameter.options?.baseURL?.split(\"/\").slice(-1).pop() || $credentials?.url?.split(\"/\").slice(-1).pop() || \"v1\" }}/models',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\toutput: {\n\t\t\t\t\t\t\t\tpostReceive: [\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'rootProperty',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tproperty: 'data',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'filter',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\t// If the baseURL is not set or is set to api.openai.com, include only chat models\n\t\t\t\t\t\t\t\t\t\t\tpass: `={{\n\t\t\t\t\t\t\t\t\t\t\t\t($parameter.options?.baseURL && !$parameter.options?.baseURL?.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t\t\t\t($credentials?.url && !$credentials.url.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('ft:') ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('o1') ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('o3') ||\n\t\t\t\t\t\t\t\t\t\t\t\t($responseItem.id.startsWith('gpt-') && !$responseItem.id.includes('instruct'))\n\t\t\t\t\t\t\t\t\t\t\t}}`,\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'setKeyValue',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tname: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t\t\t\t\tvalue: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'sort',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tkey: 'name',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trouting: {\n\t\t\t\t\tsend: {\n\t\t\t\t\t\ttype: 'body',\n\t\t\t\t\t\tproperty: 'model',\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tdefault: 'gpt-4o-mini',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\thide: {\n\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.2 } }],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'resourceLocator',\n\t\t\t\tdefault: { mode: 'list', value: 'gpt-4o-mini' },\n\t\t\t\trequired: true,\n\t\t\t\tmodes: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'From List',\n\t\t\t\t\t\tname: 'list',\n\t\t\t\t\t\ttype: 'list',\n\t\t\t\t\t\tplaceholder: 'Select a model...',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tsearchListMethod: 'searchModels',\n\t\t\t\t\t\t\tsearchable: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'ID',\n\t\t\t\t\t\tname: 'id',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tplaceholder: 'gpt-4o-mini',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t\tdescription: 'The model. 
Choose from the list, or specify an ID.',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\thide: {\n\t\t\t\t\t\t'@version': [{ _cnd: { lte: 1.1 } }],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'When using non-OpenAI models via \"Base URL\" override, not all models might be chat-compatible or support other features, like tools calling or JSON response format',\n\t\t\t\tname: 'notice',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'/options.baseURL': [{ _cnd: { exists: true } }],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Base URL',\n\t\t\t\t\t\tname: 'baseURL',\n\t\t\t\t\t\tdefault: 'https://api.openai.com/v1',\n\t\t\t\t\t\tdescription: 'Override the default base URL for the API',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.1 } }],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Frequency Penalty',\n\t\t\t\t\t\tname: 'frequencyPenalty',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim\",\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Maximum Number of Tokens',\n\t\t\t\t\t\tname: 'maxTokens',\n\t\t\t\t\t\tdefault: -1,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tmaxValue: 32768,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Response Format',\n\t\t\t\t\t\tname: 'responseFormat',\n\t\t\t\t\t\tdefault: 'text',\n\t\t\t\t\t\ttype: 'options',\n\t\t\t\t\t\toptions: [\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: 'Text',\n\t\t\t\t\t\t\t\tvalue: 'text',\n\t\t\t\t\t\t\t\tdescription: 'Regular text response',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: 'JSON',\n\t\t\t\t\t\t\t\tvalue: 'json_object',\n\t\t\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\t\t'Enables JSON mode, which should guarantee the message the model generates is valid JSON',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t],\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Presence Penalty',\n\t\t\t\t\t\tname: 'presencePenalty',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics\",\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sampling Temperature',\n\t\t\t\t\t\tname: 'temperature',\n\t\t\t\t\t\tdefault: 0.7,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Reasoning Effort',\n\t\t\t\t\t\tname: 'reasoningEffort',\n\t\t\t\t\t\tdefault: 'medium',\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls the amount of reasoning tokens to use. A value of \"low\" will favor speed and economical token usage, \"high\" will favor more complete reasoning at the cost of more tokens generated and slower responses.',\n\t\t\t\t\t\ttype: 'options',\n\t\t\t\t\t\toptions: [\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: 'Low',\n\t\t\t\t\t\t\t\tvalue: 'low',\n\t\t\t\t\t\t\t\tdescription: 'Favors speed and economical token usage',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: 'Medium',\n\t\t\t\t\t\t\t\tvalue: 'medium',\n\t\t\t\t\t\t\t\tdescription: 'Balance between speed and reasoning accuracy',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: 'High',\n\t\t\t\t\t\t\t\tvalue: 'high',\n\t\t\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\t\t'Favors more complete reasoning at the cost of more tokens generated and slower responses',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t],\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\t\t// reasoning_effort is only available on o1, o1-versioned, or on o3-mini and beyond. Not on o1-mini or other GPT-models.\n\t\t\t\t\t\t\t\t'/model': [{ _cnd: { regex: '(^o1([-\\\\d]+)?$)|(^o[3-9].*)' } }],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Timeout',\n\t\t\t\t\t\tname: 'timeout',\n\t\t\t\t\t\tdefault: 60000,\n\t\t\t\t\t\tdescription: 'Maximum amount of time a request is allowed to take in milliseconds',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Max Retries',\n\t\t\t\t\t\tname: 'maxRetries',\n\t\t\t\t\t\tdefault: 2,\n\t\t\t\t\t\tdescription: 'Maximum number of retries to attempt',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top P',\n\t\t\t\t\t\tname: 'topP',\n\t\t\t\t\t\tdefault: 1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials('openAiApi');\n\n\t\tconst version = this.getNode().typeVersion;\n\t\tconst modelName =\n\t\t\tversion >= 1.2\n\t\t\t\t? 
(this.getNodeParameter('model.value', itemIndex) as string)\n\t\t\t\t: (this.getNodeParameter('model', itemIndex) as string);\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tbaseURL?: string;\n\t\t\tfrequencyPenalty?: number;\n\t\t\tmaxTokens?: number;\n\t\t\tmaxRetries: number;\n\t\t\ttimeout: number;\n\t\t\tpresencePenalty?: number;\n\t\t\ttemperature?: number;\n\t\t\ttopP?: number;\n\t\t\tresponseFormat?: 'text' | 'json_object';\n\t\t\treasoningEffort?: 'low' | 'medium' | 'high';\n\t\t};\n\n\t\tconst configuration: ClientOptions = {};\n\t\tif (options.baseURL) {\n\t\t\tconfiguration.baseURL = options.baseURL;\n\t\t} else if (credentials.url) {\n\t\t\tconfiguration.baseURL = credentials.url as string;\n\t\t}\n\n\t\t// Extra options to send to OpenAI, that are not directly supported by LangChain\n\t\tconst modelKwargs: {\n\t\t\tresponse_format?: object;\n\t\t\treasoning_effort?: 'low' | 'medium' | 'high';\n\t\t} = {};\n\t\tif (options.responseFormat) modelKwargs.response_format = { type: options.responseFormat };\n\t\tif (options.reasoningEffort && ['low', 'medium', 'high'].includes(options.reasoningEffort))\n\t\t\tmodelKwargs.reasoning_effort = options.reasoningEffort;\n\n\t\tconst model = new ChatOpenAI({\n\t\t\topenAIApiKey: credentials.apiKey as string,\n\t\t\tmodelName,\n\t\t\t...options,\n\t\t\ttimeout: options.timeout ?? 60000,\n\t\t\tmaxRetries: options.maxRetries ?? 2,\n\t\t\tconfiguration,\n\t\t\tcallbacks: [new N8nLlmTracing(this)],\n\t\t\tmodelKwargs,\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this, openAiFailedAttemptHandler),\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAA+C;AAC/C,0BAMO;AAEP,0BAA6C;AAE7C,wBAA6B;AAC7B,4BAA2C;AAC3C,wCAA+C;AAC/C,2BAA8B;AAEvB,MAAM,aAAkC;AAAA,EAAxC;AACN,mBAAU;AAAA,MACT,YAAY;AAAA,QACX;AAAA,MACD;AAAA,IACD;AAEA,uBAAoC;AAAA,MACnC,aAAa;AAAA;AAAA,MAEb,MAAM;AAAA,MACN,MAAM,EAAE,OAAO,wBAAwB,MAAM,4BAA4B;AAAA,MACzE,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,KAAK,GAAG;AAAA,MACrB,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,2BAA2B;AAAA,QAChD;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,iBAAiB;AAAA,QAChB,wBAAwB;AAAA,QACxB,SACC;AAAA,MACF;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,UACC,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,2BAA2B,CAAC,aAAa;AAAA,YAC1C;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,aACC;AAAA,UACD,aAAa;AAAA,YACZ,aAAa;AAAA,cACZ,SAAS;AAAA,gBACR,SAAS;AAAA,kBACR,QAAQ;AAAA,kBACR,KAAK;AAAA,gBACN;AAAA,gBACA,QAAQ;AAAA,kBACP,aAAa;AAAA,oBACZ;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,UAAU;AAAA,sBACX;AAAA,oBACD;AAAA,oBACA;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA;AAAA,wBAEX,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAQP;AAAA,oBACD;AAAA,oBACA;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,MAAM;AAAA,wBACN,OAAO;AAAA,sBACR;AAAA,oBACD;AAAA,oBACA;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,KAAK;AAAA,sBACN;AAAA,oBACD;AAAA,kBACD;AAAA,gBACD;AAAA,cACD;AAAA,YACD;AAAA,UACD;AAAA,UACA,SAAS;AAAA,YACR,MAAM;AAAA,cACL,MAAM;AAAA,cACN,UAAU;AAAA,YACX;AAAA,UACD;AAAA,UACA,SAAS;AAAA,UACT,gBAAgB;AAAA,Y
ACf,MAAM;AAAA,cACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,YACpC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS,EAAE,MAAM,QAAQ,OAAO,cAAc;AAAA,UAC9C,UAAU;AAAA,UACV,OAAO;AAAA,YACN;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,cACb,aAAa;AAAA,gBACZ,kBAAkB;AAAA,gBAClB,YAAY;AAAA,cACb;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,YACd;AAAA,UACD;AAAA,UACA,aAAa;AAAA,UACb,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,YACpC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,oBAAoB,CAAC,EAAE,MAAM,EAAE,QAAQ,KAAK,EAAE,CAAC;AAAA,YAChD;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,gBACpC;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,cACN,aAAa;AAAA,gBACZ,UAAU;AAAA,cACX;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,MAAM;AAAA,cACN,SAAS;AAAA,gBACR;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP,aAAa;AAAA,gBACd;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP,aACC;AAAA,gBACF;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,cACN,SAAS;AAAA,gBACR;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP,aAAa;AAAA,gBACd;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP,aAAa;AAAA,gBACd;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP,aACC;AAAA,gBACF;AAAA,cACD;AAAA,cACA,gBAAgB;AAAA,gBACf,MAAM;AAAA;AAAA,kBAEL,UAAU,CAAC,EAAE,MAAM,EAAE,OAAO,+BAA+B,EAAE,CAAC;AAAA,gBAC/D;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,UAAM,UAAU,KAAK,QAAQ,EAAE;AAC/B,UAAM,YACL,WAAW,MACP,KAAK,iBAAiB,eAAe,SAAS,IAC9C,KAAK,iBAAiB,SAAS,SAAS;AAE7C,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAa9D,UAAM,gBAA+B,CAAC;AACtC,QAAI,QAAQ,SAAS;AACpB,oBAAc,UAAU,QAAQ;AAAA,IACjC,WAAW,YAAY,KAAK;AAC3B,oBAAc,UAAU,YAAY;AAAA,IACrC;AAGA,UAAM,cAGF,CAAC;AACL,QAAI,QAAQ,eAAgB,aAAY,kBAAkB,EAAE,MAAM,QAAQ,eAAe;AACzF,QAAI,QAAQ,mBAAmB,CAAC,OAAO,UAAU,MAAM,EAAE,SAAS,QAAQ,eAAe;AACxF,kBAAY,mBAAmB,QAAQ;AAExC,UAAM,QAAQ,IAAI,yBAAW;AAAA,MAC5B,cAAc,YAAY;AAAA,MAC1B;AAAA,MACA,GAAG;AAAA,MACH,SAAS,QAAQ,WAAW;AAAA,MAC5B,YAAY,QAAQ,cAAc;AAAA,MAClC;AAAA,MACA,WAAW,CAAC,IAAI,mCAAc,IAAI,CAAC;AAAA,MACnC;AAAA,MACA,qBAAiB,kEAA+B,MAAM,gDAA0B;AAAA,IACjF,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","nam
es":[]}

@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../../nodes/llms/LMChatOpenAi/methods/loadModels.ts"],"sourcesContent":["import type { ILoadOptionsFunctions, INodeListSearchResult } from 'n8n-workflow';\nimport OpenAI from 'openai';\n\nexport async function searchModels(\n\tthis: ILoadOptionsFunctions,\n\tfilter?: string,\n): Promise<INodeListSearchResult> {\n\tconst credentials = await this.getCredentials('openAiApi');\n\tconst baseURL =\n\t\t(this.getNodeParameter('options.baseURL', '') as string) ||\n\t\t(credentials.url as string) ||\n\t\t'https://api.openai.com/v1';\n\n\tconst openai = new OpenAI({ baseURL, apiKey: credentials.apiKey as string });\n\tconst { data: models = [] } = await openai.models.list();\n\n\tconst filteredModels = models.filter((model: { id: string }) => {\n\t\tconst url = baseURL && new URL(baseURL);\n\t\tconst isValidModel =\n\t\t\t(url && url.hostname !== 'api.openai.com') ||\n\t\t\tmodel.id.startsWith('ft:') ||\n\t\t\tmodel.id.startsWith('o1') ||\n\t\t\tmodel.id.startsWith('o3') ||\n\t\t\t(model.id.startsWith('gpt-') && !model.id.includes('instruct'));\n\n\t\tif (!filter) return isValidModel;\n\n\t\treturn isValidModel && model.id.toLowerCase().includes(filter.toLowerCase());\n\t});\n\n\tfilteredModels.sort((a, b) => a.id.localeCompare(b.id));\n\n\tconst results = {\n\t\tresults: filteredModels.map((model: { id: string }) => ({\n\t\t\tname: model.id,\n\t\t\tvalue: model.id,\n\t\t})),\n\t};\n\n\treturn results;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAmB;AAEnB,eAAsB,aAErB,QACiC;AACjC,QAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AACzD,QAAM,UACJ,KAAK,iBAAiB,mBAAmB,EAAE,KAC3C,YAAY,OACb;AAED,QAAM,SAAS,IAAI,cAAAA,QAAO,EAAE,SAAS,QAAQ,YAAY,OAAiB,CAAC;AAC3E,QAAM,EAAE,MAAM,SAAS,CAAC,EAAE,IAAI,MAAM,OAAO,OAAO,KAAK;AAEvD,QAAM,iBAAiB,OAAO,OAAO,CAAC,UAA0B;AAC/D,UAAM,MAAM,WAAW,IAAI,IAAI,OAAO;AACtC,UAAM,eACJ,OAAO,IAAI,aAAa,oBACzB,MAAM,GAAG,WAAW,KAAK,KACzB,MAAM,GAAG,WAAW,IAAI,KACxB,MAAM,GAAG,WAAW,IAAI,KACvB,MAAM,GAAG,WAAW,MAAM,KAAK,CAAC,MAAM,GAAG,SAAS,UAAU;AAE9D,QAAI,CAAC,OAAQ,QAAO;AAEpB,WAAO,gBAAgB,MAAM,GAAG,YAAY,EAAE,SAAS,OAAO,YAAY,CAAC;AAAA,EAC5E,CAAC;AAED,iBAAe,KAAK,CAAC,GAAG,MAAM,EAAE,GAAG,cAAc,EAAE,EAAE,CAAC;AAEtD,QAAM,UAAU;AAAA,IACf,SAAS,eAAe,IAAI,CAAC,WAA2B;AAAA,MACvD,MAAM,MAAM;AAAA,MACZ,OAAO,MAAM;AAAA,IACd,EAAE;AAAA,EACH;AAEA,SAAO;AACR;","names":["OpenAI"]}

@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/llms/LMCohere/LmCohere.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\nimport { Cohere } from '@langchain/cohere';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\n\nexport class LmCohere implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Cohere Model',\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased\n\t\tname: 'lmCohere',\n\t\ticon: { light: 'file:cohere.svg', dark: 'file:cohere.dark.svg' },\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'Language Model Cohere',\n\t\tdefaults: {\n\t\t\tname: 'Cohere Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Text Completion Models'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmcohere/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'cohereApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Maximum Number of Tokens',\n\t\t\t\t\t\tname: 'maxTokens',\n\t\t\t\t\t\tdefault: 250,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tmaxValue: 32768,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Model',\n\t\t\t\t\t\tname: 'model',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tdescription: 'The name of the model to use',\n\t\t\t\t\t\tdefault: '',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sampling Temperature',\n\t\t\t\t\t\tname: 'temperature',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials('cohereApi');\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as object;\n\n\t\tconst model = new Cohere({\n\t\t\tapiKey: credentials.apiKey as string,\n\t\t\t...options,\n\t\t\tcallbacks: [new N8nLlmTracing(this)],\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this),\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAuB;AACvB,0BAMO;AAEP,0BAA6C;AAE7C,wCAA+C;AAC/C,2BAA8B;AAEvB,MAAM,SAA8B;AAAA,EAApC;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA;AAAA,MAEb,MAAM;AAAA,MACN,MAAM,EAAE,OAAO,mBAAmB,MAAM,uBAAuB;AAAA,MAC/D,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,wBAAwB;AAAA,QAC7C;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,cACN,aAAa;AAAA,gBACZ,UAAU;AAAA,cACX;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,cACb,SAAS;AAAA,YACV;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAE9D,UAAM,QAAQ,IAAI,qBAAO;AAAA,MACxB,QAAQ,YAAY;AAAA,MACpB,GAAG;AAAA,MACH,WAAW,CAAC,IAAI,mCAAc,IAAI,CAAC;AAAA,MACnC,qBAAiB,kEAA+B,IAAI;AAAA,IACrD,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":[]}
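The LmCohere.node.ts source embedded above wires the node's Options collection (maxTokens, model, temperature) straight into the LangChain Cohere constructor. A minimal sketch of that call outside n8n, with illustrative values and without the n8n-specific tracing callback and failed-attempt handler:

import { Cohere } from '@langchain/cohere';

// Mirrors the node's Options collection; the values here are illustrative.
const options = { maxTokens: 250, temperature: 0 };

const model = new Cohere({
	apiKey: process.env.COHERE_API_KEY as string, // the node reads this from the cohereApi credential
	...options,
});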

@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/llms/LMOllama/LmOllama.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\n\nimport { Ollama } from '@langchain/community/llms/ollama';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport { ollamaDescription, ollamaModel, ollamaOptions } from './description';\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\n\nexport class LmOllama implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Ollama Model',\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased\n\t\tname: 'lmOllama',\n\t\ticon: 'file:ollama.svg',\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'Language Model Ollama',\n\t\tdefaults: {\n\t\t\tname: 'Ollama Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Text Completion Models'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmollama/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\t...ollamaDescription,\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),\n\t\t\tollamaModel,\n\t\t\tollamaOptions,\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials('ollamaApi');\n\n\t\tconst modelName = this.getNodeParameter('model', itemIndex) as string;\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as object;\n\n\t\tconst model = new Ollama({\n\t\t\tbaseUrl: credentials.baseUrl as string,\n\t\t\tmodel: modelName,\n\t\t\t...options,\n\t\t\tcallbacks: [new N8nLlmTracing(this)],\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this),\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: 
model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAAuB;AACvB,0BAMO;AAEP,0BAA6C;AAE7C,yBAA8D;AAC9D,wCAA+C;AAC/C,2BAA8B;AAEvB,MAAM,SAA8B;AAAA,EAApC;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA;AAAA,MAEb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,wBAAwB;AAAA,QAC7C;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,GAAG;AAAA,MACH,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,UAAM,YAAY,KAAK,iBAAiB,SAAS,SAAS;AAC1D,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAE9D,UAAM,QAAQ,IAAI,qBAAO;AAAA,MACxB,SAAS,YAAY;AAAA,MACrB,OAAO;AAAA,MACP,GAAG;AAAA,MACH,WAAW,CAAC,IAAI,mCAAc,IAAI,CAAC;AAAA,MACnC,qBAAiB,kEAA+B,IAAI;AAAA,IACrD,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":[]}
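LmOllama.node.ts follows the same pattern but takes the base URL from the ollamaApi credential and the model name from the node's model parameter. A sketch of the equivalent construction, with illustrative credential and parameter values:

import { Ollama } from '@langchain/community/llms/ollama';

const model = new Ollama({
	baseUrl: 'http://localhost:11434', // illustrative; the node reads this from the ollamaApi credential
	model: 'llama3.2', // the node's model parameter (its default per description.ts)
	// the real node also spreads the options collection and attaches N8nLlmTracing callbacks
});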

@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/llms/LMOllama/description.ts"],"sourcesContent":["import type { INodeProperties, INodeTypeDescription } from 'n8n-workflow';\n\nexport const ollamaDescription: Partial<INodeTypeDescription> = {\n\tcredentials: [\n\t\t{\n\t\t\tname: 'ollamaApi',\n\t\t\trequired: true,\n\t\t},\n\t],\n\trequestDefaults: {\n\t\tignoreHttpStatusErrors: true,\n\t\tbaseURL: '={{ $credentials.baseUrl.replace(new RegExp(\"/$\"), \"\") }}',\n\t},\n};\n\nexport const ollamaModel: INodeProperties = {\n\tdisplayName: 'Model',\n\tname: 'model',\n\ttype: 'options',\n\tdefault: 'llama3.2',\n\tdescription:\n\t\t'The model which will generate the completion. To download models, visit <a href=\"https://ollama.ai/library\">Ollama Models Library</a>.',\n\ttypeOptions: {\n\t\tloadOptions: {\n\t\t\trouting: {\n\t\t\t\trequest: {\n\t\t\t\t\tmethod: 'GET',\n\t\t\t\t\turl: '/api/tags',\n\t\t\t\t},\n\t\t\t\toutput: {\n\t\t\t\t\tpostReceive: [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'rootProperty',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tproperty: 'models',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'setKeyValue',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tname: '={{$responseItem.name}}',\n\t\t\t\t\t\t\t\tvalue: '={{$responseItem.name}}',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'sort',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tkey: 'name',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t],\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t},\n\trouting: {\n\t\tsend: {\n\t\t\ttype: 'body',\n\t\t\tproperty: 'model',\n\t\t},\n\t},\n\trequired: true,\n};\n\nexport const ollamaOptions: INodeProperties = {\n\tdisplayName: 'Options',\n\tname: 'options',\n\tplaceholder: 'Add Option',\n\tdescription: 'Additional options to add',\n\ttype: 'collection',\n\tdefault: {},\n\toptions: [\n\t\t{\n\t\t\tdisplayName: 'Sampling Temperature',\n\t\t\tname: 'temperature',\n\t\t\tdefault: 0.7,\n\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\tdescription:\n\t\t\t\t'Controls the randomness of the generated text. Lower values make the output more focused and deterministic, while higher values make it more diverse and random.',\n\t\t\ttype: 'number',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Top K',\n\t\t\tname: 'topK',\n\t\t\tdefault: -1,\n\t\t\ttypeOptions: { maxValue: 100, minValue: -1, numberPrecision: 1 },\n\t\t\tdescription:\n\t\t\t\t'Limits the number of highest probability vocabulary tokens to consider at each step. A higher value increases diversity but may reduce coherence. Set to -1 to disable.',\n\t\t\ttype: 'number',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Top P',\n\t\t\tname: 'topP',\n\t\t\tdefault: 1,\n\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\tdescription:\n\t\t\t\t'Chooses from the smallest possible set of tokens whose cumulative probability exceeds the probability top_p. Helps generate more human-like text by reducing repetitions.',\n\t\t\ttype: 'number',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Frequency Penalty',\n\t\t\tname: 'frequencyPenalty',\n\t\t\ttype: 'number',\n\t\t\tdefault: 0.0,\n\t\t\ttypeOptions: { minValue: 0 },\n\t\t\tdescription:\n\t\t\t\t'Adjusts the penalty for tokens that have already appeared in the generated text. Higher values discourage repetition.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Keep Alive',\n\t\t\tname: 'keepAlive',\n\t\t\ttype: 'string',\n\t\t\tdefault: '5m',\n\t\t\tdescription:\n\t\t\t\t'Specifies the duration to keep the loaded model in memory after use. 
Useful for frequently used models. Format: 1h30m (1 hour 30 minutes).',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Low VRAM Mode',\n\t\t\tname: 'lowVram',\n\t\t\ttype: 'boolean',\n\t\t\tdefault: false,\n\t\t\tdescription:\n\t\t\t\t'Whether to Activate low VRAM mode, which reduces memory usage at the cost of slower generation speed. Useful for GPUs with limited memory.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Main GPU ID',\n\t\t\tname: 'mainGpu',\n\t\t\ttype: 'number',\n\t\t\tdefault: 0,\n\t\t\tdescription:\n\t\t\t\t'Specifies the ID of the GPU to use for the main computation. Only change this if you have multiple GPUs.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Context Batch Size',\n\t\t\tname: 'numBatch',\n\t\t\ttype: 'number',\n\t\t\tdefault: 512,\n\t\t\tdescription:\n\t\t\t\t'Sets the batch size for prompt processing. Larger batch sizes may improve generation speed but increase memory usage.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Context Length',\n\t\t\tname: 'numCtx',\n\t\t\ttype: 'number',\n\t\t\tdefault: 2048,\n\t\t\tdescription:\n\t\t\t\t'The maximum number of tokens to use as context for generating the next token. Smaller values reduce memory usage, while larger values provide more context to the model.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Number of GPUs',\n\t\t\tname: 'numGpu',\n\t\t\ttype: 'number',\n\t\t\tdefault: -1,\n\t\t\tdescription:\n\t\t\t\t'Specifies the number of GPUs to use for parallel processing. Set to -1 for auto-detection.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Max Tokens to Generate',\n\t\t\tname: 'numPredict',\n\t\t\ttype: 'number',\n\t\t\tdefault: -1,\n\t\t\tdescription:\n\t\t\t\t'The maximum number of tokens to generate. Set to -1 for no limit. Be cautious when setting this to a large value, as it can lead to very long outputs.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Number of CPU Threads',\n\t\t\tname: 'numThread',\n\t\t\ttype: 'number',\n\t\t\tdefault: 0,\n\t\t\tdescription:\n\t\t\t\t'Specifies the number of CPU threads to use for processing. Set to 0 for auto-detection.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Penalize Newlines',\n\t\t\tname: 'penalizeNewline',\n\t\t\ttype: 'boolean',\n\t\t\tdefault: true,\n\t\t\tdescription:\n\t\t\t\t'Whether the model will be less likely to generate newline characters, encouraging longer continuous sequences of text',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Presence Penalty',\n\t\t\tname: 'presencePenalty',\n\t\t\ttype: 'number',\n\t\t\tdefault: 0.0,\n\t\t\tdescription:\n\t\t\t\t'Adjusts the penalty for tokens based on their presence in the generated text so far. Positive values penalize tokens that have already appeared, encouraging diversity.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Repetition Penalty',\n\t\t\tname: 'repeatPenalty',\n\t\t\ttype: 'number',\n\t\t\tdefault: 1.0,\n\t\t\tdescription:\n\t\t\t\t'Adjusts the penalty factor for repeated tokens. Higher values more strongly discourage repetition. Set to 1.0 to disable repetition penalty.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Use Memory Locking',\n\t\t\tname: 'useMLock',\n\t\t\ttype: 'boolean',\n\t\t\tdefault: false,\n\t\t\tdescription:\n\t\t\t\t'Whether to lock the model in memory to prevent swapping. This can improve performance but requires sufficient available memory.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Use Memory Mapping',\n\t\t\tname: 'useMMap',\n\t\t\ttype: 'boolean',\n\t\t\tdefault: true,\n\t\t\tdescription:\n\t\t\t\t'Whether to use memory mapping for loading the model. This can reduce memory usage but may impact performance. 
Recommended to keep enabled.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Load Vocabulary Only',\n\t\t\tname: 'vocabOnly',\n\t\t\ttype: 'boolean',\n\t\t\tdefault: false,\n\t\t\tdescription:\n\t\t\t\t'Whether to only load the model vocabulary without the weights. Useful for quickly testing tokenization.',\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'Output Format',\n\t\t\tname: 'format',\n\t\t\ttype: 'options',\n\t\t\toptions: [\n\t\t\t\t{ name: 'Default', value: 'default' },\n\t\t\t\t{ name: 'JSON', value: 'json' },\n\t\t\t],\n\t\t\tdefault: 'default',\n\t\t\tdescription: 'Specifies the format of the API response',\n\t\t},\n\t],\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,oBAAmD;AAAA,EAC/D,aAAa;AAAA,IACZ;AAAA,MACC,MAAM;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AAAA,EACA,iBAAiB;AAAA,IAChB,wBAAwB;AAAA,IACxB,SAAS;AAAA,EACV;AACD;AAEO,MAAM,cAA+B;AAAA,EAC3C,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS;AAAA,EACT,aACC;AAAA,EACD,aAAa;AAAA,IACZ,aAAa;AAAA,MACZ,SAAS;AAAA,QACR,SAAS;AAAA,UACR,QAAQ;AAAA,UACR,KAAK;AAAA,QACN;AAAA,QACA,QAAQ;AAAA,UACP,aAAa;AAAA,YACZ;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,UAAU;AAAA,cACX;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO;AAAA,cACR;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,KAAK;AAAA,cACN;AAAA,YACD;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAAA,EACA,SAAS;AAAA,IACR,MAAM;AAAA,MACL,MAAM;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AAAA,EACA,UAAU;AACX;AAEO,MAAM,gBAAiC;AAAA,EAC7C,aAAa;AAAA,EACb,MAAM;AAAA,EACN,aAAa;AAAA,EACb,aAAa;AAAA,EACb,MAAM;AAAA,EACN,SAAS,CAAC;AAAA,EACV,SAAS;AAAA,IACR;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,MAC5D,aACC;AAAA,MACD,MAAM;AAAA,IACP;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa,EAAE,UAAU,KAAK,UAAU,IAAI,iBAAiB,EAAE;AAAA,MAC/D,aACC;AAAA,MACD,MAAM;AAAA,IACP;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,MAC5D,aACC;AAAA,MACD,MAAM;AAAA,IACP;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa,EAAE,UAAU,EAAE;AAAA,MAC3B,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aACC;AAAA,IACF;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAA
A,MACN,SAAS;AAAA,QACR,EAAE,MAAM,WAAW,OAAO,UAAU;AAAA,QACpC,EAAE,MAAM,QAAQ,OAAO,OAAO;AAAA,MAC/B;AAAA,MACA,SAAS;AAAA,MACT,aAAa;AAAA,IACd;AAAA,EACD;AACD;","names":[]}
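The model dropdown for the Ollama node is populated by the declarative loadOptions routing in description.ts: GET /api/tags, unwrap the models root property, map each item's name to a name/value pair, then sort by name. A hypothetical imperative equivalent of that pipeline (the helper itself is not part of the package):

async function listOllamaModels(baseUrl: string): Promise<Array<{ name: string; value: string }>> {
	// GET /api/tags against the credential's base URL (trailing slash stripped, as in requestDefaults)
	const response = await fetch(`${baseUrl.replace(/\/$/, '')}/api/tags`);
	const body = (await response.json()) as { models: Array<{ name: string }> }; // rootProperty: 'models'
	return body.models
		.map((m) => ({ name: m.name, value: m.name })) // setKeyValue from $responseItem.name
		.sort((a, b) => a.name.localeCompare(b.name)); // sort by 'name'
}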

@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/llms/LMOpenAi/LmOpenAi.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\nimport { OpenAI, type ClientOptions } from '@langchain/openai';\nimport { NodeConnectionTypes } from 'n8n-workflow';\nimport type {\n\tINodeType,\n\tINodeTypeDescription,\n\tISupplyDataFunctions,\n\tSupplyData,\n\tILoadOptionsFunctions,\n} from 'n8n-workflow';\n\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\n\ntype LmOpenAiOptions = {\n\tbaseURL?: string;\n\tfrequencyPenalty?: number;\n\tmaxTokens?: number;\n\tpresencePenalty?: number;\n\ttemperature?: number;\n\ttimeout?: number;\n\tmaxRetries?: number;\n\ttopP?: number;\n};\n\nexport class LmOpenAi implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'OpenAI Model',\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased\n\t\tname: 'lmOpenAi',\n\t\thidden: true,\n\t\ticon: { light: 'file:openAiLight.svg', dark: 'file:openAiLight.dark.svg' },\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'For advanced usage with an AI chain',\n\t\tdefaults: {\n\t\t\tname: 'OpenAI Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Text Completion Models'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmopenai/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'openAiApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\trequestDefaults: {\n\t\t\tignoreHttpStatusErrors: true,\n\t\t\tbaseURL:\n\t\t\t\t'={{ $parameter.options?.baseURL?.split(\"/\").slice(0,-1).join(\"/\") || \"https://api.openai.com\" }}',\n\t\t},\n\t\tproperties: [\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'This node is using OpenAI completions which are now deprecated. Please use the OpenAI Chat Model node instead.',\n\t\t\t\tname: 'deprecated',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'resourceLocator',\n\t\t\t\tdefault: { mode: 'list', value: 'gpt-3.5-turbo-instruct' },\n\t\t\t\trequired: true,\n\t\t\t\tdescription:\n\t\t\t\t\t'The model which will generate the completion. 
<a href=\"https://beta.openai.com/docs/models/overview\">Learn more</a>.',\n\t\t\t\tmodes: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'From List',\n\t\t\t\t\t\tname: 'list',\n\t\t\t\t\t\ttype: 'list',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tsearchListMethod: 'openAiModelSearch',\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'ID',\n\t\t\t\t\t\tname: 'id',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t\trouting: {\n\t\t\t\t\tsend: {\n\t\t\t\t\t\ttype: 'body',\n\t\t\t\t\t\tproperty: 'model',\n\t\t\t\t\t\tvalue: '={{$parameter.model.value}}',\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'When using non OpenAI models via Base URL override, not all models might be chat-compatible or support other features, like tools calling or JSON response format.',\n\t\t\t\tname: 'notice',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'/options.baseURL': [{ _cnd: { exists: true } }],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Base URL',\n\t\t\t\t\t\tname: 'baseURL',\n\t\t\t\t\t\tdefault: 'https://api.openai.com/v1',\n\t\t\t\t\t\tdescription: 'Override the default base URL for the API',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Frequency Penalty',\n\t\t\t\t\t\tname: 'frequencyPenalty',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim\",\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Maximum Number of Tokens',\n\t\t\t\t\t\tname: 'maxTokens',\n\t\t\t\t\t\tdefault: -1,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tmaxValue: 32768,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Presence Penalty',\n\t\t\t\t\t\tname: 'presencePenalty',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics\",\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sampling Temperature',\n\t\t\t\t\t\tname: 'temperature',\n\t\t\t\t\t\tdefault: 0.7,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Timeout',\n\t\t\t\t\t\tname: 'timeout',\n\t\t\t\t\t\tdefault: 60000,\n\t\t\t\t\t\tdescription: 'Maximum amount of time a request is allowed to take in milliseconds',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Max Retries',\n\t\t\t\t\t\tname: 'maxRetries',\n\t\t\t\t\t\tdefault: 2,\n\t\t\t\t\t\tdescription: 'Maximum number of retries to attempt',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top P',\n\t\t\t\t\t\tname: 'topP',\n\t\t\t\t\t\tdefault: 1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tmethods = {\n\t\tlistSearch: {\n\t\t\tasync openAiModelSearch(this: ILoadOptionsFunctions) {\n\t\t\t\tconst results = [];\n\n\t\t\t\tconst options = this.getNodeParameter('options', {}) as LmOpenAiOptions;\n\n\t\t\t\tlet uri = 'https://api.openai.com/v1/models';\n\n\t\t\t\tif (options.baseURL) {\n\t\t\t\t\turi = `${options.baseURL}/models`;\n\t\t\t\t}\n\n\t\t\t\tconst { data } = (await this.helpers.requestWithAuthentication.call(this, 'openAiApi', {\n\t\t\t\t\tmethod: 'GET',\n\t\t\t\t\turi,\n\t\t\t\t\tjson: true,\n\t\t\t\t})) as { data: Array<{ owned_by: string; id: string }> };\n\n\t\t\t\tfor (const model of data) {\n\t\t\t\t\tif (!options.baseURL && !model.owned_by?.startsWith('system')) continue;\n\t\t\t\t\tresults.push({\n\t\t\t\t\t\tname: model.id,\n\t\t\t\t\t\tvalue: model.id,\n\t\t\t\t\t});\n\t\t\t\t}\n\n\t\t\t\treturn { results };\n\t\t\t},\n\t\t},\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials('openAiApi');\n\n\t\tconst modelName = this.getNodeParameter('model', itemIndex, '', {\n\t\t\textractValue: true,\n\t\t}) as string;\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tbaseURL?: string;\n\t\t\tfrequencyPenalty?: number;\n\t\t\tmaxTokens?: number;\n\t\t\tpresencePenalty?: number;\n\t\t\ttemperature?: number;\n\t\t\ttimeout?: number;\n\t\t\tmaxRetries?: number;\n\t\t\ttopP?: number;\n\t\t};\n\n\t\tconst configuration: ClientOptions = {};\n\t\tif (options.baseURL) {\n\t\t\tconfiguration.baseURL = options.baseURL;\n\t\t}\n\n\t\tconst model = new OpenAI({\n\t\t\topenAIApiKey: credentials.apiKey as string,\n\t\t\tmodelName,\n\t\t\t...options,\n\t\t\tconfiguration,\n\t\t\ttimeout: options.timeout ?? 60000,\n\t\t\tmaxRetries: options.maxRetries ?? 
2,\n\t\t\tcallbacks: [new N8nLlmTracing(this)],\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this),\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAA2C;AAC3C,0BAAoC;AASpC,wCAA+C;AAC/C,2BAA8B;AAavB,MAAM,SAA8B;AAAA,EAApC;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA;AAAA,MAEb,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,MAAM,EAAE,OAAO,wBAAwB,MAAM,4BAA4B;AAAA,MACzE,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,wBAAwB;AAAA,QAC7C;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,iBAAiB;AAAA,QAChB,wBAAwB;AAAA,QACxB,SACC;AAAA,MACF;AAAA,MACA,YAAY;AAAA,QACX;AAAA,UACC,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS,EAAE,MAAM,QAAQ,OAAO,yBAAyB;AAAA,UACzD,UAAU;AAAA,UACV,aACC;AAAA,UACD,OAAO;AAAA,YACN;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,gBACZ,kBAAkB;AAAA,cACnB;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,YACP;AAAA,UACD;AAAA,UACA,SAAS;AAAA,YACR,MAAM;AAAA,cACL,MAAM;AAAA,cACN,UAAU;AAAA,cACV,OAAO;AAAA,YACR;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,oBAAoB,CAAC,EAAE,MAAM,EAAE,QAAQ,KAAK,EAAE,CAAC;AAAA,YAChD;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,cACN,aAAa;AAAA,gBACZ,UAAU;AAAA,cACX;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAEA,mBAAU;AAAA,MACT,YAAY;AAAA,QACX,MAAM,oBAA+C;AACpD,gBAAM,UAAU,CAAC;AAEjB,gBAAM,UAAU,KAAK,iBAAiB,WAAW,CAAC,CAAC;AAEnD,cAAI,MAAM;AAEV,cAAI,QAAQ,SAAS;AACpB,kBAAM,GAAG,QAAQ,OAAO;AAAA,UACzB;AAEA,gBAAM,EAAE,KAAK,IAAK,MAAM,KAAK,QAAQ,0BAA0B,KAAK,MAAM,aAAa;AAAA,YACtF,QAAQ;AAAA,YACR;AAAA,YACA,MAAM;AAAA,UACP,CAAC;AAED,qBAAW,SAAS,MAAM;AACzB,gBAAI,CAAC,QAAQ,WAAW,CAAC,MAAM,UAAU,WAAW,QAAQ,EAAG;AAC/D,oBAAQ,KAAK;AAAA,cACZ,MAAM,MAAM;AAAA,cACZ,OAAO,MAAM;AAAA,YACd,CAAC;AAAA,UACF;AAEA,iBAAO,EAAE,QAAQ;AAAA,QAClB;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,UAAM,YAAY,KAAK,iBAAiB,SAAS,WAAW,IAAI;AAAA,MAC/D,cAAc;AAAA,IACf,CAAC;AAED,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAA
W,CAAC,CAAC;AAW9D,UAAM,gBAA+B,CAAC;AACtC,QAAI,QAAQ,SAAS;AACpB,oBAAc,UAAU,QAAQ;AAAA,IACjC;AAEA,UAAM,QAAQ,IAAI,qBAAO;AAAA,MACxB,cAAc,YAAY;AAAA,MAC1B;AAAA,MACA,GAAG;AAAA,MACH;AAAA,MACA,SAAS,QAAQ,WAAW;AAAA,MAC5B,YAAY,QAAQ,cAAc;AAAA,MAClC,WAAW,CAAC,IAAI,mCAAc,IAAI,CAAC;AAAA,MACnC,qBAAiB,kEAA+B,IAAI;AAAA,IACrD,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":[]}
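The deprecated LmOpenAi node's openAiModelSearch builds the models URI from the optional Base URL override and, when no override is set, keeps only models whose owned_by starts with 'system'. Restated as standalone sketches (the helper names are illustrative):

function modelsUri(baseURL?: string): string {
	return baseURL ? `${baseURL}/models` : 'https://api.openai.com/v1/models';
}

function keepModel(model: { owned_by?: string; id: string }, baseURL?: string): boolean {
	// With a Base URL override every returned model is listed; against api.openai.com
	// only 'system*'-owned models survive, matching the `continue` in the loop above.
	return Boolean(baseURL) || Boolean(model.owned_by?.startsWith('system'));
}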

@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.ts"],"sourcesContent":["/* eslint-disable n8n-nodes-base/node-dirname-against-convention */\nimport { HuggingFaceInference } from '@langchain/community/llms/hf';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\n\nexport class LmOpenHuggingFaceInference implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Hugging Face Inference Model',\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased\n\t\tname: 'lmOpenHuggingFaceInference',\n\t\ticon: 'file:huggingface.svg',\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'Language Model HuggingFaceInference',\n\t\tdefaults: {\n\t\t\tname: 'Hugging Face Inference Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Text Completion Models'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmopenhuggingfaceinference/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node\n\t\tinputs: [],\n\t\t// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'huggingFaceApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'string',\n\t\t\t\tdefault: 'gpt2',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Custom Inference Endpoint',\n\t\t\t\t\t\tname: 'endpointUrl',\n\t\t\t\t\t\tdefault: '',\n\t\t\t\t\t\tdescription: 'Custom endpoint URL',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Frequency Penalty',\n\t\t\t\t\t\tname: 'frequencyPenalty',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim\",\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Maximum Number of Tokens',\n\t\t\t\t\t\tname: 'maxTokens',\n\t\t\t\t\t\tdefault: 128,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'The maximum number of tokens to generate in the completion. 
Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tmaxValue: 32768,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Presence Penalty',\n\t\t\t\t\t\tname: 'presencePenalty',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics\",\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sampling Temperature',\n\t\t\t\t\t\tname: 'temperature',\n\t\t\t\t\t\tdefault: 1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top K',\n\t\t\t\t\t\tname: 'topK',\n\t\t\t\t\t\tdefault: 1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls the top tokens to consider within the sample operation to create new text',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top P',\n\t\t\t\t\t\tname: 'topP',\n\t\t\t\t\t\tdefault: 1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials('huggingFaceApi');\n\n\t\tconst modelName = this.getNodeParameter('model', itemIndex) as string;\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as object;\n\n\t\tconst model = new HuggingFaceInference({\n\t\t\tmodel: modelName,\n\t\t\tapiKey: credentials.apiKey as string,\n\t\t\t...options,\n\t\t\tcallbacks: [new N8nLlmTracing(this)],\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this),\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: 
model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,gBAAqC;AACrC,0BAMO;AAEP,0BAA6C;AAE7C,wCAA+C;AAC/C,2BAA8B;AAEvB,MAAM,2BAAgD;AAAA,EAAtD;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA;AAAA,MAEb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,wBAAwB;AAAA,QAC7C;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA;AAAA,MAEA,QAAQ,CAAC;AAAA;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,cACN,aAAa;AAAA,gBACZ,UAAU;AAAA,cACX;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAAe,gBAAgB;AAE9D,UAAM,YAAY,KAAK,iBAAiB,SAAS,SAAS;AAC1D,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAE9D,UAAM,QAAQ,IAAI,+BAAqB;AAAA,MACtC,OAAO;AAAA,MACP,QAAQ,YAAY;AAAA,MACpB,GAAG;AAAA,MACH,WAAW,CAAC,IAAI,mCAAc,IAAI,CAAC;AAAA,MACnC,qBAAiB,kEAA+B,IAAI;AAAA,IACrD,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":[]}
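LmOpenHuggingFaceInference.node.ts again spreads the options collection into the LangChain wrapper; the Custom Inference Endpoint option (endpointUrl) is its main difference from the other text-completion nodes. A sketch with illustrative values:

import { HuggingFaceInference } from '@langchain/community/llms/hf';

const model = new HuggingFaceInference({
	model: 'gpt2', // the node's default model parameter
	apiKey: process.env.HUGGINGFACE_API_KEY as string, // read from the huggingFaceApi credential in the node
	endpointUrl: 'https://example.endpoints.huggingface.cloud', // illustrative Custom Inference Endpoint
	// the real node also spreads temperature/topK/topP/penalties and attaches N8nLlmTracing callbacks
});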