@n8n/n8n-nodes-langchain 1.106.2 → 1.107.0
This diff reflects the changes between publicly available package versions as they appear in their public registry, and is provided for informational purposes only.
- package/dist/credentials/ZepApi.credentials.js +6 -0
- package/dist/credentials/ZepApi.credentials.js.map +1 -1
- package/dist/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.js +1 -1
- package/dist/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.js.map +1 -1
- package/dist/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.js +1 -1
- package/dist/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.js.map +1 -1
- package/dist/nodes/llms/LMOpenAi/LmOpenAi.node.js +1 -1
- package/dist/nodes/llms/LMOpenAi/LmOpenAi.node.js.map +1 -1
- package/dist/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.js +1 -1
- package/dist/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.js.map +1 -1
- package/dist/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.js +1 -1
- package/dist/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.js.map +1 -1
- package/dist/nodes/llms/LmChatVercelAiGateway/LmChatVercelAiGateway.node.js +1 -1
- package/dist/nodes/llms/LmChatVercelAiGateway/LmChatVercelAiGateway.node.js.map +1 -1
- package/dist/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.js +1 -1
- package/dist/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.js.map +1 -1
- package/dist/nodes/mcp/McpClientTool/McpClientTool.node.js +22 -1
- package/dist/nodes/mcp/McpClientTool/McpClientTool.node.js.map +1 -1
- package/dist/nodes/mcp/McpClientTool/utils.js +4 -2
- package/dist/nodes/mcp/McpClientTool/utils.js.map +1 -1
- package/dist/nodes/memory/MemoryZep/MemoryZep.node.js +7 -0
- package/dist/nodes/memory/MemoryZep/MemoryZep.node.js.map +1 -1
- package/dist/nodes/vector_store/VectorStoreMongoDBAtlas/VectorStoreMongoDBAtlas.node.js +93 -83
- package/dist/nodes/vector_store/VectorStoreMongoDBAtlas/VectorStoreMongoDBAtlas.node.js.map +1 -1
- package/dist/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.js +7 -0
- package/dist/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.js.map +1 -1
- package/dist/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.js +6 -0
- package/dist/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.js.map +1 -1
- package/dist/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.js +6 -0
- package/dist/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.js.map +1 -1
- package/dist/nodes/vector_store/shared/createVectorStoreNode/types.js.map +1 -1
- package/dist/nodes/vendors/GoogleGemini/transport/index.js +2 -2
- package/dist/nodes/vendors/GoogleGemini/transport/index.js.map +1 -1
- package/dist/nodes/vendors/OpenAi/actions/text/message.operation.js +1 -1
- package/dist/nodes/vendors/OpenAi/actions/text/message.operation.js.map +1 -1
- package/dist/types/credentials.json +1 -1
- package/dist/types/nodes.json +5 -5
- package/package.json +11 -11
package/dist/types/nodes.json (CHANGED)
@@ -45,7 +45,7 @@
45
{"displayName":"Cohere Model","name":"lmCohere","group":["transform"],"version":1,"description":"Language Model Cohere","defaults":{"name":"Cohere Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Text Completion Models"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmcohere/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"cohereApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":250,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Model","name":"model","type":"string","description":"The name of the model to use","default":""},{"displayName":"Sampling Temperature","name":"temperature","default":0,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMCohere/cohere.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMCohere/cohere.dark.svg"}},
46
{"displayName":"Ollama Model","name":"lmOllama","group":["transform"],"version":1,"description":"Language Model Ollama","defaults":{"name":"Ollama Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Text Completion Models"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmollama/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"ollamaApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.baseUrl.replace(new RegExp(\"/$\"), \"\") }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","default":"llama3.2","description":"The model which will generate the completion. To download models, visit <a href=\"https://ollama.ai/library\">Ollama Models Library</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/api/tags"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"required":true},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls the randomness of the generated text. Lower values make the output more focused and deterministic, while higher values make it more diverse and random.","type":"number"},{"displayName":"Top K","name":"topK","default":-1,"typeOptions":{"maxValue":100,"minValue":-1,"numberPrecision":1},"description":"Limits the number of highest probability vocabulary tokens to consider at each step. A higher value increases diversity but may reduce coherence. Set to -1 to disable.","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Chooses from the smallest possible set of tokens whose cumulative probability exceeds the probability top_p. Helps generate more human-like text by reducing repetitions.","type":"number"},{"displayName":"Frequency Penalty","name":"frequencyPenalty","type":"number","default":0,"typeOptions":{"minValue":0},"description":"Adjusts the penalty for tokens that have already appeared in the generated text. Higher values discourage repetition."},{"displayName":"Keep Alive","name":"keepAlive","type":"string","default":"5m","description":"Specifies the duration to keep the loaded model in memory after use. Useful for frequently used models. Format: 1h30m (1 hour 30 minutes)."},{"displayName":"Low VRAM Mode","name":"lowVram","type":"boolean","default":false,"description":"Whether to Activate low VRAM mode, which reduces memory usage at the cost of slower generation speed. 
Useful for GPUs with limited memory."},{"displayName":"Main GPU ID","name":"mainGpu","type":"number","default":0,"description":"Specifies the ID of the GPU to use for the main computation. Only change this if you have multiple GPUs."},{"displayName":"Context Batch Size","name":"numBatch","type":"number","default":512,"description":"Sets the batch size for prompt processing. Larger batch sizes may improve generation speed but increase memory usage."},{"displayName":"Context Length","name":"numCtx","type":"number","default":2048,"description":"The maximum number of tokens to use as context for generating the next token. Smaller values reduce memory usage, while larger values provide more context to the model."},{"displayName":"Number of GPUs","name":"numGpu","type":"number","default":-1,"description":"Specifies the number of GPUs to use for parallel processing. Set to -1 for auto-detection."},{"displayName":"Max Tokens to Generate","name":"numPredict","type":"number","default":-1,"description":"The maximum number of tokens to generate. Set to -1 for no limit. Be cautious when setting this to a large value, as it can lead to very long outputs."},{"displayName":"Number of CPU Threads","name":"numThread","type":"number","default":0,"description":"Specifies the number of CPU threads to use for processing. Set to 0 for auto-detection."},{"displayName":"Penalize Newlines","name":"penalizeNewline","type":"boolean","default":true,"description":"Whether the model will be less likely to generate newline characters, encouraging longer continuous sequences of text"},{"displayName":"Presence Penalty","name":"presencePenalty","type":"number","default":0,"description":"Adjusts the penalty for tokens based on their presence in the generated text so far. Positive values penalize tokens that have already appeared, encouraging diversity."},{"displayName":"Repetition Penalty","name":"repeatPenalty","type":"number","default":1,"description":"Adjusts the penalty factor for repeated tokens. Higher values more strongly discourage repetition. Set to 1.0 to disable repetition penalty."},{"displayName":"Use Memory Locking","name":"useMLock","type":"boolean","default":false,"description":"Whether to lock the model in memory to prevent swapping. This can improve performance but requires sufficient available memory."},{"displayName":"Use Memory Mapping","name":"useMMap","type":"boolean","default":true,"description":"Whether to use memory mapping for loading the model. This can reduce memory usage but may impact performance. Recommended to keep enabled."},{"displayName":"Load Vocabulary Only","name":"vocabOnly","type":"boolean","default":false,"description":"Whether to only load the model vocabulary without the weights. Useful for quickly testing tokenization."},{"displayName":"Output Format","name":"format","type":"options","options":[{"name":"Default","value":"default"},{"name":"JSON","value":"json"}],"default":"default","description":"Specifies the format of the API response"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMOllama/ollama.svg"},
47
{"displayName":"Hugging Face Inference Model","name":"lmOpenHuggingFaceInference","group":["transform"],"version":1,"description":"Language Model HuggingFaceInference","defaults":{"name":"Hugging Face Inference Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Text Completion Models"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmopenhuggingfaceinference/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"huggingFaceApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"string","default":"mistralai/Mistral-Nemo-Base-2407"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Custom Inference Endpoint","name":"endpointUrl","default":"","description":"Custom endpoint URL","type":"string"},{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":128,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Top K","name":"topK","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls the top tokens to consider within the sample operation to create new text","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMOpenHuggingFaceInference/huggingface.svg"},
-48
{"displayName":"MCP Client Tool","name":"mcpClientTool","group":["output"],"version":[1,1.1],"description":"Connect tools from an MCP Server","defaults":{"name":"MCP Client"},"codex":{"categories":["AI"],"subcategories":{"AI":["Model Context Protocol","Tools"]},"alias":["Model Context Protocol","MCP Client"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolmcp/"}]}},"inputs":[],"outputs":[{"type":"ai_tool","displayName":"Tools"}],"credentials":[{"name":"httpBearerAuth","required":true,"displayOptions":{"show":{"authentication":["bearerAuth"]}}},{"name":"httpHeaderAuth","required":true,"displayOptions":{"show":{"authentication":["headerAuth"]}}}],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"SSE Endpoint","name":"sseEndpoint","type":"string","description":"SSE Endpoint of your MCP server","placeholder":"e.g. https://my-mcp-server.ai/sse","default":"","required":true,"displayOptions":{"show":{"@version":[1]}}},{"displayName":"Endpoint","name":"endpointUrl","type":"string","description":"Endpoint of your MCP server","placeholder":"e.g. https://my-mcp-server.ai/mcp","default":"","required":true,"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Server Transport","name":"serverTransport","type":"options","options":[{"name":"Server Sent Events (Deprecated)","value":"sse"},{"name":"HTTP Streamable","value":"httpStreamable"}],"default":"sse","description":"The transport used by your endpoint","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Authentication","name":"authentication","type":"options","options":[{"name":"Bearer Auth","value":"bearerAuth"},{"name":"Header Auth","value":"headerAuth"},{"name":"None","value":"none"}],"default":"none","description":"The way to authenticate with your endpoint"},{"displayName":"Credentials","name":"credentials","type":"credentials","default":"","displayOptions":{"show":{"authentication":["headerAuth","bearerAuth"]}}},{"displayName":"Tools to Include","name":"include","type":"options","description":"How to select the tools you want to be exposed to the AI Agent","default":"all","options":[{"name":"All","value":"all","description":"Also include all unchanged fields from the input"},{"name":"Selected","value":"selected","description":"Also include the tools listed in the parameter \"Tools to Include\""},{"name":"All Except","value":"except","description":"Exclude the tools listed in the parameter \"Tools to Exclude\""}]},{"displayName":"Tools to Include","name":"includeTools","type":"multiOptions","default":[],"description":"Choose from the list, or specify IDs using an <a href=\"https://docs.n8n.io/code/expressions/\">expression</a>","typeOptions":{"loadOptionsMethod":"getTools","loadOptionsDependsOn":["sseEndpoint"]},"displayOptions":{"show":{"include":["selected"]}}},{"displayName":"Tools to Exclude","name":"excludeTools","type":"multiOptions","default":[],"description":"Choose from the list, or specify IDs using an <a 
href=\"https://docs.n8n.io/code/expressions/\">expression</a>","typeOptions":{"loadOptionsMethod":"getTools"},"displayOptions":{"show":{"include":["except"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/mcp/mcp.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/mcp/mcp.dark.svg"}},
+48
{"displayName":"MCP Client Tool","name":"mcpClientTool","group":["output"],"version":[1,1.1],"description":"Connect tools from an MCP Server","defaults":{"name":"MCP Client"},"codex":{"categories":["AI"],"subcategories":{"AI":["Model Context Protocol","Tools"]},"alias":["Model Context Protocol","MCP Client"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolmcp/"}]}},"inputs":[],"outputs":[{"type":"ai_tool","displayName":"Tools"}],"credentials":[{"name":"httpBearerAuth","required":true,"displayOptions":{"show":{"authentication":["bearerAuth"]}}},{"name":"httpHeaderAuth","required":true,"displayOptions":{"show":{"authentication":["headerAuth"]}}}],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"SSE Endpoint","name":"sseEndpoint","type":"string","description":"SSE Endpoint of your MCP server","placeholder":"e.g. https://my-mcp-server.ai/sse","default":"","required":true,"displayOptions":{"show":{"@version":[1]}}},{"displayName":"Endpoint","name":"endpointUrl","type":"string","description":"Endpoint of your MCP server","placeholder":"e.g. https://my-mcp-server.ai/mcp","default":"","required":true,"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Server Transport","name":"serverTransport","type":"options","options":[{"name":"Server Sent Events (Deprecated)","value":"sse"},{"name":"HTTP Streamable","value":"httpStreamable"}],"default":"sse","description":"The transport used by your endpoint","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Authentication","name":"authentication","type":"options","options":[{"name":"Bearer Auth","value":"bearerAuth"},{"name":"Header Auth","value":"headerAuth"},{"name":"None","value":"none"}],"default":"none","description":"The way to authenticate with your endpoint"},{"displayName":"Credentials","name":"credentials","type":"credentials","default":"","displayOptions":{"show":{"authentication":["headerAuth","bearerAuth"]}}},{"displayName":"Tools to Include","name":"include","type":"options","description":"How to select the tools you want to be exposed to the AI Agent","default":"all","options":[{"name":"All","value":"all","description":"Also include all unchanged fields from the input"},{"name":"Selected","value":"selected","description":"Also include the tools listed in the parameter \"Tools to Include\""},{"name":"All Except","value":"except","description":"Exclude the tools listed in the parameter \"Tools to Exclude\""}]},{"displayName":"Tools to Include","name":"includeTools","type":"multiOptions","default":[],"description":"Choose from the list, or specify IDs using an <a href=\"https://docs.n8n.io/code/expressions/\">expression</a>","typeOptions":{"loadOptionsMethod":"getTools","loadOptionsDependsOn":["sseEndpoint"]},"displayOptions":{"show":{"include":["selected"]}}},{"displayName":"Tools to Exclude","name":"excludeTools","type":"multiOptions","default":[],"description":"Choose from the list, or specify IDs using an <a href=\"https://docs.n8n.io/code/expressions/\">expression</a>","typeOptions":{"loadOptionsMethod":"getTools"},"displayOptions":{"show":{"include":["except"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional 
options to add","type":"collection","default":{},"options":[{"displayName":"Timeout","name":"timeout","type":"number","typeOptions":{"minValue":1},"default":60000,"description":"Time in ms to wait for tool calls to finish"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/mcp/mcp.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/mcp/mcp.dark.svg"}},
49
{"displayName":"MCP Server Trigger","name":"mcpTrigger","group":["trigger"],"version":[1,1.1,2],"description":"Expose n8n tools as an MCP Server endpoint","activationMessage":"You can now connect your MCP Clients to the URL, using SSE or Streamable HTTP transports.","defaults":{"name":"MCP Server Trigger"},"codex":{"categories":["AI","Core Nodes"],"subcategories":{"AI":["Root Nodes","Model Context Protocol"],"Core Nodes":["Other Trigger Nodes"]},"alias":["Model Context Protocol","MCP Server"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-langchain.mcptrigger/"}]}},"triggerPanel":{"header":"Listen for MCP events","executionsHelp":{"inactive":"This trigger has two modes: test and production.<br /><br /><b>Use test mode while you build your workflow</b>. Click the 'execute step' button, then make an MCP request to the test URL. The executions will show up in the editor.<br /><br /><b>Use production mode to run your workflow automatically</b>. <a data-key='activate'>Activate</a> the workflow, then make requests to the production URL. These executions will show up in the <a data-key='executions'>executions list</a>, but not the editor.","active":"This trigger has two modes: test and production.<br /><br /><b>Use test mode while you build your workflow</b>. Click the 'execute step' button, then make an MCP request to the test URL. The executions will show up in the editor.<br /><br /><b>Use production mode to run your workflow automatically</b>. Since your workflow is activated, you can make requests to the production URL. These executions will show up in the <a data-key='executions'>executions list</a>, but not the editor."},"activationHint":"Once you’ve finished building your workflow, run it without having to click this button by using the production URL."},"inputs":[{"type":"ai_tool","displayName":"Tools"}],"outputs":[],"credentials":[{"name":"httpBearerAuth","required":true,"displayOptions":{"show":{"authentication":["bearerAuth"]}}},{"name":"httpHeaderAuth","required":true,"displayOptions":{"show":{"authentication":["headerAuth"]}}}],"properties":[{"displayName":"Authentication","name":"authentication","type":"options","options":[{"name":"None","value":"none"},{"name":"Bearer Auth","value":"bearerAuth"},{"name":"Header Auth","value":"headerAuth"}],"default":"none","description":"The way to authenticate"},{"displayName":"Path","name":"path","type":"string","default":"","placeholder":"webhook","required":true,"description":"The base path for this MCP server"}],"webhooks":[{"name":"setup","httpMethod":"GET","responseMode":"onReceived","isFullPath":true,"path":"={{$parameter[\"path\"]}}{{parseFloat($nodeVersion)<2 ? '/sse' : ''}}","nodeType":"mcp","ndvHideMethod":true,"ndvHideUrl":false},{"name":"default","httpMethod":"POST","responseMode":"onReceived","isFullPath":true,"path":"={{$parameter[\"path\"]}}{{parseFloat($nodeVersion)<2 ? '/messages' : ''}}","nodeType":"mcp","ndvHideMethod":true,"ndvHideUrl":true},{"name":"default","httpMethod":"DELETE","responseMode":"onReceived","isFullPath":true,"path":"={{$parameter[\"path\"]}}","nodeType":"mcp","ndvHideMethod":true,"ndvHideUrl":true}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/mcp/mcp.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/mcp/mcp.dark.svg"}},
50
{"displayName":"Simple Memory","name":"memoryBufferWindow","icon":"fa:database","iconColor":"black","group":["transform"],"version":[1,1.1,1.2,1.3],"description":"Stores in n8n memory, so no credentials required","defaults":{"name":"Simple Memory"},"codex":{"categories":["AI"],"subcategories":{"AI":["Memory"],"Memory":["For beginners"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memorybufferwindow/"}]}},"inputs":[],"outputs":["ai_memory"],"outputNames":["Memory"],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Session Key","name":"sessionKey","type":"string","default":"chat_history","description":"The key to use to store the memory in the workflow data","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Session ID","name":"sessionKey","type":"string","default":"={{ $json.sessionId }}","description":"The key to use to store the memory","displayOptions":{"show":{"@version":[1.1]}}},{"displayName":"Session ID","name":"sessionIdType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"fromInput","description":"Looks for an input field called 'sessionId' that is coming from a directly connected Chat Trigger"},{"name":"Define below","value":"customKey","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"fromInput","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Session Key From Previous Node","name":"sessionKey","type":"string","default":"={{ $json.sessionId }}","disabledOptions":{"show":{"sessionIdType":["fromInput"]}},"displayOptions":{"show":{"sessionIdType":["fromInput"],"@version":[{"_cnd":{"gte":1.3}}]}}},{"displayName":"Key","name":"sessionKey","type":"string","default":"","description":"The key to use to store session ID in the memory","displayOptions":{"show":{"sessionIdType":["customKey"]}}},{"displayName":"Context Window Length","name":"contextWindowLength","type":"number","default":5,"hint":"How many past interactions the model receives as context"}]},
51
{"displayName":"Motorhead","name":"memoryMotorhead","icon":"fa:file-export","iconColor":"black","group":["transform"],"version":[1,1.1,1.2,1.3],"description":"Use Motorhead Memory","defaults":{"name":"Motorhead"},"codex":{"categories":["AI"],"subcategories":{"AI":["Memory"],"Memory":["Other memories"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memorymotorhead/"}]}},"inputs":[],"outputs":["ai_memory"],"outputNames":["Memory"],"credentials":[{"name":"motorheadApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Session ID","name":"sessionId","type":"string","required":true,"default":"","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Session ID","name":"sessionId","type":"string","default":"={{ $json.sessionId }}","description":"The key to use to store the memory","displayOptions":{"show":{"@version":[1.1]}}},{"displayName":"Session ID","name":"sessionIdType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"fromInput","description":"Looks for an input field called 'sessionId' that is coming from a directly connected Chat Trigger"},{"name":"Define below","value":"customKey","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"fromInput","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Session Key From Previous Node","name":"sessionKey","type":"string","default":"={{ $json.sessionId }}","disabledOptions":{"show":{"sessionIdType":["fromInput"]}},"displayOptions":{"show":{"sessionIdType":["fromInput"],"@version":[{"_cnd":{"gte":1.3}}]}}},{"displayName":"Key","name":"sessionKey","type":"string","default":"","description":"The key to use to store session ID in the memory","displayOptions":{"show":{"sessionIdType":["customKey"]}}}]},
@@ -55,7 +55,7 @@
55
{"displayName":"Chat Memory Manager","name":"memoryManager","icon":"fa:database","iconColor":"black","group":["transform"],"version":[1,1.1],"description":"Manage chat messages memory and use it in the workflow","defaults":{"name":"Chat Memory Manager"},"codex":{"categories":["AI"],"subcategories":{"AI":["Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memorymanager/"}]}},"inputs":[{"displayName":"","type":"main"},{"displayName":"Memory","type":"ai_memory","required":true,"maxConnections":1}],"outputs":[{"displayName":"","type":"main"}],"properties":[{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"load","options":[{"name":"Get Many Messages","description":"Retrieve chat messages from connected memory","value":"load"},{"name":"Insert Messages","description":"Insert chat messages into connected memory","value":"insert"},{"name":"Delete Messages","description":"Delete chat messages from connected memory","value":"delete"}]},{"displayName":"Insert Mode","name":"insertMode","type":"options","description":"Choose how new messages are inserted into the memory","noDataExpression":true,"default":"insert","options":[{"name":"Insert Messages","value":"insert","description":"Add messages alongside existing ones"},{"name":"Override All Messages","value":"override","description":"Replace the current memory with new messages"}],"displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Delete Mode","name":"deleteMode","type":"options","description":"How messages are deleted from memory","noDataExpression":true,"default":"lastN","options":[{"name":"Last N","value":"lastN","description":"Delete the last N messages"},{"name":"All Messages","value":"all","description":"Clear all messages from memory"}],"displayOptions":{"show":{"mode":["delete"]}}},{"displayName":"Chat Messages","name":"messages","description":"Chat messages to insert into memory","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add message","options":[{"name":"messageValues","displayName":"Message","values":[{"displayName":"Type Name or ID","name":"type","type":"options","options":[{"name":"AI","value":"ai"},{"name":"System","value":"system"},{"name":"User","value":"user"}],"default":"system"},{"displayName":"Message","name":"message","type":"string","required":true,"default":""},{"displayName":"Hide Message in Chat","name":"hideFromUI","type":"boolean","required":true,"default":false,"description":"Whether to hide the message from the chat UI"}]}],"displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Messages Count","name":"lastMessagesCount","type":"number","description":"The amount of last messages to delete","default":2,"displayOptions":{"show":{"mode":["delete"],"deleteMode":["lastN"]}}},{"displayName":"Simplify Output","name":"simplifyOutput","type":"boolean","description":"Whether to simplify the output to only include the sender and the text","default":true,"displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Group Messages","name":"groupMessages","type":"boolean","default":true,"description":"Whether to group messages into a single item or return each message as a separate item"}],"displayOptions":{"show":{"mode":["load"]}}}]},
56
{"displayName":"Chat Messages Retriever","name":"memoryChatRetriever","icon":"fa:database","iconColor":"black","group":["transform"],"hidden":true,"version":1,"description":"Retrieve chat messages from memory and use them in the workflow","defaults":{"name":"Chat Messages Retriever"},"codex":{"categories":["AI"],"subcategories":{"AI":["Miscellaneous"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memorymanager/"}]}},"inputs":["main",{"displayName":"Memory","maxConnections":1,"type":"ai_memory","required":true}],"outputs":["main"],"properties":[{"displayName":"This node is deprecated. Use 'Chat Memory Manager' node instead.","type":"notice","default":"","name":"deprecatedNotice"},{"displayName":"Simplify Output","name":"simplifyOutput","type":"boolean","description":"Whether to simplify the output to only include the sender and the text","default":true}]},
57
{"displayName":"Xata","name":"memoryXata","group":["transform"],"version":[1,1.1,1.2,1.3,1.4],"description":"Use Xata Memory","defaults":{"name":"Xata","color":"#1321A7"},"codex":{"categories":["AI"],"subcategories":{"AI":["Memory"],"Memory":["Other memories"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memoryxata/"}]}},"inputs":[],"outputs":["ai_memory"],"outputNames":["Memory"],"credentials":[{"name":"xataApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Session ID","name":"sessionId","type":"string","required":true,"default":"","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Session ID","name":"sessionId","type":"string","default":"={{ $json.sessionId }}","description":"The key to use to store the memory","displayOptions":{"show":{"@version":[1.1]}}},{"displayName":"Session ID","name":"sessionIdType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"fromInput","description":"Looks for an input field called 'sessionId' that is coming from a directly connected Chat Trigger"},{"name":"Define below","value":"customKey","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"fromInput","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Key","name":"sessionKey","type":"string","default":"","description":"The key to use to store session ID in the memory","displayOptions":{"show":{"sessionIdType":["customKey"]}}},{"displayName":"Session Key From Previous Node","name":"sessionKey","type":"string","default":"={{ $json.sessionId }}","disabledOptions":{"show":{"sessionIdType":["fromInput"]}},"displayOptions":{"show":{"sessionIdType":["fromInput"],"@version":[{"_cnd":{"gte":1.4}}]}}},{"displayName":"Context Window Length","name":"contextWindowLength","type":"number","default":5,"hint":"How many past interactions the model receives as context","displayOptions":{"hide":{"@version":[{"_cnd":{"lt":1.3}}]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/memory/MemoryXata/xata.svg"},
-58
{"displayName":"Zep","name":"memoryZep","group":["transform"],"version":[1,1.1,1.2,1.3],"description":"Use Zep Memory","defaults":{"name":"Zep"},"codex":{"categories":["AI"],"subcategories":{"AI":["Memory"],"Memory":["Other memories"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memoryzep/"}]}},"inputs":[],"outputs":["ai_memory"],"outputNames":["Memory"],"credentials":[{"name":"zepApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Only works with Zep Cloud and Community edition <= v0.27.2","name":"supportedVersions","type":"notice","default":""},{"displayName":"Session ID","name":"sessionId","type":"string","required":true,"default":"","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Session ID","name":"sessionId","type":"string","default":"={{ $json.sessionId }}","description":"The key to use to store the memory","displayOptions":{"show":{"@version":[1.1]}}},{"displayName":"Session ID","name":"sessionIdType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"fromInput","description":"Looks for an input field called 'sessionId' that is coming from a directly connected Chat Trigger"},{"name":"Define below","value":"customKey","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"fromInput","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Session Key From Previous Node","name":"sessionKey","type":"string","default":"={{ $json.sessionId }}","disabledOptions":{"show":{"sessionIdType":["fromInput"]}},"displayOptions":{"show":{"sessionIdType":["fromInput"],"@version":[{"_cnd":{"gte":1.3}}]}}},{"displayName":"Key","name":"sessionKey","type":"string","default":"","description":"The key to use to store session ID in the memory","displayOptions":{"show":{"sessionIdType":["customKey"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/memory/MemoryZep/zep.png"},
+58
{"displayName":"Zep","name":"memoryZep","hidden":true,"group":["transform"],"version":[1,1.1,1.2,1.3],"description":"Use Zep Memory","defaults":{"name":"Zep"},"codex":{"categories":["AI"],"subcategories":{"AI":["Memory"],"Memory":["Other memories"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memoryzep/"}]}},"inputs":[],"outputs":["ai_memory"],"outputNames":["Memory"],"credentials":[{"name":"zepApi","required":true}],"properties":[{"displayName":"This Zep integration is deprecated and will be removed in a future version.","name":"deprecationNotice","type":"notice","default":""},{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Only works with Zep Cloud and Community edition <= v0.27.2","name":"supportedVersions","type":"notice","default":""},{"displayName":"Session ID","name":"sessionId","type":"string","required":true,"default":"","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Session ID","name":"sessionId","type":"string","default":"={{ $json.sessionId }}","description":"The key to use to store the memory","displayOptions":{"show":{"@version":[1.1]}}},{"displayName":"Session ID","name":"sessionIdType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"fromInput","description":"Looks for an input field called 'sessionId' that is coming from a directly connected Chat Trigger"},{"name":"Define below","value":"customKey","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"fromInput","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Session Key From Previous Node","name":"sessionKey","type":"string","default":"={{ $json.sessionId }}","disabledOptions":{"show":{"sessionIdType":["fromInput"]}},"displayOptions":{"show":{"sessionIdType":["fromInput"],"@version":[{"_cnd":{"gte":1.3}}]}}},{"displayName":"Key","name":"sessionKey","type":"string","default":"","description":"The key to use to store session ID in the memory","displayOptions":{"show":{"sessionIdType":["customKey"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/memory/MemoryZep/zep.png"},
59
{"displayName":"Auto-fixing Output Parser","name":"outputParserAutofixing","icon":"fa:tools","iconColor":"black","group":["transform"],"version":1,"description":"Deprecated, use structured output parser","defaults":{"name":"Auto-fixing Output Parser"},"codex":{"categories":["AI"],"subcategories":{"AI":["Output Parsers"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.outputparserautofixing/"}]}},"inputs":[{"displayName":"Model","maxConnections":1,"type":"ai_languageModel","required":true},{"displayName":"Output Parser","maxConnections":1,"required":true,"type":"ai_outputParser"}],"outputs":["ai_outputParser"],"outputNames":["Output Parser"],"properties":[{"displayName":"This node wraps another output parser. If the first one fails it calls an LLM to fix the format","name":"info","type":"notice","default":""},{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Retry Prompt","name":"prompt","type":"string","default":"Instructions:\n--------------\n{instructions}\n--------------\nCompletion:\n--------------\n{completion}\n--------------\n\nAbove, the Completion did not satisfy the constraints given in the Instructions.\nError:\n--------------\n{error}\n--------------\n\nPlease try again. Please only respond with an answer that satisfies the constraints laid out in the Instructions:","typeOptions":{"rows":10},"hint":"Should include \"{error}\", \"{instructions}\", and \"{completion}\" placeholders","description":"Prompt template used for fixing the output. Uses placeholders: \"{instructions}\" for parsing rules, \"{completion}\" for the failed attempt, and \"{error}\" for the validation error message."}]}]},
60
{"displayName":"Item List Output Parser","name":"outputParserItemList","icon":"fa:bars","iconColor":"black","group":["transform"],"version":1,"description":"Return the results as separate items","defaults":{"name":"Item List Output Parser"},"codex":{"categories":["AI"],"subcategories":{"AI":["Output Parsers"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.outputparseritemlist/"}]}},"inputs":[],"outputs":["ai_outputParser"],"outputNames":["Output Parser"],"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Number Of Items","name":"numberOfItems","type":"number","default":-1,"description":"Defines how many items should be returned maximally. If set to -1, there is no limit."},{"displayName":"Separator","name":"separator","type":"string","default":"\\n","description":"Defines the separator that should be used to split the results into separate items. Defaults to a new line but can be changed depending on the data that should be returned."}]}]},
61
{"displayName":"Structured Output Parser","name":"outputParserStructured","icon":"fa:code","iconColor":"black","group":["transform"],"version":[1,1.1,1.2,1.3],"defaultVersion":1.3,"description":"Return data in a defined JSON format","defaults":{"name":"Structured Output Parser"},"codex":{"alias":["json","zod"],"categories":["AI"],"subcategories":{"AI":["Output Parsers"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.outputparserstructured/"}]}},"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tif (parameters?.autoFix) {\n\t\t\t\t\treturn [\n\t\t\t\t\t\t{ displayName: 'Model', maxConnections: 1, type: \"ai_languageModel\", required: true }\n\t\t\t\t\t];\n\t\t\t\t}\n\n\t\t\t\treturn [];\n\t\t\t})($parameter)\n\t\t}}","outputs":["ai_outputParser"],"outputNames":["Output Parser"],"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Schema Type","name":"schemaType","type":"options","noDataExpression":true,"options":[{"name":"Generate From JSON Example","value":"fromJson","description":"Generate a schema from an example JSON object"},{"name":"Define using JSON Schema","value":"manual","description":"Define the JSON schema manually"}],"default":"fromJson","description":"How to specify the schema for the function","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"JSON Example","name":"jsonSchemaExample","type":"json","default":"{\n\t\"state\": \"California\",\n\t\"cities\": [\"Los Angeles\", \"San Francisco\", \"San Diego\"]\n}","noDataExpression":true,"typeOptions":{"rows":10},"displayOptions":{"show":{"schemaType":["fromJson"]}},"description":"Example JSON object to use to generate the schema"},{"displayName":"All properties will be required. To make them optional, use the 'JSON Schema' schema type instead","name":"notice","type":"notice","default":"","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"schemaType":["fromJson"]}}},{"displayName":"Input Schema","name":"inputSchema","type":"json","default":"{\n\t\"type\": \"object\",\n\t\"properties\": {\n\t\t\"state\": {\n\t\t\t\"type\": \"string\"\n\t\t},\n\t\t\"cities\": {\n\t\t\t\"type\": \"array\",\n\t\t\t\"items\": {\n\t\t\t\t\"type\": \"string\"\n\t\t\t}\n\t\t}\n\t}\n}","noDataExpression":false,"typeOptions":{"rows":10},"displayOptions":{"show":{"schemaType":["manual"]}},"description":"Schema to use for the function","hint":"Use <a target=\"_blank\" href=\"https://json-schema.org/\">JSON Schema</a> format (<a target=\"_blank\" href=\"https://json-schema.org/learn/miscellaneous-examples.html\">examples</a>). $refs syntax is currently not supported."},{"displayName":"JSON Schema","name":"jsonSchema","type":"json","description":"JSON Schema to structure and validate the output against","default":"{\n \"type\": \"object\",\n \"properties\": {\n \"state\": {\n \"type\": \"string\"\n },\n \"cities\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"string\"\n }\n }\n }\n}","typeOptions":{"rows":10},"required":true,"displayOptions":{"show":{"@version":[{"_cnd":{"lte":1.1}}]}}},{"displayName":"Auto-Fix Format","description":"Whether to automatically fix the output when it is not in the correct format. 
Will cause another LLM call.","name":"autoFix","type":"boolean","default":false},{"displayName":"Customize Retry Prompt","name":"customizeRetryPrompt","type":"boolean","displayOptions":{"show":{"autoFix":[true]}},"default":false,"description":"Whether to customize the prompt used for retrying the output parsing. If disabled, a default prompt will be used."},{"displayName":"Custom Prompt","name":"prompt","type":"string","displayOptions":{"show":{"autoFix":[true],"customizeRetryPrompt":[true]}},"default":"Instructions:\n--------------\n{instructions}\n--------------\nCompletion:\n--------------\n{completion}\n--------------\n\nAbove, the Completion did not satisfy the constraints given in the Instructions.\nError:\n--------------\n{error}\n--------------\n\nPlease try again. Please only respond with an answer that satisfies the constraints laid out in the Instructions:","typeOptions":{"rows":10},"hint":"Should include \"{error}\", \"{instructions}\", and \"{completion}\" placeholders","description":"Prompt template used for fixing the output. Uses placeholders: \"{instructions}\" for parsing rules, \"{completion}\" for the failed attempt, and \"{error}\" for the validation error message."}],"hints":[{"message":"Fields that use $refs might have the wrong type, since this syntax is not currently supported","type":"warning","location":"outputPane","whenToDisplay":"afterExecution","displayCondition":"={{ $parameter[\"schemaType\"] === \"manual\" && $parameter[\"inputSchema\"]?.includes(\"$ref\") }}"}]},
@@ -95,9 +95,9 @@
95
{"displayName":"Supabase: Insert","hidden":true,"name":"vectorStoreSupabaseInsert","group":["transform"],"version":1,"description":"Insert data into Supabase Vector Store index [https://supabase.com/docs/guides/ai/langchain]","defaults":{"name":"Supabase: Insert"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoresupabase/"}]}},"credentials":[{"name":"supabaseApi","required":true}],"inputs":["main",{"displayName":"Document","maxConnections":1,"type":"ai_document","required":true},{"displayName":"Embedding","maxConnections":1,"type":"ai_embedding","required":true}],"outputs":["main"],"properties":[{"displayName":"Please refer to the <a href=\"https://supabase.com/docs/guides/ai/langchain\" target=\"_blank\">Supabase documentation</a> for more information on how to setup your database as a Vector Store.","name":"setupNotice","type":"notice","default":""},{"displayName":"Table Name","name":"tableName","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"supabaseTableNameSearch"}},{"displayName":"ID","name":"id","type":"string"}]},{"displayName":"Query Name","name":"queryName","type":"string","default":"match_documents","required":true,"description":"Name of the query to use for matching documents"},{"displayName":"Specify the document to load in the document loader sub-node","name":"notice","type":"notice","default":""}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreSupabaseInsert/supabase.svg"},
96
{"displayName":"Supabase: Load","name":"vectorStoreSupabaseLoad","hidden":true,"group":["transform"],"version":1,"description":"Load data from Supabase Vector Store index","defaults":{"name":"Supabase: Load"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoresupabase/"}]}},"credentials":[{"name":"supabaseApi","required":true}],"inputs":[{"displayName":"Embedding","maxConnections":1,"type":"ai_embedding","required":true}],"outputs":["ai_vectorStore"],"outputNames":["Vector Store"],"properties":[{"displayName":"Table Name","name":"tableName","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"supabaseTableNameSearch"}},{"displayName":"ID","name":"id","type":"string"}]},{"displayName":"Query Name","name":"queryName","type":"string","default":"match_documents","required":true,"description":"Name of the query to use for matching documents"},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreSupabaseLoad/supabase.svg"},
97
{"displayName":"Weaviate Vector Store","name":"vectorStoreWeaviate","description":"Work with your data in a Weaviate Cluster","group":["transform"],"version":[1,1.1,1.2,1.3],"defaults":{"name":"Weaviate Vector Store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores","Tools","Root Nodes"],"Vector Stores":["Other Vector Stores"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreweaviate/"}]}},"credentials":[{"name":"weaviateApi","required":true}],"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst useReranker = parameters?.useReranker;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"ai_embedding\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (['load', 'retrieve', 'retrieve-as-tool'].includes(mode) && useReranker) {\n\t\t\t\t\tinputs.push({ displayName: \"Reranker\", type: \"ai_reranker\", required: true, maxConnections: 1})\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"main\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"ai_document\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}","outputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode ?? 'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"ai_tool\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"ai_vectorStore\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"main\"}]\n\t\t\t})($parameter)\n\t\t}}","properties":[{"displayName":"Tip: Get a feel for vector stores in n8n with our","name":"ragStarterCallout","type":"callout","typeOptions":{"calloutAction":{"label":"RAG starter template","type":"openSampleWorkflowTemplate","templateId":"rag-starter-template"}},"default":""},{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"retrieve","options":[{"name":"Get Many","value":"load","description":"Get many ranked documents from vector store for query","action":"Get ranked documents from vector store"},{"name":"Insert Documents","value":"insert","description":"Insert documents into vector store","action":"Add documents to vector store"},{"name":"Retrieve Documents (As Vector Store for Chain/Tool)","value":"retrieve","description":"Retrieve documents from vector store to be used as vector store with AI nodes","action":"Retrieve documents for Chain/Tool as Vector Store","outputConnectionType":"ai_vectorStore"},{"name":"Retrieve Documents (As Tool for AI Agent)","value":"retrieve-as-tool","description":"Retrieve documents from vector store to be used as tool with AI nodes","action":"Retrieve documents for AI Agent as Tool","outputConnectionType":"ai_tool"}]},{"displayName":"This node must be connected to a vector store retriever. 
<a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_retriever'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"},"displayOptions":{"show":{"mode":["retrieve"]}}},{"displayName":"Name","name":"toolName","type":"string","default":"","required":true,"description":"Name of the vector store","placeholder":"e.g. company_knowledge_base","validateType":"string-alphanumeric","displayOptions":{"show":{"@version":[{"_cnd":{"lte":1.2}}],"mode":["retrieve-as-tool"]}}},{"displayName":"Description","name":"toolDescription","type":"string","default":"","required":true,"typeOptions":{"rows":2},"description":"Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often","placeholder":"e.g. Work with your data in a Weaviate Cluster","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Weaviate Collection","name":"weaviateCollection","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"weaviateCollectionsSearch"}},{"displayName":"ID","name":"id","type":"string"}]},{"displayName":"Embedding Batch Size","name":"embeddingBatchSize","type":"number","default":200,"description":"Number of documents to embed in a single batch","displayOptions":{"show":{"mode":["insert"],"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Tenant Name","name":"tenant","type":"string","validateType":"string","description":"Tenant Name. Collection must have been created with tenant support enabled."},{"displayName":"Text Key","name":"textKey","type":"string","default":"text","validateType":"string","description":"The key in the document that contains the embedded text"},{"displayName":"Skip Init Checks","name":"skip_init_checks","type":"boolean","default":false,"validateType":"boolean","description":"Whether to skip init checks while instantiating the client"},{"displayName":"Init Timeout","name":"timeout_init","type":"number","default":2,"validateType":"number","description":"Number of timeout seconds for initial checks"},{"displayName":"Insert Timeout","name":"timeout_insert","type":"number","default":90,"validateType":"number","description":"Number of timeout seconds for inserts"},{"displayName":"Query Timeout","name":"timeout_query","type":"number","default":30,"validateType":"number","description":"Number of timeout seconds for queries"},{"displayName":"GRPC Proxy","name":"proxy_grpc","type":"string","validateType":"string","description":"Proxy to use for GRPC"},{"displayName":"Clear Data","name":"clearStore","type":"boolean","default":false,"description":"Whether to clear the Collection/Tenant before inserting new data"}],"displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"","required":true,"description":"Search prompt to retrieve matching documents from the vector store using similarity-based ranking","displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"Number of top results to fetch from vector store","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Include 
Metadata","name":"includeDocumentMetadata","type":"boolean","default":true,"description":"Whether or not to include document metadata","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Rerank Results","name":"useReranker","type":"boolean","default":false,"description":"Whether or not to rerank results","displayOptions":{"show":{"mode":["load","retrieve","retrieve-as-tool"]}}},{"displayName":"ID","name":"id","type":"string","default":"","required":true,"description":"ID of an embedding entry","displayOptions":{"show":{"mode":["update"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Search Filters","name":"searchFilterJson","type":"json","typeOptions":{"rows":5},"default":"{\n \"OR\": [\n {\n \"path\": [\"pdf_info_Author\"],\n \"operator\": \"Equal\",\n \"valueString\": \"Elis\"\n },\n {\n \"path\": [\"pdf_info_Author\"],\n \"operator\": \"Equal\",\n \"valueString\": \"Pinnacle\"\n } \n ]\n}","validateType":"object","description":"Filter pageContent or metadata using this <a href=\"https://weaviate.io/\" target=\"_blank\">filtering syntax</a>"},{"displayName":"Metadata Keys","name":"metadataKeys","type":"string","default":"source,page","validateType":"string","description":"Select the metadata to retrieve along the content"},{"displayName":"Tenant Name","name":"tenant","type":"string","validateType":"string","description":"Tenant Name. Collection must have been created with tenant support enabled."},{"displayName":"Text Key","name":"textKey","type":"string","default":"text","validateType":"string","description":"The key in the document that contains the embedded text"},{"displayName":"Skip Init Checks","name":"skip_init_checks","type":"boolean","default":false,"validateType":"boolean","description":"Whether to skip init checks while instantiating the client"},{"displayName":"Init Timeout","name":"timeout_init","type":"number","default":2,"validateType":"number","description":"Number of timeout seconds for initial checks"},{"displayName":"Insert Timeout","name":"timeout_insert","type":"number","default":90,"validateType":"number","description":"Number of timeout seconds for inserts"},{"displayName":"Query Timeout","name":"timeout_query","type":"number","default":30,"validateType":"number","description":"Number of timeout seconds for queries"},{"displayName":"GRPC Proxy","name":"proxy_grpc","type":"string","validateType":"string","description":"Proxy to use for GRPC"}],"displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Search Filters","name":"searchFilterJson","type":"json","typeOptions":{"rows":5},"default":"{\n \"OR\": [\n {\n \"path\": [\"pdf_info_Author\"],\n \"operator\": \"Equal\",\n \"valueString\": \"Elis\"\n },\n {\n \"path\": [\"pdf_info_Author\"],\n \"operator\": \"Equal\",\n \"valueString\": \"Pinnacle\"\n } \n ]\n}","validateType":"object","description":"Filter pageContent or metadata using this <a href=\"https://weaviate.io/\" target=\"_blank\">filtering syntax</a>"},{"displayName":"Metadata Keys","name":"metadataKeys","type":"string","default":"source,page","validateType":"string","description":"Select the metadata to retrieve along the content"},{"displayName":"Tenant Name","name":"tenant","type":"string","validateType":"string","description":"Tenant Name. 
Collection must have been created with tenant support enabled."},{"displayName":"Text Key","name":"textKey","type":"string","default":"text","validateType":"string","description":"The key in the document that contains the embedded text"},{"displayName":"Skip Init Checks","name":"skip_init_checks","type":"boolean","default":false,"validateType":"boolean","description":"Whether to skip init checks while instantiating the client"},{"displayName":"Init Timeout","name":"timeout_init","type":"number","default":2,"validateType":"number","description":"Number of timeout seconds for initial checks"},{"displayName":"Insert Timeout","name":"timeout_insert","type":"number","default":90,"validateType":"number","description":"Number of timeout seconds for inserts"},{"displayName":"Query Timeout","name":"timeout_query","type":"number","default":30,"validateType":"number","description":"Number of timeout seconds for queries"},{"displayName":"GRPC Proxy","name":"proxy_grpc","type":"string","validateType":"string","description":"Proxy to use for GRPC"}],"displayOptions":{"show":{"mode":["retrieve"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreWeaviate/weaviate.svg"},
-
{"displayName":"Zep Vector Store","name":"vectorStoreZep","description":"Work with your data in Zep Vector Store","group":["transform"],"version":[1,1.1,1.2,1.3],"defaults":{"name":"Zep Vector Store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores","Tools","Root Nodes"],"Vector Stores":["Other Vector Stores"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorezep/"}]}},"credentials":[{"name":"zepApi","required":true}],"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst useReranker = parameters?.useReranker;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"ai_embedding\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (['load', 'retrieve', 'retrieve-as-tool'].includes(mode) && useReranker) {\n\t\t\t\t\tinputs.push({ displayName: \"Reranker\", type: \"ai_reranker\", required: true, maxConnections: 1})\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"main\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"ai_document\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}","outputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode ?? 'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"ai_tool\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"ai_vectorStore\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"main\"}]\n\t\t\t})($parameter)\n\t\t}}","properties":[{"displayName":"Tip: Get a feel for vector stores in n8n with our","name":"ragStarterCallout","type":"callout","typeOptions":{"calloutAction":{"label":"RAG starter template","type":"openSampleWorkflowTemplate","templateId":"rag-starter-template"}},"default":""},{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"retrieve","options":[{"name":"Get Many","value":"load","description":"Get many ranked documents from vector store for query","action":"Get ranked documents from vector store"},{"name":"Insert Documents","value":"insert","description":"Insert documents into vector store","action":"Add documents to vector store"},{"name":"Retrieve Documents (As Vector Store for Chain/Tool)","value":"retrieve","description":"Retrieve documents from vector store to be used as vector store with AI nodes","action":"Retrieve documents for Chain/Tool as Vector Store","outputConnectionType":"ai_vectorStore"},{"name":"Retrieve Documents (As Tool for AI Agent)","value":"retrieve-as-tool","description":"Retrieve documents from vector store to be used as tool with AI nodes","action":"Retrieve documents for AI Agent as Tool","outputConnectionType":"ai_tool"}]},{"displayName":"This node must be connected to a vector store retriever. 
<a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_retriever'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"},"displayOptions":{"show":{"mode":["retrieve"]}}},{"displayName":"Name","name":"toolName","type":"string","default":"","required":true,"description":"Name of the vector store","placeholder":"e.g. company_knowledge_base","validateType":"string-alphanumeric","displayOptions":{"show":{"@version":[{"_cnd":{"lte":1.2}}],"mode":["retrieve-as-tool"]}}},{"displayName":"Description","name":"toolDescription","type":"string","default":"","required":true,"typeOptions":{"rows":2},"description":"Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often","placeholder":"e.g. Work with your data in Zep Vector Store","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Collection Name","name":"collectionName","type":"string","default":"","required":true},{"displayName":"Embedding Batch Size","name":"embeddingBatchSize","type":"number","default":200,"description":"Number of documents to embed in a single batch","displayOptions":{"show":{"mode":["insert"],"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Embedding Dimensions","name":"embeddingDimensions","type":"number","default":1536,"description":"Whether to allow using characters from the Unicode surrogate blocks"},{"displayName":"Is Auto Embedded","name":"isAutoEmbedded","type":"boolean","default":true,"description":"Whether to automatically embed documents when they are added"}],"displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"","required":true,"description":"Search prompt to retrieve matching documents from the vector store using similarity-based ranking","displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"Number of top results to fetch from vector store","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Include Metadata","name":"includeDocumentMetadata","type":"boolean","default":true,"description":"Whether or not to include document metadata","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Rerank Results","name":"useReranker","type":"boolean","default":false,"description":"Whether or not to rerank results","displayOptions":{"show":{"mode":["load","retrieve","retrieve-as-tool"]}}},{"displayName":"ID","name":"id","type":"string","default":"","required":true,"description":"ID of an embedding entry","displayOptions":{"show":{"mode":["update"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Embedding Dimensions","name":"embeddingDimensions","type":"number","default":1536,"description":"Whether to allow using characters from the Unicode surrogate blocks"},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to 
Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}],"displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Embedding Dimensions","name":"embeddingDimensions","type":"number","default":1536,"description":"Whether to allow using characters from the Unicode surrogate blocks"},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}],"displayOptions":{"show":{"mode":["retrieve"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreZep/zep.png"},
-
{"displayName":"Zep Vector Store: Insert","name":"vectorStoreZepInsert","hidden":true,"group":["transform"],"version":1,"description":"Insert data into Zep Vector Store index","defaults":{"name":"Zep: Insert"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorezep/"}]}},"credentials":[{"name":"zepApi","required":true}],"inputs":["main",{"displayName":"Document","maxConnections":1,"type":"ai_document","required":true},{"displayName":"Embedding","maxConnections":1,"type":"ai_embedding","required":true}],"outputs":["main"],"properties":[{"displayName":"Collection Name","name":"collectionName","type":"string","default":"","required":true},{"displayName":"Specify the document to load in the document loader sub-node","name":"notice","type":"notice","default":""},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Embedding Dimensions","name":"embeddingDimensions","type":"number","default":1536,"description":"Whether to allow using characters from the Unicode surrogate blocks"},{"displayName":"Is Auto Embedded","name":"isAutoEmbedded","type":"boolean","default":true,"description":"Whether to automatically embed documents when they are added"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreZepInsert/zep.png"},
-
{"displayName":"Zep Vector Store: Load","name":"vectorStoreZepLoad","hidden":true,"group":["transform"],"version":1,"description":"Load data from Zep Vector Store index","defaults":{"name":"Zep: Load"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorezep/"}]}},"credentials":[{"name":"zepApi","required":true}],"inputs":[{"displayName":"Embedding","maxConnections":1,"type":"ai_embedding","required":true}],"outputs":["ai_vectorStore"],"outputNames":["Vector Store"],"properties":[{"displayName":"Collection Name","name":"collectionName","type":"string","default":"","required":true},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Embedding Dimensions","name":"embeddingDimensions","type":"number","default":1536,"description":"Whether to allow using characters from the Unicode surrogate blocks"},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreZepLoad/zep.png"},
+
{"displayName":"Zep Vector Store","name":"vectorStoreZep","description":"Work with your data in Zep Vector Store","group":["transform"],"version":[1,1.1,1.2,1.3],"defaults":{"name":"Zep Vector Store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores","Tools","Root Nodes"],"Vector Stores":["Other Vector Stores"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorezep/"}]}},"credentials":[{"name":"zepApi","required":true}],"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst useReranker = parameters?.useReranker;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"ai_embedding\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (['load', 'retrieve', 'retrieve-as-tool'].includes(mode) && useReranker) {\n\t\t\t\t\tinputs.push({ displayName: \"Reranker\", type: \"ai_reranker\", required: true, maxConnections: 1})\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"main\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"ai_document\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}","outputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode ?? 'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"ai_tool\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"ai_vectorStore\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"main\"}]\n\t\t\t})($parameter)\n\t\t}}","properties":[{"displayName":"Tip: Get a feel for vector stores in n8n with our","name":"ragStarterCallout","type":"callout","typeOptions":{"calloutAction":{"label":"RAG starter template","type":"openSampleWorkflowTemplate","templateId":"rag-starter-template"}},"default":""},{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"retrieve","options":[{"name":"Get Many","value":"load","description":"Get many ranked documents from vector store for query","action":"Get ranked documents from vector store"},{"name":"Insert Documents","value":"insert","description":"Insert documents into vector store","action":"Add documents to vector store"},{"name":"Retrieve Documents (As Vector Store for Chain/Tool)","value":"retrieve","description":"Retrieve documents from vector store to be used as vector store with AI nodes","action":"Retrieve documents for Chain/Tool as Vector Store","outputConnectionType":"ai_vectorStore"},{"name":"Retrieve Documents (As Tool for AI Agent)","value":"retrieve-as-tool","description":"Retrieve documents from vector store to be used as tool with AI nodes","action":"Retrieve documents for AI Agent as Tool","outputConnectionType":"ai_tool"}]},{"displayName":"This node must be connected to a vector store retriever. 
<a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_retriever'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"},"displayOptions":{"show":{"mode":["retrieve"]}}},{"displayName":"Name","name":"toolName","type":"string","default":"","required":true,"description":"Name of the vector store","placeholder":"e.g. company_knowledge_base","validateType":"string-alphanumeric","displayOptions":{"show":{"@version":[{"_cnd":{"lte":1.2}}],"mode":["retrieve-as-tool"]}}},{"displayName":"Description","name":"toolDescription","type":"string","default":"","required":true,"typeOptions":{"rows":2},"description":"Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often","placeholder":"e.g. Work with your data in Zep Vector Store","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"This Zep integration is deprecated and will be removed in a future version.","name":"deprecationNotice","type":"notice","default":""},{"displayName":"Collection Name","name":"collectionName","type":"string","default":"","required":true},{"displayName":"Embedding Batch Size","name":"embeddingBatchSize","type":"number","default":200,"description":"Number of documents to embed in a single batch","displayOptions":{"show":{"mode":["insert"],"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Embedding Dimensions","name":"embeddingDimensions","type":"number","default":1536,"description":"Whether to allow using characters from the Unicode surrogate blocks"},{"displayName":"Is Auto Embedded","name":"isAutoEmbedded","type":"boolean","default":true,"description":"Whether to automatically embed documents when they are added"}],"displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"","required":true,"description":"Search prompt to retrieve matching documents from the vector store using similarity-based ranking","displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"Number of top results to fetch from vector store","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Include Metadata","name":"includeDocumentMetadata","type":"boolean","default":true,"description":"Whether or not to include document metadata","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Rerank Results","name":"useReranker","type":"boolean","default":false,"description":"Whether or not to rerank results","displayOptions":{"show":{"mode":["load","retrieve","retrieve-as-tool"]}}},{"displayName":"ID","name":"id","type":"string","default":"","required":true,"description":"ID of an embedding entry","displayOptions":{"show":{"mode":["update"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Embedding Dimensions","name":"embeddingDimensions","type":"number","default":1536,"description":"Whether to allow using characters from the Unicode surrogate blocks"},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter 
field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}],"displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Embedding Dimensions","name":"embeddingDimensions","type":"number","default":1536,"description":"Whether to allow using characters from the Unicode surrogate blocks"},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}],"displayOptions":{"show":{"mode":["retrieve"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreZep/zep.png"},
+
{"displayName":"Zep Vector Store: Insert","name":"vectorStoreZepInsert","hidden":true,"group":["transform"],"version":1,"description":"Insert data into Zep Vector Store index","defaults":{"name":"Zep: Insert"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorezep/"}]}},"credentials":[{"name":"zepApi","required":true}],"inputs":["main",{"displayName":"Document","maxConnections":1,"type":"ai_document","required":true},{"displayName":"Embedding","maxConnections":1,"type":"ai_embedding","required":true}],"outputs":["main"],"properties":[{"displayName":"This Zep integration is deprecated and will be removed in a future version.","name":"deprecationNotice","type":"notice","default":""},{"displayName":"Collection Name","name":"collectionName","type":"string","default":"","required":true},{"displayName":"Specify the document to load in the document loader sub-node","name":"notice","type":"notice","default":""},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Embedding Dimensions","name":"embeddingDimensions","type":"number","default":1536,"description":"Whether to allow using characters from the Unicode surrogate blocks"},{"displayName":"Is Auto Embedded","name":"isAutoEmbedded","type":"boolean","default":true,"description":"Whether to automatically embed documents when they are added"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreZepInsert/zep.png"},
+
{"displayName":"Zep Vector Store: Load","name":"vectorStoreZepLoad","hidden":true,"group":["transform"],"version":1,"description":"Load data from Zep Vector Store index","defaults":{"name":"Zep: Load"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorezep/"}]}},"credentials":[{"name":"zepApi","required":true}],"inputs":[{"displayName":"Embedding","maxConnections":1,"type":"ai_embedding","required":true}],"outputs":["ai_vectorStore"],"outputNames":["Vector Store"],"properties":[{"displayName":"This Zep integration is deprecated and will be removed in a future version.","name":"deprecationNotice","type":"notice","default":""},{"displayName":"Collection Name","name":"collectionName","type":"string","default":"","required":true},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Embedding Dimensions","name":"embeddingDimensions","type":"number","default":1536,"description":"Whether to allow using characters from the Unicode surrogate blocks"},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreZepLoad/zep.png"},
{"displayName":"Tool Executor","name":"toolExecutor","version":1,"defaults":{"name":"Tool Executor"},"hidden":true,"inputs":["main","ai_tool"],"outputs":["main"],"properties":[{"displayName":"Query","name":"query","type":"json","default":"{}","description":"Parameters to pass to the tool as JSON or string"},{"displayName":"Tool Name","name":"toolName","type":"string","default":"","description":"Name of the tool to execute if the connected tool is a toolkit"}],"group":["transform"],"description":"Node to execute tools without an AI Agent","codex":{"categories":["Core Nodes"],"subcategories":{"Core Nodes":["Helpers"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.editimage/"}]}}},
{"displayName":"Model Selector","name":"modelSelector","icon":"fa:map-signs","iconColor":"green","defaults":{"name":"Model Selector"},"version":1,"group":["transform"],"description":"Use this node to select one of the connected models to this node based on workflow data","inputs":"={{\n\t\t\t\t((parameters) => {\n\t\t\t\t\tfunction configuredInputs(parameters) {\n return Array.from({ length: parameters.numberInputs || 2 }, (_, i) => ({\n type: \"ai_languageModel\",\n displayName: `Model ${(i + 1).toString()}`,\n required: true,\n maxConnections: 1\n }));\n};\n\t\t\t\t\treturn configuredInputs(parameters)\n\t\t\t\t})($parameter)\n\t\t\t}}","codex":{"categories":["AI"],"subcategories":{"AI":["Language Models"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.modelselector/"}]}},"outputs":["ai_languageModel"],"requiredInputs":1,"properties":[{"displayName":"Number of Inputs","name":"numberInputs","type":"options","noDataExpression":true,"default":2,"options":[{"name":"2","value":2},{"name":"3","value":3},{"name":"4","value":4},{"name":"5","value":5},{"name":"6","value":6},{"name":"7","value":7},{"name":"8","value":8},{"name":"9","value":9},{"name":"10","value":10}],"validateType":"number","description":"The number of data inputs you want to merge. The node waits for all connected inputs to be executed."},{"displayName":"Rules","name":"rules","placeholder":"Add Rule","type":"fixedCollection","typeOptions":{"multipleValues":true,"sortable":true},"description":"Rules to map workflow data to specific models","default":{},"options":[{"displayName":"Rule","name":"rule","values":[{"displayName":"Model","name":"modelIndex","type":"options","description":"Choose model input from the list","default":1,"required":true,"placeholder":"Choose model input from the list","typeOptions":{"loadOptionsMethod":"getModels"}},{"displayName":"Conditions","name":"conditions","placeholder":"Add Condition","type":"filter","default":{},"typeOptions":{"filter":{"caseSensitive":true,"typeValidation":"strict","version":2}},"description":"Conditions that must be met to select this model"}]}]}]}
]
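For readers skimming the node definitions above: the Weaviate and Zep vector store entries embed an n8n expression in their `inputs` field that derives the node's connections from the selected Operation Mode. Below is a minimal standalone restatement of that logic in plain TypeScript; the `NodeConnection` type and the function name are illustrative only, not part of the package.

```typescript
// Sketch of the mode-driven input resolution embedded in the vector store nodes' `inputs` expression.
type NodeConnection = {
	displayName: string;
	type: 'main' | 'ai_embedding' | 'ai_reranker' | 'ai_document';
	required?: boolean;
	maxConnections?: number;
};

function resolveVectorStoreInputs(parameters: { mode?: string; useReranker?: boolean }): NodeConnection[] {
	const mode = parameters?.mode ?? '';
	const useReranker = parameters?.useReranker;

	// Every mode requires exactly one embedding sub-node.
	const inputs: NodeConnection[] = [
		{ displayName: 'Embedding', type: 'ai_embedding', required: true, maxConnections: 1 },
	];

	// A reranker input only appears for query-style modes when reranking is enabled.
	if (['load', 'retrieve', 'retrieve-as-tool'].includes(mode) && useReranker) {
		inputs.push({ displayName: 'Reranker', type: 'ai_reranker', required: true, maxConnections: 1 });
	}

	if (mode === 'retrieve-as-tool') {
		return inputs;
	}

	// Modes that run inside the main workflow also take a main input.
	if (['insert', 'load', 'update'].includes(mode)) {
		inputs.push({ displayName: '', type: 'main' });
	}

	// Insert mode additionally requires a document loader sub-node.
	if (mode === 'insert') {
		inputs.push({ displayName: 'Document', type: 'ai_document', required: true, maxConnections: 1 });
	}

	return inputs;
}

// Example: insert mode yields Embedding + main + Document inputs.
console.log(resolveVectorStoreInputs({ mode: 'insert' }).map((i) => i.type));
// -> [ 'ai_embedding', 'main', 'ai_document' ]
```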
package/package.json
CHANGED
@@ -1,6 +1,6 @@
{
"name": "@n8n/n8n-nodes-langchain",
- "version": "1.
+ "version": "1.107.0",
"description": "",
"main": "index.js",
"files": [
@@ -148,7 +148,7 @@
"fast-glob": "3.2.12",
"jest-mock-extended": "^3.0.4",
"tsup": "^8.5.0",
- "n8n-core": "1.
+ "n8n-core": "1.107.0"
},
"dependencies": {
"@aws-sdk/client-sso-oidc": "3.808.0",
@@ -159,18 +159,18 @@
"@google-cloud/resource-manager": "5.3.0",
"@google/generative-ai": "0.21.0",
"@huggingface/inference": "4.0.5",
- "@langchain/anthropic": "0.3.
+ "@langchain/anthropic": "0.3.26",
"@langchain/aws": "0.1.11",
"@langchain/cohere": "0.3.4",
- "@langchain/community": "0.3.
- "@langchain/core": "0.3.
+ "@langchain/community": "0.3.50",
+ "@langchain/core": "0.3.68",
"@langchain/google-genai": "0.2.13",
"@langchain/google-vertexai": "0.2.13",
"@langchain/groq": "0.2.3",
"@langchain/mistralai": "0.2.1",
"@langchain/mongodb": "^0.1.0",
"@langchain/ollama": "0.2.3",
- "@langchain/openai": "0.
+ "@langchain/openai": "0.6.7",
"@langchain/pinecone": "0.2.0",
"@langchain/qdrant": "0.1.2",
"@langchain/redis": "0.1.1",
@@ -197,12 +197,12 @@
"ignore": "^5.2.0",
"js-tiktoken": "^1.0.12",
"jsdom": "23.0.1",
- "langchain": "0.3.
+ "langchain": "0.3.30",
"lodash": "4.17.21",
"mammoth": "1.7.2",
"mime-types": "2.1.35",
"mongodb": "6.11.0",
- "openai": "5.
+ "openai": "5.12.2",
"pdf-parse": "1.1.1",
"pg": "8.12.0",
"proxy-from-env": "^1.1.0",
@@ -216,11 +216,11 @@
"zod": "3.25.67",
"zod-to-json-schema": "3.23.3",
"@n8n/client-oauth2": "0.28.0",
+ "@n8n/errors": "^0.4.0",
"@n8n/json-schema-to-zod": "1.5.0",
- "n8n-nodes-base": "1.105.2",
- "n8n-workflow": "1.104.1",
"@n8n/typescript-config": "1.3.0",
- "
+ "n8n-workflow": "1.105.0",
+ "n8n-nodes-base": "1.106.0"
},
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",