@n8n/n8n-nodes-langchain 2.18.2 → 2.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/credentials/SerpApi.credentials.js +6 -0
- package/dist/credentials/SerpApi.credentials.js.map +1 -1
- package/dist/node-definitions/.nodes-hash +1 -1
- package/dist/node-definitions/index.ts +0 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v1.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v1.ts +2 -7
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v11.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v11.ts +1 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v12.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v12.ts +1 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v13.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v13.ts +1 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v14.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v14.ts +1 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v15.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v15.ts +1 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v16.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v16.ts +1 -6
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v17.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v17.ts +6 -11
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v18.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v18.ts +6 -11
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v19.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v19.ts +6 -11
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v2.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v2.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v21.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v21.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v22.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v22.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v3.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v3.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v31.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agent/v31.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agentTool/v22.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agentTool/v22.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agentTool/v3.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/agentTool/v3.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/index.ts +3 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v1/resource_image/operation_analyze.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v1/resource_image/operation_analyze.ts +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v1/resource_image/operation_generate.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v1/resource_image/operation_generate.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v1/resource_video/operation_image_to_video.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v1/resource_video/operation_image_to_video.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v1/resource_video/operation_text_to_video.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v1/resource_video/operation_text_to_video.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/index.schema.js +24 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/index.ts +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_image/index.schema.js +22 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_image/index.ts +15 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_image/operation_analyze.schema.js +34 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_image/operation_analyze.ts +72 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_image/operation_generate.schema.js +31 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_image/operation_generate.ts +61 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_text/index.schema.js +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_text/index.ts +10 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_text/operation_message.schema.js +31 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_text/operation_message.ts +98 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_video/index.schema.js +22 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_video/index.ts +15 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_video/operation_image_to_video.schema.js +38 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_video/operation_image_to_video.ts +106 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_video/operation_text_to_video.schema.js +35 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/alibabaCloud/v11/resource_video/operation_text_to_video.ts +89 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v11.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v12.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v12.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v13.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v13.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v14.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v14.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v15.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v15.ts +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v16.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v16.ts +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v17.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v17.ts +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v18.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v18.ts +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v19.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainLlm/v19.ts +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v1.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v11.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v12.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v12.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v13.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v13.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v14.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v14.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v15.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v15.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v16.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v16.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v17.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainRetrievalQa/v17.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainSummarization/v2.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chainSummarization/v21.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chat/v1.schema.js +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chat/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chat/v11.schema.js +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chat/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chat/v12.schema.js +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chat/v12.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chat/v13.schema.js +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chat/v13.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chatTrigger/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chatTrigger/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chatTrigger/v12.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chatTrigger/v12.ts +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chatTrigger/v13.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chatTrigger/v13.ts +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chatTrigger/v14.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/chatTrigger/v14.ts +3 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/documentDefaultDataLoader/v1.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/documentDefaultDataLoader/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/documentDefaultDataLoader/v11.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/documentDefaultDataLoader/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/embeddingsGoogleGemini/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/embeddingsLemonade/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/embeddingsLemonade/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/index.ts +3 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v1/resource_file_search/operation_create_store.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v1/resource_file_search/operation_create_store.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v1/resource_file_search/operation_delete_store.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v1/resource_file_search/operation_delete_store.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v1/resource_file_search/operation_upload_to_store.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v1/resource_file_search/operation_upload_to_store.ts +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v1/resource_image/operation_generate.ts +2 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v11/resource_file_search/operation_create_store.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v11/resource_file_search/operation_create_store.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v11/resource_file_search/operation_delete_store.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v11/resource_file_search/operation_delete_store.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v11/resource_file_search/operation_upload_to_store.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v11/resource_file_search/operation_upload_to_store.ts +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v11/resource_image/operation_generate.ts +2 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v11/resource_text/operation_message.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/index.schema.js +32 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/index.ts +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/index.schema.js +22 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/index.ts +15 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/operation_analyze.schema.js +34 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/operation_analyze.ts +69 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/operation_transcribe.schema.js +33 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_audio/operation_transcribe.ts +66 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_document/index.schema.js +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_document/index.ts +10 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_document/operation_analyze.schema.js +34 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_document/operation_analyze.ts +69 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file/index.schema.js +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file/index.ts +10 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file/operation_upload.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file/operation_upload.ts +43 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/index.schema.js +26 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/index.ts +21 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_create_store.schema.js +28 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_create_store.ts +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_delete_store.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_delete_store.ts +35 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_list_stores.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_list_stores.ts +35 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_upload_to_store.schema.js +32 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_file_search/operation_upload_to_store.ts +51 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/index.schema.js +24 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/index.ts +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_analyze.schema.js +34 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_analyze.ts +69 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_edit.schema.js +31 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_edit.ts +61 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_generate.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_image/operation_generate.ts +52 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_text/index.schema.js +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_text/index.ts +10 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_text/operation_message.schema.js +33 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_text/operation_message.ts +153 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/index.schema.js +24 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/index.ts +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_analyze.schema.js +34 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_analyze.ts +69 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_download.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_download.ts +41 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_generate.schema.js +31 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/googleGemini/v12/resource_video/operation_generate.ts +68 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/guardrails/v1.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/guardrails/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/guardrails/v2.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/guardrails/v2.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/informationExtractor/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/informationExtractor/v11.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/informationExtractor/v12.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAlibabaCloud/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAnthropic/index.ts +3 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAnthropic/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAnthropic/v11.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAnthropic/v12.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAnthropic/v13.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAnthropic/v14.schema.js +25 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAnthropic/v14.ts +66 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAwsBedrock/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAwsBedrock/v11.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAzureOpenAi/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatAzureOpenAi/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatCohere/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatDeepSeek/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGoogleGemini/index.ts +3 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGoogleGemini/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGoogleGemini/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGoogleGemini/v11.schema.js +25 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGoogleGemini/v11.ts +70 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGoogleVertex/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGoogleVertex/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatGroq/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatLemonade/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatLemonade/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatMinimax/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatMistralCloud/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatMoonshot/index.ts +3 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatMoonshot/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/{toolSerpApi/v1.schema.js → lmChatMoonshot/v11.schema.js} +4 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatMoonshot/v11.ts +68 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatOpenAi/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatOpenAi/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatOpenAi/v11.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatOpenAi/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatOpenAi/v12.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatOpenAi/v12.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatOpenAi/v13.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatOpenAi/v13.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatOpenRouter/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatVercelAiGateway/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmChatXAiGrok/v1.ts +1 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmLemonade/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/lmLemonade/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpClient/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpClient/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpClientTool/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpClientTool/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpClientTool/v11.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpClientTool/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpClientTool/v12.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpClientTool/v12.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpTrigger/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpTrigger/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpTrigger/v11.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpTrigger/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpTrigger/v2.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/mcpTrigger/v2.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryBufferWindow/v13.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryBufferWindow/v13.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryMongoDbChat/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryMongoDbChat/v1.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryPostgresChat/v12.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryPostgresChat/v12.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryPostgresChat/v13.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryPostgresChat/v13.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryRedisChat/v14.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryRedisChat/v14.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryRedisChat/v15.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryRedisChat/v15.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryXata/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryXata/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/memoryXata/v14.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/minimax/v1/resource_audio/operation_text_to_speech.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/minimax/v1/resource_audio/operation_text_to_speech.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/minimax/v1/resource_image/operation_generate.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/minimax/v1/resource_image/operation_generate.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/minimax/v1/resource_video/operation_image_to_video.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/minimax/v1/resource_video/operation_image_to_video.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/minimax/v1/resource_video/operation_text_to_video.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/minimax/v1/resource_video/operation_text_to_video.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/index.ts +3 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v1/resource_assistant/operation_create.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v1/resource_assistant/operation_create.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v11/resource_assistant/operation_create.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v11/resource_assistant/operation_create.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v12/resource_assistant/operation_create.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v12/resource_assistant/operation_create.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v13/resource_assistant/operation_create.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v13/resource_assistant/operation_create.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v14/resource_assistant/operation_create.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v14/resource_assistant/operation_create.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v15/resource_assistant/operation_create.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v15/resource_assistant/operation_create.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v16/resource_assistant/operation_create.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v16/resource_assistant/operation_create.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v17/resource_assistant/operation_create.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v17/resource_assistant/operation_create.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v18/resource_assistant/operation_create.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v18/resource_assistant/operation_create.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_conversation/operation_get.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_conversation/operation_get.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_conversation/operation_remove.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_conversation/operation_remove.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_conversation/operation_update.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_conversation/operation_update.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_image/operation_edit.schema.js +7 -7
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_image/operation_edit.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v2/resource_text/operation_response.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_conversation/operation_get.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_conversation/operation_get.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_conversation/operation_remove.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_conversation/operation_remove.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_conversation/operation_update.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_conversation/operation_update.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_image/operation_edit.schema.js +7 -7
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_image/operation_edit.ts +7 -7
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v21/resource_text/operation_response.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_conversation/operation_get.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_conversation/operation_get.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_conversation/operation_remove.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_conversation/operation_remove.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_conversation/operation_update.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_conversation/operation_update.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_image/operation_edit.schema.js +7 -7
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_image/operation_edit.ts +7 -7
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v22/resource_text/operation_response.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/index.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/index.ts +27 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/index.schema.js +24 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/index.ts +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_generate.schema.js +32 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_generate.ts +60 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_transcribe.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_transcribe.ts +46 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_translate.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_audio/operation_translate.ts +43 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/index.schema.js +26 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/index.ts +21 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_create.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_create.ts +54 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_get.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_get.ts +31 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_remove.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_remove.ts +31 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_update.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_conversation/operation_update.ts +36 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/index.schema.js +24 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/index.ts +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_delete_file.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_delete_file.ts +33 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_list.schema.js +29 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_list.ts +37 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_upload.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_file/operation_upload.ts +43 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/index.schema.js +24 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/index.ts +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_analyze.schema.js +35 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_analyze.ts +74 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_edit.schema.js +39 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_edit.ts +117 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_generate.schema.js +31 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_image/operation_generate.ts +89 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/index.schema.js +22 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/index.ts +15 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_classify.schema.js +30 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_classify.ts +36 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_response.schema.js +34 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_text/operation_response.ts +317 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/index.schema.js +18 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/index.ts +10 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/operation_generate.schema.js +33 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/openAi/v23/resource_video/operation_generate.ts +67 -0
- package/dist/node-definitions/nodes/n8n-nodes-langchain/retrieverWorkflow/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/retrieverWorkflow/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/retrieverWorkflow/v11.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/retrieverWorkflow/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/sentimentAnalysis/v1.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/sentimentAnalysis/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/sentimentAnalysis/v11.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/sentimentAnalysis/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/textClassifier/v1.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/textClassifier/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/textClassifier/v11.schema.js +2 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/textClassifier/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v1.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v1.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v11.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v11.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v12.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v12.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v13.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v13.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v2.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v2.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v21.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v21.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v22.schema.js +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolWorkflow/v22.ts +1 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v1/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v1/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v1/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v1/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v1/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v1/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v1/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v1/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v1/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v1/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v11/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v11/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v11/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v11/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v11/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v11/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v11/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v11/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v11/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v11/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v12/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v12/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v12/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v12/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v12/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v12/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v12/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v12/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v12/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v12/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v13/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v13/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v13/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v13/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v13/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v13/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v13/mode_retrieve_as_tool.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v13/mode_retrieve_as_tool.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v13/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreAzureAISearch/v13/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v1/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v1/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v1/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v1/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v1/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v1/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v1/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v1/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v11/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v11/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v11/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v11/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v11/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v11/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v11/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v11/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v12/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v12/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v12/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v12/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v12/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v12/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v12/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v12/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v13/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v13/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v13/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v13/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v13/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v13/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v13/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreChromaDB/v13/mode_retrieve_as_tool.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v1/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v1/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v1/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v1/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v1/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v1/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v1/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v1/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v11/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v11/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v11/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v11/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v11/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v11/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v11/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v11/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v12/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v12/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v12/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v12/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v12/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v12/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v12/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v12/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v13/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v13/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v13/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v13/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v13/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v13/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v13/mode_retrieve_as_tool.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreInMemory/v13/mode_retrieve_as_tool.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v1/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v1/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v1/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v1/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v1/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v1/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v1/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v1/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v11/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v11/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v11/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v11/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v11/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v11/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v11/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v11/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v12/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v12/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v12/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v12/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v12/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v12/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v12/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v12/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v13/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v13/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v13/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v13/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v13/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v13/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v13/mode_retrieve_as_tool.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMilvus/v13/mode_retrieve_as_tool.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v1/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v1/mode_insert.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v1/mode_load.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v1/mode_load.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v1/mode_retrieve.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v1/mode_retrieve.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v1/mode_retrieve_as_tool.schema.js +4 -5
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v1/mode_retrieve_as_tool.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v1/mode_update.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v1/mode_update.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v11/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v11/mode_insert.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v11/mode_load.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v11/mode_load.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v11/mode_retrieve.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v11/mode_retrieve.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v11/mode_retrieve_as_tool.schema.js +4 -5
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v11/mode_retrieve_as_tool.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v11/mode_update.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v11/mode_update.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v12/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v12/mode_insert.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v12/mode_load.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v12/mode_load.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v12/mode_retrieve.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v12/mode_retrieve.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v12/mode_retrieve_as_tool.schema.js +4 -5
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v12/mode_retrieve_as_tool.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v12/mode_update.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v12/mode_update.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v13/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v13/mode_insert.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v13/mode_load.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v13/mode_load.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v13/mode_retrieve.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v13/mode_retrieve.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v13/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v13/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v13/mode_update.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreMongoDBAtlas/v13/mode_update.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v1/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v1/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v1/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v1/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v1/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v1/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v1/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v1/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v11/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v11/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v11/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v11/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v11/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v11/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v11/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v11/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v12/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v12/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v12/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v12/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v12/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v12/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v12/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v12/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v13/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v13/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v13/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v13/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v13/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v13/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v13/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePGVector/v13/mode_retrieve_as_tool.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v1/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v1/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v1/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v1/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v1/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v1/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v1/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v1/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v1/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v1/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v11/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v11/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v11/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v11/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v11/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v11/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v11/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v11/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v11/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v11/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v12/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v12/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v12/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v12/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v12/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v12/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v12/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v12/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v12/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v12/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v13/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v13/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v13/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v13/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v13/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v13/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v13/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v13/mode_retrieve_as_tool.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v13/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStorePinecone/v13/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v1/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v1/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v1/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v1/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v1/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v1/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v1/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v1/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v11/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v11/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v11/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v11/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v11/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v11/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v11/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v11/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v12/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v12/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v12/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v12/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v12/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v12/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v12/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v12/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v13/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v13/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v13/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v13/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v13/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v13/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v13/mode_retrieve_as_tool.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreQdrant/v13/mode_retrieve_as_tool.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v1/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v1/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v1/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v1/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v1/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v1/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v1/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v1/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v1/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v1/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v11/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v11/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v11/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v11/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v11/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v11/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v11/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v11/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v11/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v11/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v12/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v12/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v12/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v12/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v12/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v12/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v12/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v12/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v12/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v12/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v13/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v13/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v13/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v13/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v13/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v13/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v13/mode_retrieve_as_tool.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v13/mode_retrieve_as_tool.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v13/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreRedis/v13/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v1/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v1/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v1/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v1/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v1/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v1/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v1/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v1/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v1/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v1/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v11/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v11/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v11/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v11/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v11/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v11/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v11/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v11/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v11/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v11/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v12/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v12/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v12/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v12/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v12/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v12/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v12/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v12/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v12/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v12/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v13/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v13/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v13/mode_load.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v13/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v13/mode_retrieve.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v13/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v13/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v13/mode_retrieve_as_tool.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v13/mode_update.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreSupabase/v13/mode_update.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v1/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v1/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v1/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v1/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v1/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v1/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v1/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v1/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v11/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v11/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v11/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v11/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v11/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v11/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v11/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v11/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v12/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v12/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v12/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v12/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v12/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v12/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v12/mode_retrieve_as_tool.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v12/mode_retrieve_as_tool.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v13/mode_insert.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v13/mode_insert.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v13/mode_load.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v13/mode_load.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v13/mode_retrieve.schema.js +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v13/mode_retrieve.ts +0 -1
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v13/mode_retrieve_as_tool.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreWeaviate/v13/mode_retrieve_as_tool.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v1/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v1/mode_insert.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v1/mode_load.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v1/mode_load.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v1/mode_retrieve.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v1/mode_retrieve.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v1/mode_retrieve_as_tool.schema.js +4 -5
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v1/mode_retrieve_as_tool.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v11/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v11/mode_insert.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v11/mode_load.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v11/mode_load.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v11/mode_retrieve.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v11/mode_retrieve.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v11/mode_retrieve_as_tool.schema.js +4 -5
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v11/mode_retrieve_as_tool.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v12/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v12/mode_insert.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v12/mode_load.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v12/mode_load.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v12/mode_retrieve.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v12/mode_retrieve.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v12/mode_retrieve_as_tool.schema.js +4 -5
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v12/mode_retrieve_as_tool.ts +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v13/mode_insert.schema.js +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v13/mode_insert.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v13/mode_load.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v13/mode_load.ts +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v13/mode_retrieve.schema.js +2 -3
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v13/mode_retrieve.ts +1 -2
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v13/mode_retrieve_as_tool.schema.js +3 -4
- package/dist/node-definitions/nodes/n8n-nodes-langchain/vectorStoreZep/v13/mode_retrieve_as_tool.ts +2 -3
- package/dist/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.js +2 -2
- package/dist/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.js.map +1 -1
- package/dist/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.js +44 -3
- package/dist/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.js.map +1 -1
- package/dist/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.js +5 -2
- package/dist/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.js.map +1 -1
- package/dist/nodes/llms/LmChatAlibabaCloud/LmChatAlibabaCloud.node.js +3 -0
- package/dist/nodes/llms/LmChatAlibabaCloud/LmChatAlibabaCloud.node.js.map +1 -1
- package/dist/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.js +6 -0
- package/dist/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.js.map +1 -1
- package/dist/nodes/llms/LmChatCohere/LmChatCohere.node.js +3 -0
- package/dist/nodes/llms/LmChatCohere/LmChatCohere.node.js.map +1 -1
- package/dist/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.js +3 -0
- package/dist/nodes/llms/LmChatDeepSeek/LmChatDeepSeek.node.js.map +1 -1
- package/dist/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.js +68 -48
- package/dist/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.js.map +1 -1
- package/dist/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.js +3 -0
- package/dist/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.js.map +1 -1
- package/dist/nodes/llms/LmChatGroq/LmChatGroq.node.js +3 -0
- package/dist/nodes/llms/LmChatGroq/LmChatGroq.node.js.map +1 -1
- package/dist/nodes/llms/LmChatMinimax/LmChatMinimax.node.js +3 -0
- package/dist/nodes/llms/LmChatMinimax/LmChatMinimax.node.js.map +1 -1
- package/dist/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.js +3 -0
- package/dist/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.js.map +1 -1
- package/dist/nodes/llms/LmChatMoonshot/LmChatMoonshot.node.js +60 -1
- package/dist/nodes/llms/LmChatMoonshot/LmChatMoonshot.node.js.map +1 -1
- package/dist/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.js +3 -0
- package/dist/nodes/llms/LmChatOpenRouter/LmChatOpenRouter.node.js.map +1 -1
- package/dist/nodes/llms/LmChatVercelAiGateway/LmChatVercelAiGateway.node.js +3 -0
- package/dist/nodes/llms/LmChatVercelAiGateway/LmChatVercelAiGateway.node.js.map +1 -1
- package/dist/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.js +3 -0
- package/dist/nodes/llms/LmChatXAiGrok/LmChatXAiGrok.node.js.map +1 -1
- package/dist/nodes/mcp/McpClient/McpClient.node.js +8 -3
- package/dist/nodes/mcp/McpClient/McpClient.node.js.map +1 -1
- package/dist/nodes/mcp/McpClientTool/McpClientTool.node.js +3 -0
- package/dist/nodes/mcp/McpClientTool/McpClientTool.node.js.map +1 -1
- package/dist/nodes/mcp/McpClientTool/utils.js +4 -0
- package/dist/nodes/mcp/McpClientTool/utils.js.map +1 -1
- package/dist/nodes/mcp/McpTrigger/McpServer.js +3 -1
- package/dist/nodes/mcp/McpTrigger/McpServer.js.map +1 -1
- package/dist/nodes/mcp/McpTrigger/protocol/MessageFormatter.js +1 -5
- package/dist/nodes/mcp/McpTrigger/protocol/MessageFormatter.js.map +1 -1
- package/dist/nodes/mcp/shared/utils.d.ts +1 -0
- package/dist/nodes/mcp/shared/utils.js +4 -0
- package/dist/nodes/mcp/shared/utils.js.map +1 -1
- package/dist/nodes/memory/MemoryMongoDbChat/MemoryMongoDbChat.node.js +6 -1
- package/dist/nodes/memory/MemoryMongoDbChat/MemoryMongoDbChat.node.js.map +1 -1
- package/dist/nodes/tools/ToolSerpApi/ToolSerpApi.node.js +7 -0
- package/dist/nodes/tools/ToolSerpApi/ToolSerpApi.node.js.map +1 -1
- package/dist/nodes/trigger/ChatTrigger/ChatTrigger.node.js +11 -4
- package/dist/nodes/trigger/ChatTrigger/ChatTrigger.node.js.map +1 -1
- package/dist/nodes/vendors/AlibabaCloud/AlibabaCloud.node.d.ts +4 -0
- package/dist/nodes/vendors/AlibabaCloud/AlibabaCloud.node.js +4 -0
- package/dist/nodes/vendors/AlibabaCloud/AlibabaCloud.node.js.map +1 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/descriptions.d.ts +2 -0
- package/dist/nodes/vendors/AlibabaCloud/actions/descriptions.js +29 -0
- package/dist/nodes/vendors/AlibabaCloud/actions/descriptions.js.map +1 -0
- package/dist/nodes/vendors/AlibabaCloud/actions/image/analyze.operation.js +14 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/image/analyze.operation.js.map +1 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/image/generate.operation.js +14 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/image/generate.operation.js.map +1 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/text/message.operation.js +20 -14
- package/dist/nodes/vendors/AlibabaCloud/actions/text/message.operation.js.map +1 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/versionDescription.js +2 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/versionDescription.js.map +1 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/video/generate.i2v.operation.js +14 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/video/generate.i2v.operation.js.map +1 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/video/generate.t2v.operation.js +14 -1
- package/dist/nodes/vendors/AlibabaCloud/actions/video/generate.t2v.operation.js.map +1 -1
- package/dist/nodes/vendors/AlibabaCloud/methods/index.d.ts +1 -0
- package/dist/nodes/vendors/AlibabaCloud/methods/index.js +38 -0
- package/dist/nodes/vendors/AlibabaCloud/methods/index.js.map +1 -0
- package/dist/nodes/vendors/AlibabaCloud/methods/listSearch.d.ts +6 -0
- package/dist/nodes/vendors/AlibabaCloud/methods/listSearch.js +51 -0
- package/dist/nodes/vendors/AlibabaCloud/methods/listSearch.js.map +1 -0
- package/dist/nodes/vendors/Anthropic/actions/descriptions.js +1 -1
- package/dist/nodes/vendors/Anthropic/actions/descriptions.js.map +1 -1
- package/dist/nodes/vendors/GoogleGemini/actions/descriptions.js +3 -0
- package/dist/nodes/vendors/GoogleGemini/actions/descriptions.js.map +1 -1
- package/dist/nodes/vendors/GoogleGemini/actions/image/generate.operation.js +15 -2
- package/dist/nodes/vendors/GoogleGemini/actions/image/generate.operation.js.map +1 -1
- package/dist/nodes/vendors/GoogleGemini/actions/text/message.operation.js +17 -6
- package/dist/nodes/vendors/GoogleGemini/actions/text/message.operation.js.map +1 -1
- package/dist/nodes/vendors/GoogleGemini/actions/versionDescription.js +2 -2
- package/dist/nodes/vendors/GoogleGemini/actions/versionDescription.js.map +1 -1
- package/dist/nodes/vendors/GoogleGemini/helpers/interfaces.d.ts +11 -6
- package/dist/nodes/vendors/GoogleGemini/methods/listSearch.js +3 -0
- package/dist/nodes/vendors/GoogleGemini/methods/listSearch.js.map +1 -1
- package/dist/nodes/vendors/Microsoft/MicrosoftAgent365Trigger.node.js +1 -1
- package/dist/nodes/vendors/Microsoft/MicrosoftAgent365Trigger.node.js.map +1 -1
- package/dist/nodes/vendors/Moonshot/actions/descriptions.js +1 -1
- package/dist/nodes/vendors/OpenAi/OpenAi.node.js +2 -1
- package/dist/nodes/vendors/OpenAi/OpenAi.node.js.map +1 -1
- package/dist/nodes/vendors/OpenAi/methods/listSearch.js +18 -1
- package/dist/nodes/vendors/OpenAi/methods/listSearch.js.map +1 -1
- package/dist/nodes/vendors/OpenAi/v2/OpenAiV2.node.js +1 -1
- package/dist/nodes/vendors/OpenAi/v2/OpenAiV2.node.js.map +1 -1
- package/dist/nodes/vendors/OpenAi/v2/actions/image/edit.operation.js +261 -10
- package/dist/nodes/vendors/OpenAi/v2/actions/image/edit.operation.js.map +1 -1
- package/dist/typecheck.tsbuildinfo +1 -1
- package/dist/types/credentials.json +1 -1
- package/dist/types/nodes.json +25 -25
- package/package.json +12 -12
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolSerpApi/index.ts +0 -12
- package/dist/node-definitions/nodes/n8n-nodes-langchain/toolSerpApi/v1.ts +0 -47
package/dist/types/nodes.json
CHANGED
|
@@ -1,13 +1,13 @@
|
|
|
1
1
|
[
|
|
2
|
-
{"displayName":"Alibaba Cloud Model Studio","name":"alibabaCloud","group":["transform"],"version":1,"subtitle":"={{ $parameter[\"operation\"] + \": \" + $parameter[\"resource\"] }}","description":"Interact with Alibaba Cloud Qwen models via Model Studio","defaults":{"name":"Alibaba Cloud Model Studio"},"usableAsTool":true,"codex":{"alias":["qwen","dashscope","alibaba","model studio","video","image"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.alibabacloud/"}]}},"inputs":"={{\n\t\t(() => {\n\t\t\tconst resource = $parameter.resource;\n\t\t\tconst operation = $parameter.operation;\n\t\t\tif (resource === 'text' && operation === 'message') {\n\t\t\t\treturn [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n\t\t\t}\n\n\t\t\treturn ['main'];\n\t\t})()\n\t}}","outputs":["main"],"credentials":[{"name":"alibabaCloudApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Text","value":"text"},{"name":"Image","value":"image"},{"name":"Video","value":"video"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"displayOptions":{"show":{"resource":["text"]}},"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Create a completion with a Qwen model"}],"default":"message"},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Qwen3 Max","value":"qwen3-max","description":"Most capable model with best performance"},{"name":"Qwen3 Max (2026-01-23)","value":"qwen3-max-2026-01-23","description":"Qwen Max snapshot from 2026-01-23"},{"name":"Qwen3.5 122B-A10B","value":"qwen3.5-122b-a10b","description":"MoE model with 122B total / 10B active parameters"},{"name":"Qwen3.5 27B","value":"qwen3.5-27b","description":"Dense 
27B parameter model"},{"name":"Qwen3.5 35B-A3B","value":"qwen3.5-35b-a3b","description":"Small MoE model with 35B total / 3B active parameters"},{"name":"Qwen3.5 397B-A17B","value":"qwen3.5-397b-a17b","description":"Large MoE model with 397B total / 17B active parameters"},{"name":"Qwen3.5 Flash","value":"qwen3.5-flash","description":"Faster, more cost-effective model"},{"name":"Qwen3.5 Flash (2026-02-23)","value":"qwen3.5-flash-2026-02-23","description":"Qwen Flash snapshot from 2026-02-23"},{"name":"Qwen3.5 Plus","value":"qwen3.5-plus","description":"Balanced model with good performance and cost"},{"name":"Qwen3.5 Plus (2026-02-15)","value":"qwen3.5-plus-2026-02-15","description":"Qwen Plus snapshot from 2026-02-15"}],"default":"qwen3.5-flash","description":"The model to use for generation","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{"messageValues":[{"content":"","role":"user"}]},"placeholder":"Add Message","options":[{"name":"messageValues","displayName":"Message","values":[{"displayName":"Content","name":"content","type":"string","typeOptions":{"rows":4},"default":"","description":"The content of the message"},{"displayName":"Role","name":"role","type":"options","options":[{"name":"User","value":"user"},{"name":"Assistant","value":"assistant"}],"default":"user","description":"The role of the message sender"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Enable 
Search","name":"enableSearch","type":"boolean","default":false,"description":"Whether to enable web search for up-to-date information"},{"displayName":"Max Tokens","name":"maxTokens","type":"number","typeOptions":{"minValue":1},"default":2000,"description":"Maximum number of tokens to generate"},{"displayName":"Max Tools Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"Maximum number of tool-calling iterations before stopping. Set to 0 for unlimited."},{"displayName":"Repetition Penalty","name":"repetitionPenalty","type":"number","typeOptions":{"minValue":1,"maxValue":2,"numberPrecision":2},"default":1.1,"description":"Penalty for token repetition. Higher values reduce repetition."},{"displayName":"Seed","name":"seed","type":"number","default":1234,"description":"Random seed for reproducible outputs"},{"displayName":"Stop Sequences","name":"stop","type":"string","default":"","description":"Comma-separated list of sequences where the API will stop generating"},{"displayName":"System Message","name":"system","type":"string","default":"","placeholder":"e.g. You are a helpful assistant"},{"displayName":"Temperature","name":"temperature","type":"number","typeOptions":{"minValue":0,"maxValue":2,"numberPrecision":2},"default":1,"description":"Controls randomness in the output. Lower values make output more focused and deterministic."},{"displayName":"Top K","name":"topK","type":"number","typeOptions":{"minValue":1,"maxValue":100},"default":50,"description":"Limits the sampling pool to top K tokens"},{"displayName":"Top P","name":"topP","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":2},"default":0.9,"description":"Nucleus sampling parameter. 
Lower values make output more focused."}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"displayOptions":{"show":{"resource":["image"]}},"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Take in images and answer questions about them"},{"name":"Generate an Image","value":"generate","action":"Generate an image","description":"Creates an image from a text prompt"}],"default":"generate"},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Qwen-VL Flash","value":"qwen3-vl-flash","description":"Fast vision-language model"},{"name":"Qwen-VL Plus","value":"qwen3-vl-plus","description":"Enhanced vision-language model"}],"default":"qwen3-vl-flash","description":"The model to use for image analysis","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","options":[{"name":"URL","value":"url"},{"name":"Binary Data","value":"binary"}],"default":"url","description":"How to provide the image for analysis","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Image URL","name":"imageUrl","type":"string","default":"","description":"The URL of the image to analyze","required":true,"placeholder":"https://example.com/image.jpg","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","required":true,"placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Question","name":"question","type":"string","typeOptions":{"rows":4},"default":"","description":"The question or instruction about the image","required":true,"placeholder":"What is in this image?","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"visionOptions","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Temperature","name":"temperature","type":"number","typeOptions":{"minValue":0,"maxValue":2,"numberPrecision":2},"default":1,"description":"Controls randomness in the output. 
Lower values make output more focused and deterministic."},{"displayName":"Max Tokens","name":"maxTokens","type":"number","typeOptions":{"minValue":1},"default":2000,"description":"Maximum number of tokens to generate"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Qwen Image","value":"qwen-image","description":"Qwen image generation model"},{"name":"Qwen Image Max","value":"qwen-image-max","description":"Most capable Qwen image generation model"},{"name":"Qwen Image Plus","value":"qwen-image-plus","description":"Enhanced Qwen image generation model"},{"name":"Wan 2.6 T2I","value":"wan2.6-t2i","description":"Wanx image generation model"},{"name":"Z-Image Turbo","value":"z-image-turbo","description":"Fast image generation model"}],"default":"z-image-turbo","description":"The model to use for image generation","displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","typeOptions":{"rows":4},"default":"","description":"The text prompt describing the image to generate","required":true,"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Download Image","name":"downloadImage","type":"boolean","default":true,"description":"Whether to download the generated image as binary data. 
When disabled, only the image URL is returned.","displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Options","name":"imageOptions","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Size","name":"size","type":"options","displayOptions":{"show":{"/modelId":["z-image-turbo","wan2.6-t2i"]}},"options":[{"name":"1024x1024","value":"1024*1024"},{"name":"720x1280","value":"720*1280"},{"name":"1280x720","value":"1280*720"}],"default":"1024*1024","description":"The size of the generated image"},{"displayName":"Size","name":"size","type":"options","displayOptions":{"show":{"/modelId":["qwen-image","qwen-image-plus","qwen-image-max"]}},"options":[{"name":"1104x1472 (3:4)","value":"1104*1472"},{"name":"1328x1328 (1:1)","value":"1328*1328"},{"name":"1472x1104 (4:3)","value":"1472*1104"},{"name":"1664x928 (16:9)","value":"1664*928"},{"name":"928x1664 (9:16)","value":"928*1664"}],"default":"1664*928","description":"The size of the generated image"},{"displayName":"Prompt Extend","name":"promptExtend","type":"boolean","default":false,"description":"Whether to automatically extend and enhance the prompt"}],"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"displayOptions":{"show":{"resource":["video"]}},"options":[{"name":"Generate Video From Text","value":"textToVideo","action":"Generate video from text prompt","description":"Generate a video from a text prompt"},{"name":"Generate Video From Image","value":"imageToVideo","action":"Generate video from image","description":"Generate a video from one or more images using Wan models"}],"default":"textToVideo"},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Wan 2.6 Text-to-Video","value":"wan2.6-t2v","description":"Text-to-video generation model"}],"default":"wan2.6-t2v","description":"The model to use for text-to-video 
generation","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Prompt","name":"prompt","type":"string","typeOptions":{"rows":4},"default":"","required":true,"description":"The text prompt to generate video from","placeholder":"A cat playing with a ball of yarn","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Resolution","name":"resolution","type":"options","options":[{"name":"720P","value":"720P"},{"name":"1080P","value":"1080P"}],"default":"1080P","description":"Resolution tier of the generated video","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Duration (Seconds)","name":"duration","type":"number","typeOptions":{"minValue":2,"maxValue":15},"default":5,"description":"Duration of the generated video in seconds (2–15)","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Shot Type","name":"shotType","type":"options","options":[{"name":"Single","value":"single"},{"name":"Multi","value":"multi"}],"default":"single","description":"Whether to generate a single-shot or multi-shot narrative video","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Download Video","name":"downloadVideo","type":"boolean","default":true,"description":"Whether to download the generated video as binary data. 
When disabled, only the video URL is returned.","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Options","name":"videoOptions","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Prompt Extend","name":"promptExtend","type":"boolean","default":false,"description":"Whether to automatically extend and enhance the prompt"},{"displayName":"Audio","name":"audio","type":"boolean","default":true,"description":"Whether to generate audio for the video"},{"displayName":"Audio Input Type","name":"audioInputType","type":"options","options":[{"name":"Audio URL","value":"url"},{"name":"Binary File","value":"binary"}],"default":"url"},{"displayName":"Audio URL","name":"audioUrl","type":"string","default":"","placeholder":"https://example.com/audio.mp3","description":"URL of the audio file to use for the video","displayOptions":{"show":{"audioInputType":["url"]}}},{"displayName":"Audio Data Field Name","name":"audioBinaryPropertyName","type":"string","default":"audio","placeholder":"e.g. 
audio","hint":"The name of the input field containing the binary audio data","typeOptions":{"binaryDataProperty":true},"displayOptions":{"show":{"audioInputType":["binary"]}}}],"displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Wan 2.6 Image-to-Video Flash","value":"wan2.6-i2v-flash","description":"Fast image-to-video generation model"},{"name":"Wan 2.6 Image-to-Video","value":"wan2.6-i2v","description":"Standard image-to-video generation model"}],"default":"wan2.6-i2v-flash","description":"The model to use for image-to-video generation","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Input Type","name":"inputType","type":"options","options":[{"name":"Image URL","value":"url"},{"name":"Binary File","value":"binary"}],"default":"url","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Image URL","name":"imgUrl","type":"string","required":true,"default":"","placeholder":"https://example.com/image.png","description":"The URL of the first-frame image to generate video from","displayOptions":{"show":{"inputType":["url"],"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","required":true,"placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","displayOptions":{"show":{"inputType":["binary"],"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Prompt","name":"prompt","type":"string","typeOptions":{"rows":4},"default":"","description":"A text prompt describing the desired content and visual characteristics for the generated video","placeholder":"A small cat running on the grass","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Resolution","name":"resolution","type":"options","options":[{"name":"720P","value":"720P"},{"name":"1080P","value":"1080P"}],"default":"1080P","description":"Resolution tier of the generated video","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Duration (Seconds)","name":"duration","type":"number","typeOptions":{"minValue":2,"maxValue":15},"default":5,"description":"Duration of the generated video in seconds (2–15)","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Shot Type","name":"shotType","type":"options","options":[{"name":"Single","value":"single"},{"name":"Multi","value":"multi"}],"default":"single","description":"Whether to generate a single-shot or multi-shot narrative video","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Download Video","name":"downloadVideo","type":"boolean","default":true,"description":"Whether to download the generated video as binary data. 
When disabled, only the video URL is returned.","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Options","name":"imageToVideoOptions","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Prompt Extend","name":"promptExtend","type":"boolean","default":false,"description":"Whether to automatically extend and enhance the prompt"},{"displayName":"Audio","name":"audio","type":"boolean","default":true,"description":"Whether to generate audio for the video"},{"displayName":"Audio Input Type","name":"audioInputType","type":"options","options":[{"name":"Audio URL","value":"url"},{"name":"Binary File","value":"binary"}],"default":"url"},{"displayName":"Audio URL","name":"audioUrl","type":"string","default":"","placeholder":"https://example.com/audio.mp3","description":"URL of the audio file to use for the video","displayOptions":{"show":{"audioInputType":["url"]}}},{"displayName":"Audio Data Field Name","name":"audioBinaryPropertyName","type":"string","default":"audio","placeholder":"e.g. audio","hint":"The name of the input field containing the binary audio data","typeOptions":{"binaryDataProperty":true},"displayOptions":{"show":{"audioInputType":["binary"]}}}],"displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/AlibabaCloud/alibaba.svg"},
|
|
3
|
-
{"displayName":"Anthropic","name":"anthropic","group":["transform"],"version":1,"subtitle":"={{ $parameter[\"operation\"] + \": \" + $parameter[\"resource\"] }}","description":"Interact with Anthropic AI models","defaults":{"name":"Anthropic"},"usableAsTool":true,"codex":{"alias":["LangChain","document","image","assistant","claude"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.anthropic/"}]}},"inputs":"={{\n\t\t(() => {\n\t\t\tconst resource = $parameter.resource;\n\t \tconst operation = $parameter.operation;\n\t\t\tif (resource === 'text' && operation === 'message') {\n\t\t\t\treturn [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n\t\t\t}\n\n\t\t\treturn ['main'];\n\t\t})()\n\t}}","outputs":["main"],"credentials":[{"name":"anthropicApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Document","value":"document"},{"name":"File","value":"file"},{"name":"Image","value":"image"},{"name":"Prompt","value":"prompt"},{"name":"Text","value":"text"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Document","value":"analyze","action":"Analyze document","description":"Take in documents and answer questions about them"}],"default":"analyze","displayOptions":{"show":{"resource":["document"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
claude-sonnet-4-5-20250929"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this document?","default":"What's in this document?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Document URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"URL(s)","name":"documentUrls","type":"string","placeholder":"e.g. https://example.com/document.pdf","description":"URL(s) of the document(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["document"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the document(s), seperate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["document"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxTokens","type":"number","default":1024,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Upload File","value":"upload","action":"Upload a file","description":"Upload a file to the Anthropic API for later use"},{"name":"Get File Metadata","value":"get","action":"Get file metadata","description":"Get metadata for a file from the Anthropic API"},{"name":"List Files","value":"list","action":"List files","description":"List files from the Anthropic API"},{"name":"Delete File","value":"deleteFile","action":"Delete a file","description":"Delete a file from the Anthropic API"}],"default":"upload","displayOptions":{"show":{"resource":["file"]}}},{"displayName":"File ID","name":"fileId","type":"string","placeholder":"e.g. file_123","description":"ID of the file to delete","default":"","displayOptions":{"show":{"operation":["deleteFile"],"resource":["file"]}}},{"displayName":"File ID","name":"fileId","type":"string","placeholder":"e.g. 
file_123","description":"ID of the file to get metadata for","default":"","displayOptions":{"show":{"operation":["get"],"resource":["file"]}}},{"displayName":"Return All","name":"returnAll","type":"boolean","default":false,"description":"Whether to return all results or only up to a given limit","displayOptions":{"show":{"operation":["list"],"resource":["file"]}}},{"displayName":"Limit","name":"limit","type":"number","typeOptions":{"minValue":1,"maxValue":1000},"default":50,"description":"Max number of results to return","displayOptions":{"show":{"returnAll":[false],"operation":["list"],"resource":["file"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"File URL","value":"url"},{"name":"Binary File","value":"binary"}],"displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"URL","name":"fileUrl","type":"string","placeholder":"e.g. https://example.com/file.pdf","description":"URL of the file to upload","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["upload"],"resource":["file"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field which contains the file","displayOptions":{"show":{"inputType":["binary"],"operation":["upload"],"resource":["file"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"File Name","name":"fileName","type":"string","description":"The file name to use for the uploaded file","default":""}],"displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Take in images and answer questions about them"}],"default":"analyze","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. claude-sonnet-4-5-20250929"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this image?","default":"What's in this image?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Image URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"URL(s)","name":"imageUrls","type":"string","placeholder":"e.g. 
https://example.com/image.png","description":"URL(s) of the image(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the image(s), seperate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxTokens","type":"number","default":1024,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Generate Prompt","value":"generate","action":"Generate a prompt","description":"Generate a prompt for a model"},{"name":"Improve Prompt","value":"improve","action":"Improve a prompt","description":"Improve a prompt for a model"},{"name":"Templatize Prompt","value":"templatize","action":"Templatize a prompt","description":"Templatize a prompt for a model"}],"default":"generate","displayOptions":{"show":{"resource":["prompt"]}}},{"displayName":"The <a href=\"https://docs.anthropic.com/en/api/prompt-tools-generate\">prompt tools APIs</a> are in a closed research preview. 
Your organization must request access to use them.","name":"experimentalNotice","type":"notice","default":"","displayOptions":{"show":{"resource":["prompt"]}}},{"displayName":"Task","name":"task","type":"string","description":"Description of the prompt's purpose","placeholder":"e.g. A chef for a meal prep planning service","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["prompt"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["generate"],"resource":["prompt"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"description":"Messages that constitute the prompt to be improved","placeholder":"Add Message","default":{"values":[{"content":"","role":"user"}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. 
Concise instructions for a meal prep service","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["improve"],"resource":["prompt"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["improve"],"resource":["prompt"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"System Message","name":"system","type":"string","description":"The existing system prompt to incorporate, if any","default":"","placeholder":"e.g. You are a professional meal prep chef"},{"displayName":"Feedback","name":"feedback","type":"string","description":"Feedback for improving the prompt","default":"","placeholder":"e.g. Make it more detailed and include cooking times"}],"displayOptions":{"show":{"operation":["improve"],"resource":["prompt"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"description":"Messages that constitute the prompt to be templatized","placeholder":"Add Message","default":{"values":[{"content":"","role":"user"}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. 
Translate hello to German","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["templatize"],"resource":["prompt"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["templatize"],"resource":["prompt"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"System Message","name":"system","type":"string","description":"The existing system prompt to templatize","default":"","placeholder":"e.g. You are a professional English to German translator"}],"displayOptions":{"show":{"operation":["templatize"],"resource":["prompt"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Create a completion with Anthropic model"}],"default":"message","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
claude-sonnet-4-5-20250929"}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"content":"","role":"user"}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Add Attachments","name":"addAttachments","type":"boolean","default":false,"description":"Whether to add attachments to the message","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Attachments Input Type","name":"attachmentsInputType","type":"options","default":"url","description":"The type of input to use for the attachments","options":[{"name":"URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"addAttachments":[true],"operation":["message"],"resource":["text"]}}},{"displayName":"Attachment URL(s)","name":"attachmentsUrls","type":"string","default":"","placeholder":"e.g. 
https://example.com/image.png","description":"URL(s) of the file(s) to attach, multiple URLs can be added separated by comma","displayOptions":{"show":{"addAttachments":[true],"attachmentsInputType":["url"],"operation":["message"],"resource":["text"]}}},{"displayName":"Attachment Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","description":"Name of the binary field(s) which contains the file(s) to attach, multiple field names can be added separated by comma","displayOptions":{"show":{"addAttachments":[true],"attachmentsInputType":["binary"],"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Include Merged Response","name":"includeMergedResponse","type":"boolean","default":false,"description":"Whether to include a single output string merging all text parts of the response"},{"displayName":"System Message","name":"system","type":"string","default":"","placeholder":"e.g. You are a helpful assistant"},{"displayName":"Code Execution","name":"codeExecution","type":"boolean","default":false,"description":"Whether to enable code execution. 
Not supported by all models."},{"displayName":"Web Search","name":"webSearch","type":"boolean","default":false,"description":"Whether to enable web search"},{"displayName":"Web Search Max Uses","name":"maxUses","type":"number","default":5,"description":"The maximum number of web search uses per request","typeOptions":{"minValue":0,"numberPrecision":0}},{"displayName":"Web Search Allowed Domains","name":"allowedDomains","type":"string","default":"","description":"Comma-separated list of domains to search. Only domains in this list will be searched. Conflicts with \"Web Search Blocked Domains\".","placeholder":"e.g. google.com, wikipedia.org"},{"displayName":"Web Search Blocked Domains","name":"blockedDomains","type":"string","default":"","description":"Comma-separated list of domains to block from search. Conflicts with \"Web Search Allowed Domains\".","placeholder":"e.g. google.com, wikipedia.org"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":1024,"description":"The maximum number of tokens to generate in the completion","type":"number","typeOptions":{"minValue":1,"numberPrecision":0}},{"displayName":"Output Randomness (Temperature)","name":"temperature","default":1,"description":"Controls the randomness of the output. Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Output Randomness (Top P)","name":"topP","default":0.7,"description":"The maximum cumulative probability of tokens to consider when sampling","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Output Randomness (Top K)","name":"topK","default":5,"description":"The maximum number of tokens to consider when sampling","type":"number","typeOptions":{"minValue":0,"numberPrecision":0}},{"displayName":"Max Tool Calls Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. Set to 0 for no limit","typeOptions":{"minValue":0,"numberPrecision":0}}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/Anthropic/anthropic.svg"},
|
|
4
|
-
{"displayName":"Google Gemini","name":"googleGemini","group":["transform"],"version":[1,1.1],"defaultVersion":1.1,"subtitle":"={{ $parameter[\"operation\"] + \": \" + $parameter[\"resource\"] }}","description":"Interact with Google Gemini AI models","defaults":{"name":"Google Gemini"},"usableAsTool":true,"codex":{"alias":["LangChain","video","document","audio","transcribe","assistant"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.googlegemini/"}]}},"inputs":"={{\n\t\t(() => {\n\t\t\tconst resource = $parameter.resource;\n\t \tconst operation = $parameter.operation;\n\t\t\tif (resource === 'text' && operation === 'message') {\n\t\t\t\treturn [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n\t\t\t}\n\n\t\t\treturn ['main'];\n\t\t})()\n\t}}","outputs":["main"],"credentials":[{"name":"googlePalmApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Audio","value":"audio"},{"name":"Document","value":"document"},{"name":"File Search","value":"fileSearch"},{"name":"Image","value":"image"},{"name":"Media File","value":"file"},{"name":"Text","value":"text"},{"name":"Video","value":"video"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Audio","value":"analyze","action":"Analyze audio","description":"Take in audio and answer questions about it"},{"name":"Transcribe a Recording","value":"transcribe","action":"Transcribe a recording","description":"Transcribes audio into the text"}],"default":"transcribe","displayOptions":{"show":{"resource":["audio"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From 
List","name":"list","type":"list","typeOptions":{"searchListMethod":"audioModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this audio?","default":"What's in this audio?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Audio URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["audio"]}}},{"displayName":"URL(s)","name":"audioUrls","type":"string","placeholder":"e.g. https://example.com/audio.mp3","description":"URL(s) of the audio(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the audio(s), separate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed audio description","name":"maxOutputTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"audioModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Audio URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"URL(s)","name":"audioUrls","type":"string","placeholder":"e.g. 
https://example.com/audio.mp3","description":"URL(s) of the audio(s) to transcribe, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the audio(s), separate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Options","name":"options","type":"collection","default":{},"options":[{"displayName":"Start Time","name":"startTime","type":"string","default":"","description":"The start time of the audio in MM:SS or HH:MM:SS format","placeholder":"e.g. 00:15"},{"displayName":"End Time","name":"endTime","type":"string","default":"","description":"The end time of the audio in MM:SS or HH:MM:SS format","placeholder":"e.g. 
02:15"}],"displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Document","value":"analyze","action":"Analyze document","description":"Take in documents and answer questions about them"}],"default":"analyze","displayOptions":{"show":{"resource":["document"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this document?","default":"What's in this document?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Document URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"URL(s)","name":"documentUrls","type":"string","placeholder":"e.g. https://example.com/document.pdf","description":"URL(s) of the document(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["document"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the document(s), separate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["document"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed document description","name":"maxOutputTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Upload Media File","value":"upload","action":"Upload a media file","description":"Upload a file to the Google Gemini API for later use"}],"default":"upload","displayOptions":{"show":{"resource":["file"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"File URL","value":"url"},{"name":"Binary File","value":"binary"}],"displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"URL","name":"fileUrl","type":"string","placeholder":"e.g. https://example.com/file.pdf","description":"URL of the file to upload","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["upload"],"resource":["file"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the file","displayOptions":{"show":{"inputType":["binary"],"operation":["upload"],"resource":["file"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Create File Search Store","value":"createStore","action":"Create a File Search store","description":"Create a new File Search store for RAG (Retrieval Augmented Generation)"},{"name":"Delete File Search Store","value":"deleteStore","action":"Delete a File Search store","description":"Delete a File Search store"},{"name":"List File Search Stores","value":"listStores","action":"List all File Search stores","description":"List all File Search stores owned by the user"},{"name":"Upload to File Search Store","value":"uploadToStore","action":"Upload a file to a File Search store","description":"Upload a file to a File Search store for RAG (Retrieval Augmented Generation)"}],"default":"createStore","displayOptions":{"show":{"resource":["fileSearch"]}}},{"displayName":"Display Name","name":"displayName","type":"string","placeholder":"e.g. My File Search Store","description":"A human-readable name for the File Search store","default":"","required":true,"displayOptions":{"show":{"operation":["createStore"],"resource":["fileSearch"]}}},{"displayName":"File Search Store Name","name":"fileSearchStoreName","type":"string","placeholder":"e.g. fileSearchStores/abc123","description":"The full name of the File Search store to delete (format: fileSearchStores/...)","default":"","required":true,"displayOptions":{"show":{"operation":["deleteStore"],"resource":["fileSearch"]}}},{"displayName":"Force Delete","name":"force","type":"boolean","description":"Whether to delete related Documents and objects. 
If false, deletion will fail if the store contains any Documents.","default":false,"displayOptions":{"show":{"operation":["deleteStore"],"resource":["fileSearch"]}}},{"displayName":"Page Size","name":"pageSize","type":"number","description":"Maximum number of File Search stores to return per page (max 20)","default":10,"typeOptions":{"minValue":1,"maxValue":20},"displayOptions":{"show":{"operation":["listStores"],"resource":["fileSearch"]}}},{"displayName":"Page Token","name":"pageToken","type":"string","description":"Token from a previous page to retrieve the next page of results","default":"","displayOptions":{"show":{"operation":["listStores"],"resource":["fileSearch"]}}},{"displayName":"File Search Store Name","name":"fileSearchStoreName","type":"string","placeholder":"e.g. fileSearchStores/abc123","description":"The full name of the File Search store to upload to (format: fileSearchStores/...)","default":"","required":true,"displayOptions":{"show":{"operation":["uploadToStore"],"resource":["fileSearch"]}}},{"displayName":"File Display Name","name":"displayName","type":"string","placeholder":"e.g. My Document","description":"A human-readable name for the file (will be visible in citations)","default":"","required":true,"displayOptions":{"show":{"operation":["uploadToStore"],"resource":["fileSearch"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"File URL","value":"url"},{"name":"Binary File","value":"binary"}],"displayOptions":{"show":{"operation":["uploadToStore"],"resource":["fileSearch"]}}},{"displayName":"URL","name":"fileUrl","type":"string","placeholder":"e.g. https://example.com/file.pdf","description":"URL of the file to upload","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["uploadToStore"],"resource":["fileSearch"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the file","displayOptions":{"show":{"inputType":["binary"],"operation":["uploadToStore"],"resource":["fileSearch"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze an image","description":"Take in images and answer questions about them"},{"name":"Generate an Image","value":"generate","action":"Generate an image","description":"Creates an image from a text prompt"},{"name":"Edit Image","value":"edit","action":"Edit an image","description":"Upload one or more images and apply edits based on a prompt"}],"default":"generate","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this image?","default":"What's in this image?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Image URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"URL(s)","name":"imageUrls","type":"string","placeholder":"e.g. 
https://example.com/image.png","description":"URL(s) of the image(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the image(s), separate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxOutputTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageEditModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","placeholder":"e.g. 
combine the first image with the second image","description":"Instruction describing how to edit the image","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Images","name":"images","type":"fixedCollection","placeholder":"Add Image","typeOptions":{"multipleValues":true,"multipleValueButtonText":"Add Image"},"default":{"values":[{"binaryPropertyName":"data"}]},"description":"Add one or more binary fields to include images with your prompt","options":[{"displayName":"Image","name":"values","values":[{"displayName":"Binary Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","description":"The name of the binary field containing the image data"}]}],"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"edited","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageGenerationModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","placeholder":"e.g. 
A cute cat eating a dinosaur","description":"A text description of the desired image(s)","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Number of Images","name":"sampleCount","default":1,"description":"Number of images to generate. Not supported by Gemini models, supported by Imagen models.","type":"number","typeOptions":{"minValue":1}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Create a completion with Google Gemini model"}],"default":"message","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"content":""}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. 
Hello, how can you help me?","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Model","value":"model","description":"Tell the model to adopt a specific tone or personality"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Output Content as JSON","name":"jsonOutput","type":"boolean","description":"Whether to attempt to return the response in JSON format","default":false,"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Built-in Tools","name":"builtInTools","placeholder":"Add Built-in Tool","type":"collection","default":{},"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}],"operation":["message"],"resource":["text"]}},"options":[{"displayName":"Google Search","name":"googleSearch","type":"boolean","default":true,"description":"Whether to allow the model to search the web using Google Search to get real-time information"},{"displayName":"Google Maps","name":"googleMaps","type":"collection","default":{"latitude":"","longitude":""},"options":[{"displayName":"Latitude","name":"latitude","type":"number","default":"","description":"The latitude coordinate for location-based queries","typeOptions":{"numberPrecision":6}},{"displayName":"Longitude","name":"longitude","type":"number","default":"","description":"The longitude coordinate for location-based queries","typeOptions":{"numberPrecision":6}}]},{"displayName":"URL 
Context","name":"urlContext","type":"boolean","default":true,"description":"Whether to allow the model to read and analyze content from specific URLs"},{"displayName":"File Search","name":"fileSearch","type":"collection","default":{"fileSearchStoreNames":"[]"},"options":[{"displayName":"File Search Store Names","name":"fileSearchStoreNames","description":"The file search store names to use for the file search. File search stores are managed via Google AI Studio.","type":"json","default":"[]","required":true},{"displayName":"Metadata Filter","name":"metadataFilter","type":"string","default":"","description":"Use metadata filter to search within a subset of documents. Example: author=\"Robert Graves\".","placeholder":"e.g. author=\"John Doe\""}]},{"displayName":"Code Execution","name":"codeExecution","type":"boolean","default":true,"description":"Whether to allow the model to execute code it generates to produce a response. Supported only by certain models."}]},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Include Merged Response","name":"includeMergedResponse","type":"boolean","default":false,"description":"Whether to include a single output string merging all text parts of the response","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"System Message","name":"systemMessage","type":"string","default":"","placeholder":"e.g. You are a helpful assistant"},{"displayName":"Code Execution","name":"codeExecution","type":"boolean","default":false,"description":"Whether to allow the model to execute code it generates to produce a response. 
Supported only by certain models.","displayOptions":{"show":{"@version":[{"_cnd":{"eq":1}}]}}},{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number","typeOptions":{"minValue":-2,"maxValue":2,"numberPrecision":1}},{"displayName":"Maximum Number of Tokens","name":"maxOutputTokens","default":16,"description":"The maximum number of tokens to generate in the completion","type":"number","typeOptions":{"minValue":1,"numberPrecision":0}},{"displayName":"Number of Completions","name":"candidateCount","default":1,"description":"How many completions to generate for each prompt","type":"number","typeOptions":{"minValue":1,"maxValue":8,"numberPrecision":0}},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number","typeOptions":{"minValue":-2,"maxValue":2,"numberPrecision":1}},{"displayName":"Output Randomness (Temperature)","name":"temperature","default":1,"description":"Controls the randomness of the output. Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive","type":"number","typeOptions":{"minValue":0,"maxValue":2,"numberPrecision":1}},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"description":"The maximum cumulative probability of tokens to consider when sampling","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Output Randomness (Top K)","name":"topK","default":1,"description":"The maximum number of tokens to consider when sampling","type":"number","typeOptions":{"minValue":1,"numberPrecision":0}},{"displayName":"Thinking Budget","name":"thinkingBudget","type":"number","default":-1,"description":"Controls reasoning tokens for thinking models. Set to 0 to disable automatic thinking. Set to -1 for dynamic thinking (default).","typeOptions":{"minValue":-1,"numberPrecision":0}},{"displayName":"Max Tool Calls Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. 
Set to 0 for no limit","typeOptions":{"minValue":0,"numberPrecision":0}}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Video","value":"analyze","action":"Analyze video","description":"Take in videos and answer questions about them"},{"name":"Generate a Video","value":"generate","action":"Generate a video","description":"Creates a video from a text prompt"},{"name":"Download Video","value":"download","action":"Download a video","description":"Download a generated video from the Google Gemini API using a URL"}],"default":"generate","displayOptions":{"show":{"resource":["video"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["video"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this video?","default":"What's in this video?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["video"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Video URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["video"]}}},{"displayName":"URL(s)","name":"videoUrls","type":"string","placeholder":"e.g. 
https://example.com/video.mp4","description":"URL(s) of the video(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["video"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the video(s), separate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["video"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["video"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed video description","name":"maxOutputTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["video"]}}},{"displayName":"URL","name":"url","type":"string","placeholder":"e.g. 
https://generativelanguage.googleapis.com/v1beta/files/abcdefg:download","description":"The URL from Google Gemini API to download the video from","default":"","displayOptions":{"show":{"operation":["download"],"resource":["video"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["download"],"resource":["video"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"videoGenerationModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Prompt","name":"prompt","type":"string","placeholder":"e.g. 
Panning wide shot of a calico kitten sleeping in the sunshine","description":"A text description of the desired video","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Return As","name":"returnAs","type":"options","options":[{"name":"Video","value":"video"},{"name":"URL","value":"url"}],"description":"Whether to return the video as a binary file or a URL that can be used to download the video later","default":"video","displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Number of Videos","name":"sampleCount","type":"number","default":1,"description":"How many videos to generate","typeOptions":{"minValue":1,"maxValue":4}},{"displayName":"Duration (Seconds)","name":"durationSeconds","type":"number","default":8,"description":"Length of the generated video in seconds. 
Supported only by certain models.","typeOptions":{"minValue":5,"maxValue":8}},{"displayName":"Aspect Ratio","name":"aspectRatio","type":"options","options":[{"name":"Widescreen (16:9)","value":"16:9","description":"Most common aspect ratio for televisions and monitors"},{"name":"Portrait (9:16)","value":"9:16","description":"Popular for short-form videos like YouTube Shorts"}],"default":"16:9"},{"displayName":"Person Generation","name":"personGeneration","type":"options","options":[{"name":"Don't Allow","value":"dont_allow","description":"Prevent generation of people in the video"},{"name":"Allow Adult","value":"allow_adult","description":"Allow generation of adult people in the video"},{"name":"Allow All","value":"allow_all","description":"Allow generation of all people in the video"}],"default":"dont_allow"},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/GoogleGemini/gemini.svg"},
|
|
2
|
+
{"displayName":"Alibaba Cloud Model Studio","name":"alibabaCloud","group":["transform"],"version":[1,1.1],"defaultVersion":1.1,"subtitle":"={{ $parameter[\"operation\"] + \": \" + $parameter[\"resource\"] }}","description":"Interact with Alibaba Cloud Qwen models via Model Studio","defaults":{"name":"Alibaba Cloud Model Studio"},"usableAsTool":true,"codex":{"alias":["qwen","dashscope","alibaba","model studio","video","image"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.alibabacloud/"}]}},"inputs":"={{\n\t\t(() => {\n\t\t\tconst resource = $parameter.resource;\n\t\t\tconst operation = $parameter.operation;\n\t\t\tif (resource === 'text' && operation === 'message') {\n\t\t\t\treturn [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n\t\t\t}\n\n\t\t\treturn ['main'];\n\t\t})()\n\t}}","outputs":["main"],"credentials":[{"name":"alibabaCloudApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Text","value":"text"},{"name":"Image","value":"image"},{"name":"Video","value":"video"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"displayOptions":{"show":{"resource":["text"]}},"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Create a completion with a Qwen model"}],"default":"message"},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Qwen3 Max","value":"qwen3-max","description":"Most capable model with best performance"},{"name":"Qwen3 Max (2026-01-23)","value":"qwen3-max-2026-01-23","description":"Qwen Max snapshot from 2026-01-23"},{"name":"Qwen3.5 122B-A10B","value":"qwen3.5-122b-a10b","description":"MoE model with 122B total / 10B active parameters"},{"name":"Qwen3.5 
27B","value":"qwen3.5-27b","description":"Dense 27B parameter model"},{"name":"Qwen3.5 35B-A3B","value":"qwen3.5-35b-a3b","description":"Small MoE model with 35B total / 3B active parameters"},{"name":"Qwen3.5 397B-A17B","value":"qwen3.5-397b-a17b","description":"Large MoE model with 397B total / 17B active parameters"},{"name":"Qwen3.5 Flash","value":"qwen3.5-flash","description":"Faster, more cost-effective model"},{"name":"Qwen3.5 Flash (2026-02-23)","value":"qwen3.5-flash-2026-02-23","description":"Qwen Flash snapshot from 2026-02-23"},{"name":"Qwen3.5 Plus","value":"qwen3.5-plus","description":"Balanced model with good performance and cost"},{"name":"Qwen3.5 Plus (2026-02-15)","value":"qwen3.5-plus-2026-02-15","description":"Qwen Plus snapshot from 2026-02-15"}],"default":"qwen3.5-flash","description":"The model to use for generation","displayOptions":{"show":{"@version":[1],"operation":["message"],"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"textModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
qwen3.5-flash"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}],"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{"messageValues":[{"content":"","role":"user"}]},"placeholder":"Add Message","options":[{"name":"messageValues","displayName":"Message","values":[{"displayName":"Content","name":"content","type":"string","typeOptions":{"rows":4},"default":"","description":"The content of the message"},{"displayName":"Role","name":"role","type":"options","options":[{"name":"User","value":"user"},{"name":"Assistant","value":"assistant"}],"default":"user","description":"The role of the message sender"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Enable Search","name":"enableSearch","type":"boolean","default":false,"description":"Whether to enable web search for up-to-date information"},{"displayName":"Max Tokens","name":"maxTokens","type":"number","typeOptions":{"minValue":1},"default":2000,"description":"Maximum number of tokens to generate"},{"displayName":"Max Tools Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"Maximum number of tool-calling iterations before stopping. Set to 0 for unlimited."},{"displayName":"Repetition Penalty","name":"repetitionPenalty","type":"number","typeOptions":{"minValue":1,"maxValue":2,"numberPrecision":2},"default":1.1,"description":"Penalty for token repetition. 
Higher values reduce repetition."},{"displayName":"Seed","name":"seed","type":"number","default":1234,"description":"Random seed for reproducible outputs"},{"displayName":"Stop Sequences","name":"stop","type":"string","default":"","description":"Comma-separated list of sequences where the API will stop generating"},{"displayName":"System Message","name":"system","type":"string","default":"","placeholder":"e.g. You are a helpful assistant"},{"displayName":"Temperature","name":"temperature","type":"number","typeOptions":{"minValue":0,"maxValue":2,"numberPrecision":2},"default":1,"description":"Controls randomness in the output. Lower values make output more focused and deterministic."},{"displayName":"Top K","name":"topK","type":"number","typeOptions":{"minValue":1,"maxValue":100},"default":50,"description":"Limits the sampling pool to top K tokens"},{"displayName":"Top P","name":"topP","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":2},"default":0.9,"description":"Nucleus sampling parameter. 
Lower values make output more focused."}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"displayOptions":{"show":{"resource":["image"]}},"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Take in images and answer questions about them"},{"name":"Generate an Image","value":"generate","action":"Generate an image","description":"Creates an image from a text prompt"}],"default":"generate"},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Qwen-VL Flash","value":"qwen3-vl-flash","description":"Fast vision-language model"},{"name":"Qwen-VL Plus","value":"qwen3-vl-plus","description":"Enhanced vision-language model"}],"default":"qwen3-vl-flash","description":"The model to use for image analysis","displayOptions":{"show":{"@version":[1],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"visionModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
qwen3.5-flash"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","options":[{"name":"URL","value":"url"},{"name":"Binary Data","value":"binary"}],"default":"url","description":"How to provide the image for analysis","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Image URL","name":"imageUrl","type":"string","default":"","description":"The URL of the image to analyze","required":true,"placeholder":"https://example.com/image.jpg","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","required":true,"placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Question","name":"question","type":"string","typeOptions":{"rows":4},"default":"","description":"The question or instruction about the image","required":true,"placeholder":"What is in this image?","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"visionOptions","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Temperature","name":"temperature","type":"number","typeOptions":{"minValue":0,"maxValue":2,"numberPrecision":2},"default":1,"description":"Controls randomness in the output. 
Lower values make output more focused and deterministic."},{"displayName":"Max Tokens","name":"maxTokens","type":"number","typeOptions":{"minValue":1},"default":2000,"description":"Maximum number of tokens to generate"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Qwen Image","value":"qwen-image","description":"Qwen image generation model"},{"name":"Qwen Image Max","value":"qwen-image-max","description":"Most capable Qwen image generation model"},{"name":"Qwen Image Plus","value":"qwen-image-plus","description":"Enhanced Qwen image generation model"},{"name":"Wan 2.6 T2I","value":"wan2.6-t2i","description":"Wanx image generation model"},{"name":"Z-Image Turbo","value":"z-image-turbo","description":"Fast image generation model"}],"default":"z-image-turbo","description":"The model to use for image generation","displayOptions":{"show":{"@version":[1],"operation":["generate"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageGenerationModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. qwen3.5-flash"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}],"operation":["generate"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","typeOptions":{"rows":4},"default":"","description":"The text prompt describing the image to generate","required":true,"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Download Image","name":"downloadImage","type":"boolean","default":true,"description":"Whether to download the generated image as binary data. 
When disabled, only the image URL is returned.","displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Options","name":"imageOptions","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Size","name":"size","type":"options","displayOptions":{"show":{"/modelId":["z-image-turbo","wan2.6-t2i"]}},"options":[{"name":"1024x1024","value":"1024*1024"},{"name":"720x1280","value":"720*1280"},{"name":"1280x720","value":"1280*720"}],"default":"1024*1024","description":"The size of the generated image"},{"displayName":"Size","name":"size","type":"options","displayOptions":{"show":{"/modelId":["qwen-image","qwen-image-plus","qwen-image-max"]}},"options":[{"name":"1104x1472 (3:4)","value":"1104*1472"},{"name":"1328x1328 (1:1)","value":"1328*1328"},{"name":"1472x1104 (4:3)","value":"1472*1104"},{"name":"1664x928 (16:9)","value":"1664*928"},{"name":"928x1664 (9:16)","value":"928*1664"}],"default":"1664*928","description":"The size of the generated image"},{"displayName":"Prompt Extend","name":"promptExtend","type":"boolean","default":false,"description":"Whether to automatically extend and enhance the prompt"}],"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"displayOptions":{"show":{"resource":["video"]}},"options":[{"name":"Generate Video From Text","value":"textToVideo","action":"Generate video from text prompt","description":"Generate a video from a text prompt"},{"name":"Generate Video From Image","value":"imageToVideo","action":"Generate video from image","description":"Generate a video from one or more images using Wan models"}],"default":"textToVideo"},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Wan 2.6 Text-to-Video","value":"wan2.6-t2v","description":"Text-to-video generation model"}],"default":"wan2.6-t2v","description":"The model to use for text-to-video 
generation","displayOptions":{"show":{"@version":[1],"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"textToVideoModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. qwen3.5-flash"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}],"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Prompt","name":"prompt","type":"string","typeOptions":{"rows":4},"default":"","required":true,"description":"The text prompt to generate video from","placeholder":"A cat playing with a ball of yarn","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Resolution","name":"resolution","type":"options","options":[{"name":"720P","value":"720P"},{"name":"1080P","value":"1080P"}],"default":"1080P","description":"Resolution tier of the generated video","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Duration (Seconds)","name":"duration","type":"number","typeOptions":{"minValue":2,"maxValue":15},"default":5,"description":"Duration of the generated video in seconds (2–15)","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Shot Type","name":"shotType","type":"options","options":[{"name":"Single","value":"single"},{"name":"Multi","value":"multi"}],"default":"single","description":"Whether to generate a single-shot or multi-shot narrative video","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Download Video","name":"downloadVideo","type":"boolean","default":true,"description":"Whether to download the generated video as binary data. 
When disabled, only the video URL is returned.","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Options","name":"videoOptions","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Prompt Extend","name":"promptExtend","type":"boolean","default":false,"description":"Whether to automatically extend and enhance the prompt"},{"displayName":"Audio","name":"audio","type":"boolean","default":true,"description":"Whether to generate audio for the video"},{"displayName":"Audio Input Type","name":"audioInputType","type":"options","options":[{"name":"Audio URL","value":"url"},{"name":"Binary File","value":"binary"}],"default":"url"},{"displayName":"Audio URL","name":"audioUrl","type":"string","default":"","placeholder":"https://example.com/audio.mp3","description":"URL of the audio file to use for the video","displayOptions":{"show":{"audioInputType":["url"]}}},{"displayName":"Audio Data Field Name","name":"audioBinaryPropertyName","type":"string","default":"audio","placeholder":"e.g. 
audio","hint":"The name of the input field containing the binary audio data","typeOptions":{"binaryDataProperty":true},"displayOptions":{"show":{"audioInputType":["binary"]}}}],"displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Wan 2.6 Image-to-Video Flash","value":"wan2.6-i2v-flash","description":"Fast image-to-video generation model"},{"name":"Wan 2.6 Image-to-Video","value":"wan2.6-i2v","description":"Standard image-to-video generation model"}],"default":"wan2.6-i2v-flash","description":"The model to use for image-to-video generation","displayOptions":{"show":{"@version":[1],"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageToVideoModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. qwen3.5-flash"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}],"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Input Type","name":"inputType","type":"options","options":[{"name":"Image URL","value":"url"},{"name":"Binary File","value":"binary"}],"default":"url","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Image URL","name":"imgUrl","type":"string","required":true,"default":"","placeholder":"https://example.com/image.png","description":"The URL of the first-frame image to generate video from","displayOptions":{"show":{"inputType":["url"],"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","required":true,"placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","displayOptions":{"show":{"inputType":["binary"],"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Prompt","name":"prompt","type":"string","typeOptions":{"rows":4},"default":"","description":"A text prompt describing the desired content and visual characteristics for the generated video","placeholder":"A small cat running on the grass","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Resolution","name":"resolution","type":"options","options":[{"name":"720P","value":"720P"},{"name":"1080P","value":"1080P"}],"default":"1080P","description":"Resolution tier of the generated video","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Duration (Seconds)","name":"duration","type":"number","typeOptions":{"minValue":2,"maxValue":15},"default":5,"description":"Duration of the generated video in seconds (2–15)","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Shot Type","name":"shotType","type":"options","options":[{"name":"Single","value":"single"},{"name":"Multi","value":"multi"}],"default":"single","description":"Whether to generate a single-shot or multi-shot narrative video","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Download Video","name":"downloadVideo","type":"boolean","default":true,"description":"Whether to download the generated video as binary data. 
When disabled, only the video URL is returned.","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Options","name":"imageToVideoOptions","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Prompt Extend","name":"promptExtend","type":"boolean","default":false,"description":"Whether to automatically extend and enhance the prompt"},{"displayName":"Audio","name":"audio","type":"boolean","default":true,"description":"Whether to generate audio for the video"},{"displayName":"Audio Input Type","name":"audioInputType","type":"options","options":[{"name":"Audio URL","value":"url"},{"name":"Binary File","value":"binary"}],"default":"url"},{"displayName":"Audio URL","name":"audioUrl","type":"string","default":"","placeholder":"https://example.com/audio.mp3","description":"URL of the audio file to use for the video","displayOptions":{"show":{"audioInputType":["url"]}}},{"displayName":"Audio Data Field Name","name":"audioBinaryPropertyName","type":"string","default":"audio","placeholder":"e.g. audio","hint":"The name of the input field containing the binary audio data","typeOptions":{"binaryDataProperty":true},"displayOptions":{"show":{"audioInputType":["binary"]}}}],"displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/AlibabaCloud/alibaba.svg"},
|
|
3
|
+
{"displayName":"Anthropic","name":"anthropic","group":["transform"],"version":1,"subtitle":"={{ $parameter[\"operation\"] + \": \" + $parameter[\"resource\"] }}","description":"Interact with Anthropic AI models","defaults":{"name":"Anthropic"},"usableAsTool":true,"codex":{"alias":["LangChain","document","image","assistant","claude"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.anthropic/"}]}},"inputs":"={{\n\t\t(() => {\n\t\t\tconst resource = $parameter.resource;\n\t \tconst operation = $parameter.operation;\n\t\t\tif (resource === 'text' && operation === 'message') {\n\t\t\t\treturn [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n\t\t\t}\n\n\t\t\treturn ['main'];\n\t\t})()\n\t}}","outputs":["main"],"credentials":[{"name":"anthropicApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Document","value":"document"},{"name":"File","value":"file"},{"name":"Image","value":"image"},{"name":"Prompt","value":"prompt"},{"name":"Text","value":"text"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Document","value":"analyze","action":"Analyze document","description":"Take in documents and answer questions about them"}],"default":"analyze","displayOptions":{"show":{"resource":["document"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
claude-sonnet-4-6"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this document?","default":"What's in this document?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Document URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"URL(s)","name":"documentUrls","type":"string","placeholder":"e.g. https://example.com/document.pdf","description":"URL(s) of the document(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["document"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the document(s), seperate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["document"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxTokens","type":"number","default":1024,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Upload File","value":"upload","action":"Upload a file","description":"Upload a file to the Anthropic API for later use"},{"name":"Get File Metadata","value":"get","action":"Get file metadata","description":"Get metadata for a file from the Anthropic API"},{"name":"List Files","value":"list","action":"List files","description":"List files from the Anthropic API"},{"name":"Delete File","value":"deleteFile","action":"Delete a file","description":"Delete a file from the Anthropic API"}],"default":"upload","displayOptions":{"show":{"resource":["file"]}}},{"displayName":"File ID","name":"fileId","type":"string","placeholder":"e.g. file_123","description":"ID of the file to delete","default":"","displayOptions":{"show":{"operation":["deleteFile"],"resource":["file"]}}},{"displayName":"File ID","name":"fileId","type":"string","placeholder":"e.g. 
file_123","description":"ID of the file to get metadata for","default":"","displayOptions":{"show":{"operation":["get"],"resource":["file"]}}},{"displayName":"Return All","name":"returnAll","type":"boolean","default":false,"description":"Whether to return all results or only up to a given limit","displayOptions":{"show":{"operation":["list"],"resource":["file"]}}},{"displayName":"Limit","name":"limit","type":"number","typeOptions":{"minValue":1,"maxValue":1000},"default":50,"description":"Max number of results to return","displayOptions":{"show":{"returnAll":[false],"operation":["list"],"resource":["file"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"File URL","value":"url"},{"name":"Binary File","value":"binary"}],"displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"URL","name":"fileUrl","type":"string","placeholder":"e.g. https://example.com/file.pdf","description":"URL of the file to upload","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["upload"],"resource":["file"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field which contains the file","displayOptions":{"show":{"inputType":["binary"],"operation":["upload"],"resource":["file"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"File Name","name":"fileName","type":"string","description":"The file name to use for the uploaded file","default":""}],"displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Take in images and answer questions about them"}],"default":"analyze","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. claude-sonnet-4-6"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this image?","default":"What's in this image?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Image URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"URL(s)","name":"imageUrls","type":"string","placeholder":"e.g. 
https://example.com/image.png","description":"URL(s) of the image(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the image(s), seperate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxTokens","type":"number","default":1024,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Generate Prompt","value":"generate","action":"Generate a prompt","description":"Generate a prompt for a model"},{"name":"Improve Prompt","value":"improve","action":"Improve a prompt","description":"Improve a prompt for a model"},{"name":"Templatize Prompt","value":"templatize","action":"Templatize a prompt","description":"Templatize a prompt for a model"}],"default":"generate","displayOptions":{"show":{"resource":["prompt"]}}},{"displayName":"The <a href=\"https://docs.anthropic.com/en/api/prompt-tools-generate\">prompt tools APIs</a> are in a closed research preview. 
Your organization must request access to use them.","name":"experimentalNotice","type":"notice","default":"","displayOptions":{"show":{"resource":["prompt"]}}},{"displayName":"Task","name":"task","type":"string","description":"Description of the prompt's purpose","placeholder":"e.g. A chef for a meal prep planning service","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["prompt"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["generate"],"resource":["prompt"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"description":"Messages that constitute the prompt to be improved","placeholder":"Add Message","default":{"values":[{"content":"","role":"user"}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. 
Concise instructions for a meal prep service","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["improve"],"resource":["prompt"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["improve"],"resource":["prompt"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"System Message","name":"system","type":"string","description":"The existing system prompt to incorporate, if any","default":"","placeholder":"e.g. You are a professional meal prep chef"},{"displayName":"Feedback","name":"feedback","type":"string","description":"Feedback for improving the prompt","default":"","placeholder":"e.g. Make it more detailed and include cooking times"}],"displayOptions":{"show":{"operation":["improve"],"resource":["prompt"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"description":"Messages that constitute the prompt to be templatized","placeholder":"Add Message","default":{"values":[{"content":"","role":"user"}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. 
Translate hello to German","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["templatize"],"resource":["prompt"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["templatize"],"resource":["prompt"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"System Message","name":"system","type":"string","description":"The existing system prompt to templatize","default":"","placeholder":"e.g. You are a professional English to German translator"}],"displayOptions":{"show":{"operation":["templatize"],"resource":["prompt"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Create a completion with Anthropic model"}],"default":"message","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
claude-sonnet-4-6"}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"content":"","role":"user"}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Add Attachments","name":"addAttachments","type":"boolean","default":false,"description":"Whether to add attachments to the message","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Attachments Input Type","name":"attachmentsInputType","type":"options","default":"url","description":"The type of input to use for the attachments","options":[{"name":"URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"addAttachments":[true],"operation":["message"],"resource":["text"]}}},{"displayName":"Attachment URL(s)","name":"attachmentsUrls","type":"string","default":"","placeholder":"e.g. 
https://example.com/image.png","description":"URL(s) of the file(s) to attach, multiple URLs can be added separated by comma","displayOptions":{"show":{"addAttachments":[true],"attachmentsInputType":["url"],"operation":["message"],"resource":["text"]}}},{"displayName":"Attachment Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","description":"Name of the binary field(s) which contains the file(s) to attach, multiple field names can be added separated by comma","displayOptions":{"show":{"addAttachments":[true],"attachmentsInputType":["binary"],"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Include Merged Response","name":"includeMergedResponse","type":"boolean","default":false,"description":"Whether to include a single output string merging all text parts of the response"},{"displayName":"System Message","name":"system","type":"string","default":"","placeholder":"e.g. You are a helpful assistant"},{"displayName":"Code Execution","name":"codeExecution","type":"boolean","default":false,"description":"Whether to enable code execution. 
Not supported by all models."},{"displayName":"Web Search","name":"webSearch","type":"boolean","default":false,"description":"Whether to enable web search"},{"displayName":"Web Search Max Uses","name":"maxUses","type":"number","default":5,"description":"The maximum number of web search uses per request","typeOptions":{"minValue":0,"numberPrecision":0}},{"displayName":"Web Search Allowed Domains","name":"allowedDomains","type":"string","default":"","description":"Comma-separated list of domains to search. Only domains in this list will be searched. Conflicts with \"Web Search Blocked Domains\".","placeholder":"e.g. google.com, wikipedia.org"},{"displayName":"Web Search Blocked Domains","name":"blockedDomains","type":"string","default":"","description":"Comma-separated list of domains to block from search. Conflicts with \"Web Search Allowed Domains\".","placeholder":"e.g. google.com, wikipedia.org"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":1024,"description":"The maximum number of tokens to generate in the completion","type":"number","typeOptions":{"minValue":1,"numberPrecision":0}},{"displayName":"Output Randomness (Temperature)","name":"temperature","default":1,"description":"Controls the randomness of the output. Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Output Randomness (Top P)","name":"topP","default":0.7,"description":"The maximum cumulative probability of tokens to consider when sampling","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Output Randomness (Top K)","name":"topK","default":5,"description":"The maximum number of tokens to consider when sampling","type":"number","typeOptions":{"minValue":0,"numberPrecision":0}},{"displayName":"Max Tool Calls Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. Set to 0 for no limit","typeOptions":{"minValue":0,"numberPrecision":0}}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/Anthropic/anthropic.svg"},
|
|
4
|
+
{"displayName":"Google Gemini","name":"googleGemini","group":["transform"],"version":[1,1.1,1.2],"defaultVersion":1.2,"subtitle":"={{ $parameter[\"operation\"] + \": \" + $parameter[\"resource\"] }}","description":"Interact with Google Gemini AI models","defaults":{"name":"Google Gemini"},"usableAsTool":true,"codex":{"alias":["LangChain","video","document","audio","transcribe","assistant"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.googlegemini/"}]}},"inputs":"={{\n\t\t(() => {\n\t\t\tconst resource = $parameter.resource;\n\t \tconst operation = $parameter.operation;\n\t\t\tif (resource === 'text' && operation === 'message') {\n\t\t\t\treturn [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n\t\t\t}\n\n\t\t\treturn ['main'];\n\t\t})()\n\t}}","outputs":["main"],"credentials":[{"name":"googlePalmApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Audio","value":"audio"},{"name":"Document","value":"document"},{"name":"File Search","value":"fileSearch"},{"name":"Image","value":"image"},{"name":"Media File","value":"file"},{"name":"Text","value":"text"},{"name":"Video","value":"video"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Audio","value":"analyze","action":"Analyze audio","description":"Take in audio and answer questions about it"},{"name":"Transcribe a Recording","value":"transcribe","action":"Transcribe a recording","description":"Transcribes audio into the 
text"}],"default":"transcribe","displayOptions":{"show":{"resource":["audio"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"audioModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this audio?","default":"What's in this audio?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Audio URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["audio"]}}},{"displayName":"URL(s)","name":"audioUrls","type":"string","placeholder":"e.g. https://example.com/audio.mp3","description":"URL(s) of the audio(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the audio(s), seperate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed audio description","name":"maxOutputTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["audio"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"audioModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Audio URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"URL(s)","name":"audioUrls","type":"string","placeholder":"e.g. 
https://example.com/audio.mp3","description":"URL(s) of the audio(s) to transcribe, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the audio(s), seperate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Options","name":"options","type":"collection","default":{},"options":[{"displayName":"Start Time","name":"startTime","type":"string","default":"","description":"The start time of the audio in MM:SS or HH:MM:SS format","placeholder":"e.g. 00:15"},{"displayName":"End Time","name":"endTime","type":"string","default":"","description":"The end time of the audio in MM:SS or HH:MM:SS format","placeholder":"e.g. 
02:15"}],"displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Document","value":"analyze","action":"Analyze document","description":"Take in documents and answer questions about them"}],"default":"analyze","displayOptions":{"show":{"resource":["document"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this document?","default":"What's in this document?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Document URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"URL(s)","name":"documentUrls","type":"string","placeholder":"e.g. https://example.com/document.pdf","description":"URL(s) of the document(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["document"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the document(s), seperate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["document"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed document description","name":"maxOutputTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["document"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Upload Media File","value":"upload","action":"Upload a media file","description":"Upload a file to the Google Gemini API for later use"}],"default":"upload","displayOptions":{"show":{"resource":["file"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"File URL","value":"url"},{"name":"Binary File","value":"binary"}],"displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"URL","name":"fileUrl","type":"string","placeholder":"e.g. https://example.com/file.pdf","description":"URL of the file to upload","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["upload"],"resource":["file"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the file","displayOptions":{"show":{"inputType":["binary"],"operation":["upload"],"resource":["file"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Create File Search Store","value":"createStore","action":"Create a File Search store","description":"Create a new File Search store for RAG (Retrieval Augmented Generation)"},{"name":"Delete File Search Store","value":"deleteStore","action":"Delete a File Search store","description":"Delete a File Search store"},{"name":"List File Search Stores","value":"listStores","action":"List all File Search stores","description":"List all File Search stores owned by the user"},{"name":"Upload to File Search Store","value":"uploadToStore","action":"Upload a file to a File Search store","description":"Upload a file to a File Search store for RAG (Retrieval Augmented Generation)"}],"default":"createStore","displayOptions":{"show":{"resource":["fileSearch"]}}},{"displayName":"Display Name","name":"displayName","type":"string","placeholder":"e.g. My File Search Store","description":"A human-readable name for the File Search store","default":"","required":true,"displayOptions":{"show":{"operation":["createStore"],"resource":["fileSearch"]}}},{"displayName":"File Search Store Name","name":"fileSearchStoreName","type":"string","placeholder":"e.g. fileSearchStores/abc123","description":"The full name of the File Search store to delete (format: fileSearchStores/...)","default":"","required":true,"displayOptions":{"show":{"operation":["deleteStore"],"resource":["fileSearch"]}}},{"displayName":"Force Delete","name":"force","type":"boolean","description":"Whether to delete related Documents and objects. 
If false, deletion will fail if the store contains any Documents.","default":false,"displayOptions":{"show":{"operation":["deleteStore"],"resource":["fileSearch"]}}},{"displayName":"Page Size","name":"pageSize","type":"number","description":"Maximum number of File Search stores to return per page (max 20)","default":10,"typeOptions":{"minValue":1,"maxValue":20},"displayOptions":{"show":{"operation":["listStores"],"resource":["fileSearch"]}}},{"displayName":"Page Token","name":"pageToken","type":"string","description":"Token from a previous page to retrieve the next page of results","default":"","displayOptions":{"show":{"operation":["listStores"],"resource":["fileSearch"]}}},{"displayName":"File Search Store Name","name":"fileSearchStoreName","type":"string","placeholder":"e.g. fileSearchStores/abc123","description":"The full name of the File Search store to upload to (format: fileSearchStores/...)","default":"","required":true,"displayOptions":{"show":{"operation":["uploadToStore"],"resource":["fileSearch"]}}},{"displayName":"File Display Name","name":"displayName","type":"string","placeholder":"e.g. My Document","description":"A human-readable name for the file (will be visible in citations)","default":"","required":true,"displayOptions":{"show":{"operation":["uploadToStore"],"resource":["fileSearch"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"File URL","value":"url"},{"name":"Binary File","value":"binary"}],"displayOptions":{"show":{"operation":["uploadToStore"],"resource":["fileSearch"]}}},{"displayName":"URL","name":"fileUrl","type":"string","placeholder":"e.g. https://example.com/file.pdf","description":"URL of the file to upload","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["uploadToStore"],"resource":["fileSearch"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the file","displayOptions":{"show":{"inputType":["binary"],"operation":["uploadToStore"],"resource":["fileSearch"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze an image","description":"Take in images and answer questions about them"},{"name":"Generate an Image","value":"generate","action":"Generate an image","description":"Creates an image from a text prompt"},{"name":"Edit Image","value":"edit","action":"Edit an image","description":"Upload one or more images and apply edits based on a prompt"}],"default":"generate","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this image?","default":"What's in this image?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Image URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"URL(s)","name":"imageUrls","type":"string","placeholder":"e.g. 
https://example.com/image.png","description":"URL(s) of the image(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the image(s), separate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxOutputTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageEditModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","placeholder":"e.g. 
combine the first image with the second image","description":"Instruction describing how to edit the image","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Images","name":"images","type":"fixedCollection","placeholder":"Add Image","typeOptions":{"multipleValues":true,"multipleValueButtonText":"Add Image"},"default":{"values":[{"binaryPropertyName":"data"}]},"description":"Add one or more binary fields to include images with your prompt","options":[{"displayName":"Image","name":"values","values":[{"displayName":"Binary Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","description":"The name of the binary field containing the image data"}]}],"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"edited","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageGenerationModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
models/gemini-2.5-flash"}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":1.2}}],"operation":["generate"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":"models/gemini-3.1-flash-image-preview"},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageGenerationModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}],"operation":["generate"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","placeholder":"e.g. A cute cat eating a dinosaur","description":"A text description of the desired image(s)","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Number of Images","name":"sampleCount","default":1,"description":"Number of images to generate","type":"number","displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"imagen"}}]}},"typeOptions":{"minValue":1}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Create a completion with Google Gemini 
model"}],"default":"message","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":1.2}}],"operation":["message"],"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":"models/gemini-3-flash-preview"},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}],"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"content":""}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be send","default":"","placeholder":"e.g. 
Hello, how can you help me?","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Model","value":"model","description":"Tell the model to adopt a specific tone or personality"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Output Content as JSON","name":"jsonOutput","type":"boolean","description":"Whether to attempt to return the response in JSON format","default":false,"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Built-in Tools","name":"builtInTools","placeholder":"Add Built-in Tool","type":"collection","default":{},"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}],"operation":["message"],"resource":["text"]}},"options":[{"displayName":"Google Search","name":"googleSearch","type":"boolean","default":true,"description":"Whether to allow the model to search the web using Google Search to get real-time information"},{"displayName":"Google Maps","name":"googleMaps","type":"collection","default":{"latitude":"","longitude":""},"options":[{"displayName":"Latitude","name":"latitude","type":"number","default":"","description":"The latitude coordinate for location-based queries","typeOptions":{"numberPrecision":6}},{"displayName":"Longitude","name":"longitude","type":"number","default":"","description":"The longitude coordinate for location-based queries","typeOptions":{"numberPrecision":6}}]},{"displayName":"URL 
Context","name":"urlContext","type":"boolean","default":true,"description":"Whether to allow the model to read and analyze content from specific URLs"},{"displayName":"File Search","name":"fileSearch","type":"collection","default":{"fileSearchStoreNames":"[]"},"options":[{"displayName":"File Search Store Names","name":"fileSearchStoreNames","description":"The file search store names to use for the file search. File search stores are managed via Google AI Studio.","type":"json","default":"[]","required":true},{"displayName":"Metadata Filter","name":"metadataFilter","type":"string","default":"","description":"Use metadata filter to search within a subset of documents. Example: author=\"Robert Graves\".","placeholder":"e.g. author=\"John Doe\""}]},{"displayName":"Code Execution","name":"codeExecution","type":"boolean","default":true,"description":"Whether to allow the model to execute code it generates to produce a response. Supported only by certain models."}]},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Include Merged Response","name":"includeMergedResponse","type":"boolean","default":false,"description":"Whether to include a single output string merging all text parts of the response","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"System Message","name":"systemMessage","type":"string","default":"","placeholder":"e.g. You are a helpful assistant"},{"displayName":"Code Execution","name":"codeExecution","type":"boolean","default":false,"description":"Whether to allow the model to execute code it generates to produce a response. 
Supported only by certain models.","displayOptions":{"show":{"@version":[{"_cnd":{"eq":1}}]}}},{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number","typeOptions":{"minValue":-2,"maxValue":2,"numberPrecision":1}},{"displayName":"Maximum Number of Tokens","name":"maxOutputTokens","default":16,"description":"The maximum number of tokens to generate in the completion","type":"number","typeOptions":{"minValue":1,"numberPrecision":0}},{"displayName":"Number of Completions","name":"candidateCount","default":1,"description":"How many completions to generate for each prompt","type":"number","typeOptions":{"minValue":1,"maxValue":8,"numberPrecision":0}},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number","typeOptions":{"minValue":-2,"maxValue":2,"numberPrecision":1}},{"displayName":"Output Randomness (Temperature)","name":"temperature","default":1,"description":"Controls the randomness of the output. Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive","type":"number","typeOptions":{"minValue":0,"maxValue":2,"numberPrecision":1}},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"description":"The maximum cumulative probability of tokens to consider when sampling","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Output Randomness (Top K)","name":"topK","default":1,"description":"The maximum number of tokens to consider when sampling","type":"number","typeOptions":{"minValue":1,"numberPrecision":0}},{"displayName":"Thinking Budget","name":"thinkingBudget","type":"number","default":-1,"description":"Controls reasoning tokens for thinking models. Set to 0 to disable automatic thinking. Set to -1 for dynamic thinking (default).","typeOptions":{"minValue":-1,"numberPrecision":0}},{"displayName":"Max Tool Calls Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. 
Set to 0 for no limit","typeOptions":{"minValue":0,"numberPrecision":0}}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Video","value":"analyze","action":"Analyze video","description":"Take in videos and answer questions about them"},{"name":"Generate a Video","value":"generate","action":"Generate a video","description":"Creates a video from a text prompt"},{"name":"Download Video","value":"download","action":"Download a video","description":"Download a generated video from the Google Gemini API using a URL"}],"default":"generate","displayOptions":{"show":{"resource":["video"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["video"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this video?","default":"What's in this video?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["video"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Video URL(s)","value":"url"},{"name":"Binary File(s)","value":"binary"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["video"]}}},{"displayName":"URL(s)","name":"videoUrls","type":"string","placeholder":"e.g. 
https://example.com/video.mp4","description":"URL(s) of the video(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["video"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the video(s), seperate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["video"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["video"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed video description","name":"maxOutputTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["video"]}}},{"displayName":"URL","name":"url","type":"string","placeholder":"e.g. 
https://generativelanguage.googleapis.com/v1beta/files/abcdefg:download","description":"The URL from Google Gemini API to download the video from","default":"","displayOptions":{"show":{"operation":["download"],"resource":["video"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["download"],"resource":["video"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"typeOptions":{"loadOptionsDependsOn":["operation","resource"]},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"videoGenerationModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. models/gemini-2.5-flash"}],"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Prompt","name":"prompt","type":"string","placeholder":"e.g. 
Panning wide shot of a calico kitten sleeping in the sunshine","description":"A text description of the desired video","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Return As","name":"returnAs","type":"options","options":[{"name":"Video","value":"video"},{"name":"URL","value":"url"}],"description":"Whether to return the video as a binary file or a URL that can be used to download the video later","default":"video","displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Number of Videos","name":"sampleCount","type":"number","default":1,"description":"How many videos to generate","typeOptions":{"minValue":1,"maxValue":4}},{"displayName":"Duration (Seconds)","name":"durationSeconds","type":"number","default":8,"description":"Length of the generated video in seconds. 
Supported only by certain models.","typeOptions":{"minValue":5,"maxValue":8}},{"displayName":"Aspect Ratio","name":"aspectRatio","type":"options","options":[{"name":"Widescreen (16:9)","value":"16:9","description":"Most common aspect ratio for televisions and monitors"},{"name":"Portrait (9:16)","value":"9:16","description":"Popular for short-form videos like YouTube Shorts"}],"default":"16:9"},{"displayName":"Person Generation","name":"personGeneration","type":"options","options":[{"name":"Don't Allow","value":"dont_allow","description":"Prevent generation of people in the video"},{"name":"Allow Adult","value":"allow_adult","description":"Allow generation of adult people in the video"},{"name":"Allow All","value":"allow_all","description":"Allow generation of all people in the video"}],"default":"dont_allow"},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/GoogleGemini/gemini.svg"},
|
|
5
5
|
{"displayName":"MiniMax","name":"minimax","group":["transform"],"version":1,"subtitle":"={{ $parameter[\"operation\"] + \": \" + $parameter[\"resource\"] }}","description":"Interact with MiniMax AI models","defaults":{"name":"MiniMax"},"usableAsTool":true,"codex":{"alias":["minimax","hailuo","LangChain","video","image","tts","speech"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.minimax/"}]}},"inputs":"={{\n\t\t(() => {\n\t\t\tconst resource = $parameter.resource;\n\t\t\tconst operation = $parameter.operation;\n\t\t\tif (resource === 'text' && operation === 'message') {\n\t\t\t\treturn [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n\t\t\t}\n\n\t\t\treturn ['main'];\n\t\t})()\n\t}}","outputs":["main"],"credentials":[{"name":"minimaxApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Audio","value":"audio"},{"name":"Image","value":"image"},{"name":"Text","value":"text"},{"name":"Video","value":"video"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"displayOptions":{"show":{"resource":["audio"]}},"options":[{"name":"Text to Speech","value":"textToSpeech","action":"Convert text to speech","description":"Generate speech audio from text input"}],"default":"textToSpeech"},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Speech 02 HD","value":"speech-02-hd","description":"Superior rhythm and stability with outstanding quality"},{"name":"Speech 02 Turbo","value":"speech-02-turbo","description":"Enhanced multilingual capabilities and performance"},{"name":"Speech 2.6 HD","value":"speech-2.6-hd","description":"HD model with outstanding prosody and cloning similarity"},{"name":"Speech 2.6 
Turbo","value":"speech-2.6-turbo","description":"Turbo model with support for 40 languages"},{"name":"Speech 2.8 HD","value":"speech-2.8-hd","description":"Latest HD model with ultra-realistic quality and sound tags"},{"name":"Speech 2.8 Turbo","value":"speech-2.8-turbo","description":"Latest Turbo model with seamless speed and natural flow"}],"default":"speech-2.8-hd","description":"The speech synthesis model to use","displayOptions":{"show":{"resource":["audio"],"operation":["textToSpeech"]}}},{"displayName":"Text","name":"text","type":"string","typeOptions":{"rows":4},"default":"","required":true,"description":"The text to convert to speech (max 10,000 characters)","placeholder":"e.g. Hello, welcome to our service!","displayOptions":{"show":{"resource":["audio"],"operation":["textToSpeech"]}}},{"displayName":"Voice ID","name":"voiceId","type":"string","default":"English_Graceful_Lady","required":true,"description":"Voice ID to use for speech synthesis. Browse available voices in the <a href=\"https://platform.minimax.io/docs/faq/system-voice-id\">MiniMax documentation</a>.","placeholder":"e.g. English_Graceful_Lady","displayOptions":{"show":{"resource":["audio"],"operation":["textToSpeech"]}}},{"displayName":"Download Audio","name":"downloadAudio","type":"boolean","default":true,"description":"Whether to download the generated audio as binary data. When disabled, only the audio URL is returned.","displayOptions":{"show":{"resource":["audio"],"operation":["textToSpeech"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Audio Format","name":"audioFormat","type":"options","options":[{"name":"MP3","value":"mp3"},{"name":"PCM","value":"pcm"},{"name":"FLAC","value":"flac"},{"name":"WAV","value":"wav"}],"default":"mp3","description":"Output audio format. 
WAV is only supported in non-streaming mode."},{"displayName":"Emotion","name":"emotion","type":"options","options":[{"name":"Angry","value":"angry"},{"name":"Calm","value":"calm"},{"name":"Disgusted","value":"disgusted"},{"name":"Fearful","value":"fearful"},{"name":"Happy","value":"happy"},{"name":"Sad","value":"sad"},{"name":"Surprised","value":"surprised"}],"default":"calm","description":"Emotion for synthesized speech. By default the model auto-selects the most natural emotion."},{"displayName":"Language Boost","name":"languageBoost","type":"options","options":[{"name":"Arabic","value":"Arabic"},{"name":"Auto Detect","value":"auto"},{"name":"Chinese","value":"Chinese"},{"name":"English","value":"English"},{"name":"French","value":"French"},{"name":"German","value":"German"},{"name":"Indonesian","value":"Indonesian"},{"name":"Italian","value":"Italian"},{"name":"Japanese","value":"Japanese"},{"name":"Korean","value":"Korean"},{"name":"Portuguese","value":"Portuguese"},{"name":"Russian","value":"Russian"},{"name":"Spanish","value":"Spanish"},{"name":"Thai","value":"Thai"},{"name":"Turkish","value":"Turkish"},{"name":"Vietnamese","value":"Vietnamese"}],"default":"auto","description":"Enhance recognition for a specific language"},{"displayName":"Pitch","name":"pitch","type":"number","typeOptions":{"minValue":-12,"maxValue":12},"default":0,"description":"Speech pitch adjustment (-12 to 12, 0 = original pitch)"},{"displayName":"Speed","name":"speed","type":"number","typeOptions":{"minValue":0.5,"maxValue":2,"numberPrecision":1},"default":1,"description":"Speech speed (0.5-2, higher = faster)"},{"displayName":"Volume","name":"volume","type":"number","typeOptions":{"minValue":0.1,"maxValue":10,"numberPrecision":1},"default":1,"description":"Speech volume (0.1-10, higher = 
louder)"}],"displayOptions":{"show":{"resource":["audio"],"operation":["textToSpeech"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"displayOptions":{"show":{"resource":["image"]}},"options":[{"name":"Generate an Image","value":"generate","action":"Generate an image","description":"Create an image from a text prompt"}],"default":"generate"},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"Image-01","value":"image-01","description":"High-quality image generation with fine-grained details"}],"default":"image-01","description":"The model to use for image generation","displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","typeOptions":{"rows":4},"default":"","required":true,"description":"Text description of the image to generate (max 1500 characters)","placeholder":"e.g. A serene mountain landscape at sunset with reflections in a lake","displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Aspect Ratio","name":"aspectRatio","type":"options","options":[{"name":"1:1 (1024x1024)","value":"1:1"},{"name":"16:9 (1280x720)","value":"16:9"},{"name":"2:3 (832x1248)","value":"2:3"},{"name":"21:9 (1344x576)","value":"21:9"},{"name":"3:2 (1248x832)","value":"3:2"},{"name":"3:4 (864x1152)","value":"3:4"},{"name":"4:3 (1152x864)","value":"4:3"},{"name":"9:16 (720x1280)","value":"9:16"}],"default":"1:1","description":"Aspect ratio of the generated image","displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Number of Images","name":"numberOfImages","type":"number","typeOptions":{"minValue":1,"maxValue":9},"default":1,"description":"Number of images to generate per request (1-9)","displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Download Image","name":"downloadImage","type":"boolean","default":true,"description":"Whether to 
download the generated image as binary data. When disabled, only the image URL is returned.","displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Prompt Optimizer","name":"promptOptimizer","type":"boolean","default":false,"description":"Whether to automatically optimize the prompt for better results"},{"displayName":"Seed","name":"seed","type":"number","default":0,"description":"Random seed for reproducible outputs. Using the same seed and parameters produces the same image."}],"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Send a message and get a response from a MiniMax model"}],"default":"message","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"MiniMax-M2","value":"MiniMax-M2"},{"name":"MiniMax-M2.1","value":"MiniMax-M2.1"},{"name":"MiniMax-M2.1-Highspeed","value":"MiniMax-M2.1-highspeed"},{"name":"MiniMax-M2.5","value":"MiniMax-M2.5"},{"name":"MiniMax-M2.5-Highspeed","value":"MiniMax-M2.5-highspeed"},{"name":"MiniMax-M2.7","value":"MiniMax-M2.7"},{"name":"MiniMax-M2.7-Highspeed","value":"MiniMax-M2.7-highspeed"}],"default":"MiniMax-M2.7","description":"The model to use for generating the response","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"content":"","role":"user"}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to 
be sent","default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Hide Thinking","name":"hideThinking","type":"boolean","default":true,"description":"Whether to strip chain-of-thought reasoning from the response, returning only the final answer"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":1024,"description":"The maximum number of tokens to generate in the completion","type":"number","typeOptions":{"minValue":1,"numberPrecision":0}},{"displayName":"Max Tool Calls Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. Set to 0 for no limit.","typeOptions":{"minValue":0,"numberPrecision":0}},{"displayName":"Output Randomness (Temperature)","name":"temperature","default":0.7,"description":"Controls the randomness of the output. Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Output Randomness (Top P)","name":"topP","default":0.95,"description":"The maximum cumulative probability of tokens to consider when sampling","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":2}},{"displayName":"System Message","name":"system","type":"string","default":"","placeholder":"e.g. You are a helpful assistant"}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"displayOptions":{"show":{"resource":["video"]}},"options":[{"name":"Generate Video From Text","value":"textToVideo","action":"Generate video from text prompt","description":"Generate a video from a text prompt"},{"name":"Generate Video From Image","value":"imageToVideo","action":"Generate video from image","description":"Generate a video from an image, with optional last frame and subject reference"}],"default":"textToVideo"},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"MiniMax-Hailuo-2.3","value":"MiniMax-Hailuo-2.3","description":"Latest video generation model with enhanced realism"},{"name":"MiniMax-Hailuo-02","value":"MiniMax-Hailuo-02","description":"Video model supporting higher resolution and longer duration"},{"name":"T2V-01-Director","value":"T2V-01-Director","description":"Text-to-video model with camera control commands"},{"name":"T2V-01","value":"T2V-01","description":"Standard text-to-video model"}],"default":"MiniMax-Hailuo-2.3","description":"The model to use for video generation","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Prompt","name":"prompt","type":"string","typeOptions":{"rows":4},"default":"","required":true,"description":"Text description of the video (max 2000 characters). 
Camera movements can be controlled using [command] syntax, e.g. [Push in], [Pan left].","placeholder":"e.g. A cat playing with a ball of yarn [Static shot]","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Duration (Seconds)","name":"duration","type":"options","options":[{"name":"6 Seconds","value":6},{"name":"10 Seconds","value":10}],"default":6,"description":"Duration of the generated video","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Resolution","name":"resolution","type":"options","options":[{"name":"720P","value":"720P"},{"name":"768P","value":"768P"},{"name":"1080P","value":"1080P"}],"default":"768P","description":"Resolution of the generated video. Available options depend on the model.","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Download Video","name":"downloadVideo","type":"boolean","default":true,"description":"Whether to download the generated video as binary data. 
When disabled, only the video URL is returned.","displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Prompt Optimizer","name":"promptOptimizer","type":"boolean","default":true,"description":"Whether to automatically optimize the prompt for better results"}],"displayOptions":{"show":{"resource":["video"],"operation":["textToVideo"]}}},{"displayName":"Model","name":"modelId","type":"options","options":[{"name":"I2V-01","value":"I2V-01","description":"Standard image-to-video model"},{"name":"I2V-01-Director","value":"I2V-01-Director","description":"Image-to-video with camera control commands"},{"name":"I2V-01-Live","value":"I2V-01-live","description":"Image-to-video live model"},{"name":"MiniMax-Hailuo-02","value":"MiniMax-Hailuo-02","description":"Model supporting higher resolution and longer duration"},{"name":"MiniMax-Hailuo-2.3","value":"MiniMax-Hailuo-2.3","description":"Latest model with enhanced realism"},{"name":"MiniMax-Hailuo-2.3-Fast","value":"MiniMax-Hailuo-2.3-Fast","description":"Faster image-to-video model for value and efficiency"}],"default":"MiniMax-Hailuo-2.3","description":"The model to use for video generation","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Image Input Type","name":"imageInputType","type":"options","options":[{"name":"URL","value":"url"},{"name":"Binary File","value":"binary"}],"default":"url","description":"How to provide the first frame image","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Image URL","name":"imageUrl","type":"string","default":"","required":true,"placeholder":"https://example.com/image.jpg","description":"Public URL of the image to use as first frame (JPG, JPEG, PNG, WebP, 
<20MB)","displayOptions":{"show":{"imageInputType":["url"],"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","required":true,"placeholder":"e.g. data","hint":"The name of the input field containing the binary image data","typeOptions":{"binaryDataProperty":true},"displayOptions":{"show":{"imageInputType":["binary"],"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Prompt","name":"prompt","type":"string","typeOptions":{"rows":4},"default":"","description":"Optional text description of the video (max 2000 characters). Camera movements can be controlled using [command] syntax.","placeholder":"e.g. The subject smiles and waves at the camera [Zoom in]","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Duration (Seconds)","name":"duration","type":"options","options":[{"name":"6 Seconds","value":6},{"name":"10 Seconds","value":10}],"default":6,"description":"Duration of the generated video","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Resolution","name":"resolution","type":"options","options":[{"name":"512P","value":"512P"},{"name":"720P","value":"720P"},{"name":"768P","value":"768P"},{"name":"1080P","value":"1080P"}],"default":"768P","description":"Resolution of the generated video. Available options depend on the model.","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Download Video","name":"downloadVideo","type":"boolean","default":true,"description":"Whether to download the generated video as binary data. 
When disabled, only the video URL is returned.","displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Prompt Optimizer","name":"promptOptimizer","type":"boolean","default":true,"description":"Whether to automatically optimize the prompt"},{"displayName":"Last Frame Image Input Type","name":"lastFrameInputType","type":"options","options":[{"name":"None","value":"none"},{"name":"URL","value":"url"},{"name":"Binary File","value":"binary"}],"default":"none","description":"Provide a last frame image to generate a first-and-last-frame video. Only supported by MiniMax-Hailuo-2.3 and MiniMax-Hailuo-02."},{"displayName":"Last Frame Image URL","name":"lastFrameImageUrl","type":"string","default":"","placeholder":"https://example.com/last-frame.jpg","displayOptions":{"show":{"lastFrameInputType":["url"]}}},{"displayName":"Last Frame Data Field Name","name":"lastFrameBinaryPropertyName","type":"string","default":"lastFrame","placeholder":"e.g. lastFrame","typeOptions":{"binaryDataProperty":true},"displayOptions":{"show":{"lastFrameInputType":["binary"]}}},{"displayName":"Subject Reference Input Type","name":"subjectReferenceInputType","type":"options","options":[{"name":"None","value":"none"},{"name":"URL","value":"url"},{"name":"Binary File","value":"binary"}],"default":"none","description":"Provide a face photo for facial consistency in the generated video. Only supported by MiniMax-Hailuo-2.3."},{"displayName":"Subject Reference Image URL","name":"subjectReferenceImageUrl","type":"string","default":"","placeholder":"https://example.com/face.jpg","displayOptions":{"show":{"subjectReferenceInputType":["url"]}}},{"displayName":"Subject Reference Data Field Name","name":"subjectReferenceBinaryPropertyName","type":"string","default":"subjectReference","placeholder":"e.g. 
subjectReference","typeOptions":{"binaryDataProperty":true},"displayOptions":{"show":{"subjectReferenceInputType":["binary"]}}}],"displayOptions":{"show":{"resource":["video"],"operation":["imageToVideo"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/MiniMax/minimax.svg"},
|
|
6
|
-
{"displayName":"Moonshot Kimi","name":"moonshot","group":["transform"],"version":1,"subtitle":"={{ $parameter[\"operation\"] + \": \" + $parameter[\"resource\"] }}","description":"Interact with Moonshot Kimi AI models","defaults":{"name":"Moonshot Kimi"},"usableAsTool":true,"codex":{"alias":["kimi","moonshot","LangChain","image","vision"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.moonshot/"}]}},"inputs":"={{\n\t\t(() => {\n\t\t\tconst resource = $parameter.resource;\n\t\t\tconst operation = $parameter.operation;\n\t\t\tif (resource === 'text' && operation === 'message') {\n\t\t\t\treturn [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n\t\t\t}\n\n\t\t\treturn ['main'];\n\t\t})()\n\t}}","outputs":["main"],"credentials":[{"name":"moonshotApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Image","value":"image"},{"name":"Text","value":"text"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Analyze an image and answer questions about it"}],"default":"analyze","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. kimi-k2.
|
|
6
|
+
{"displayName":"Moonshot Kimi","name":"moonshot","group":["transform"],"version":1,"subtitle":"={{ $parameter[\"operation\"] + \": \" + $parameter[\"resource\"] }}","description":"Interact with Moonshot Kimi AI models","defaults":{"name":"Moonshot Kimi"},"usableAsTool":true,"codex":{"alias":["kimi","moonshot","LangChain","image","vision"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.moonshot/"}]}},"inputs":"={{\n\t\t(() => {\n\t\t\tconst resource = $parameter.resource;\n\t\t\tconst operation = $parameter.operation;\n\t\t\tif (resource === 'text' && operation === 'message') {\n\t\t\t\treturn [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n\t\t\t}\n\n\t\t\treturn ['main'];\n\t\t})()\n\t}}","outputs":["main"],"credentials":[{"name":"moonshotApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Image","value":"image"},{"name":"Text","value":"text"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Analyze an image and answer questions about it"}],"default":"analyze","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. kimi-k2.6"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. 
What's in this image?","default":"What's in this image?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the image(s), separate multiple field names with commas","typeOptions":{"binaryDataProperty":true},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxTokens","type":"number","default":1024,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Send a message and get a response from a Moonshot Kimi model"}],"default":"message","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
kimi-k2.6"}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"content":"","role":"user"}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Add Attachments","name":"addAttachments","type":"boolean","default":false,"description":"Whether to add image attachments to the message","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Attachment Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","description":"Name of the binary field(s) which contains the image(s) to attach, separate multiple field names with commas","typeOptions":{"binaryDataProperty":true},"displayOptions":{"show":{"addAttachments":[true],"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Include Merged Response","name":"includeMergedResponse","type":"boolean","default":false,"description":"Whether to include a single output string merging all text parts of the response"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":1024,"description":"The maximum number of tokens to generate in the completion","type":"number","typeOptions":{"minValue":1,"numberPrecision":0}},{"displayName":"Max Tool Calls Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. Set to 0 for no limit.","typeOptions":{"minValue":0,"numberPrecision":0}},{"displayName":"Output Randomness (Temperature)","name":"temperature","default":0.7,"description":"Controls the randomness of the output. Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"description":"The maximum cumulative probability of tokens to consider when sampling","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"System Message","name":"system","type":"string","default":"","placeholder":"e.g. You are a helpful assistant"},{"displayName":"Thinking Mode","name":"thinkingMode","type":"boolean","default":false,"description":"Whether to enable thinking mode for deep reasoning. The model will include reasoning steps in the response. Cannot be used together with Web Search."},{"displayName":"Web Search","name":"webSearch","type":"boolean","default":false,"description":"Whether to enable built-in web search. The model will search the web for relevant information. Cannot be used together with Thinking Mode."}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/Moonshot/moonshot.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/Moonshot/moonshot.dark.svg"}},
|
|
7
7
|
{"displayName":"Ollama","name":"ollama","group":["transform"],"version":1,"subtitle":"={{ $parameter[\"operation\"] + \": \" + $parameter[\"resource\"] }}","description":"Interact with Ollama AI models","defaults":{"name":"Ollama"},"usableAsTool":true,"codex":{"alias":["LangChain","image","vision","AI","local"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.ollama/"}]}},"inputs":"={{\n\t\t(() => {\n\t\t\tconst resource = $parameter.resource;\n\t \tconst operation = $parameter.operation;\n\t\t\tif (resource === 'text' && operation === 'message') {\n\t\t\t\treturn [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n\t\t\t}\n\n\t\t\treturn ['main'];\n\t\t})()\n\t}}","outputs":["main"],"credentials":[{"name":"ollamaApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Image","value":"image"},{"name":"Text","value":"text"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Take in images and answer questions about them"}],"default":"analyze","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. llava, llama3.2-vision"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. 
What's in this image?","default":"What's in this image?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"binary","options":[{"name":"Binary File(s)","value":"binary"},{"name":"Image URL(s)","value":"url"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name(s)","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary field(s) which contains the image(s), separate multiple field names with commas","displayOptions":{"show":{"inputType":["binary"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"URL(s)","name":"imageUrls","type":"string","placeholder":"e.g. https://example.com/image.png","description":"URL(s) of the image(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"System Message","name":"system","type":"string","default":"","placeholder":"e.g. You are a helpful assistant.","description":"System message to set the context for the conversation","typeOptions":{"rows":2}},{"displayName":"Temperature","name":"temperature","type":"number","default":0.8,"typeOptions":{"minValue":0,"maxValue":2,"numberPrecision":2},"description":"Controls randomness in responses. 
Lower values make output more focused."},{"displayName":"Output Randomness (Top P)","name":"top_p","default":0.7,"description":"The maximum cumulative probability of tokens to consider when sampling","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Top K","name":"top_k","type":"number","default":40,"typeOptions":{"minValue":1},"description":"Controls diversity by limiting the number of top tokens to consider"},{"displayName":"Max Tokens","name":"num_predict","type":"number","default":1024,"typeOptions":{"minValue":1,"numberPrecision":0},"description":"Maximum number of tokens to generate in the completion"},{"displayName":"Frequency Penalty","name":"frequency_penalty","type":"number","default":0,"typeOptions":{"minValue":0,"numberPrecision":2},"description":"Adjusts the penalty for tokens that have already appeared in the generated text. Higher values discourage repetition."},{"displayName":"Presence Penalty","name":"presence_penalty","type":"number","default":0,"typeOptions":{"numberPrecision":2},"description":"Adjusts the penalty for tokens based on their presence in the generated text so far. Positive values penalize tokens that have already appeared, encouraging diversity."},{"displayName":"Repetition Penalty","name":"repeat_penalty","type":"number","default":1.1,"typeOptions":{"minValue":0,"numberPrecision":2},"description":"Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient."},{"displayName":"Context Length","name":"num_ctx","type":"number","default":4096,"typeOptions":{"minValue":1,"numberPrecision":0},"description":"Sets the size of the context window used to generate the next token"},{"displayName":"Repeat Last N","name":"repeat_last_n","type":"number","default":64,"typeOptions":{"minValue":-1,"numberPrecision":0},"description":"Sets how far back for the model to look back to prevent repetition. 
(0 = disabled, -1 = num_ctx)."},{"displayName":"Min P","name":"min_p","type":"number","default":0,"typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":3},"description":"Alternative to the top_p, and aims to ensure a balance of quality and variety. The parameter p represents the minimum probability for a token to be considered, relative to the probability of the most likely token."},{"displayName":"Seed","name":"seed","type":"number","default":0,"typeOptions":{"minValue":0,"numberPrecision":0},"description":"Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt."},{"displayName":"Stop Sequences","name":"stop","type":"string","default":"","description":"Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Separate multiple patterns with commas"},{"displayName":"Keep Alive","name":"keep_alive","type":"string","default":"5m","description":"Specifies the duration to keep the loaded model in memory after use. Format: 1h30m (1 hour 30 minutes)."},{"displayName":"Low VRAM Mode","name":"low_vram","type":"boolean","default":false,"description":"Whether to activate low VRAM mode, which reduces memory usage at the cost of slower generation speed. Useful for GPUs with limited memory."},{"displayName":"Main GPU ID","name":"main_gpu","type":"number","default":0,"typeOptions":{"minValue":0,"numberPrecision":0},"description":"Specifies the ID of the GPU to use for the main computation. Only change this if you have multiple GPUs."},{"displayName":"Context Batch Size","name":"num_batch","type":"number","default":512,"typeOptions":{"minValue":1,"numberPrecision":0},"description":"Sets the batch size for prompt processing. 
Larger batch sizes may improve generation speed but increase memory usage."},{"displayName":"Number of GPUs","name":"num_gpu","type":"number","default":-1,"typeOptions":{"minValue":-1,"numberPrecision":0},"description":"Specifies the number of GPUs to use for parallel processing. Set to -1 for auto-detection."},{"displayName":"Number of CPU Threads","name":"num_thread","type":"number","default":0,"typeOptions":{"minValue":0,"numberPrecision":0},"description":"Specifies the number of CPU threads to use for processing. Set to 0 for auto-detection."},{"displayName":"Penalize Newlines","name":"penalize_newline","type":"boolean","default":true,"description":"Whether the model will be less likely to generate newline characters, encouraging longer continuous sequences of text"},{"displayName":"Use Memory Locking","name":"use_mlock","type":"boolean","default":false,"description":"Whether to lock the model in memory to prevent swapping. This can improve performance but requires sufficient available memory."},{"displayName":"Use Memory Mapping","name":"use_mmap","type":"boolean","default":true,"description":"Whether to use memory mapping for loading the model. This can reduce memory usage but may impact performance."},{"displayName":"Load Vocabulary Only","name":"vocab_only","type":"boolean","default":false,"description":"Whether to only load the model vocabulary without the weights. 
Useful for quickly testing tokenization."},{"displayName":"Output Format","name":"format","type":"options","options":[{"name":"Default","value":""},{"name":"JSON","value":"json"}],"default":"","description":"Specifies the format of the API response"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Send a message to Ollama model"}],"default":"message","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. llava, llama3.2-vision"}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"content":"","role":"user"}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Content","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. 
Hello, how can you help me?","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"The role of this message in the conversation","options":[{"name":"User","value":"user","description":"Message from the user"},{"name":"Assistant","value":"assistant","description":"Response from the assistant (for conversation history)"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"System Message","name":"system","type":"string","default":"","placeholder":"e.g. You are a helpful assistant.","description":"System message to set the context for the conversation","typeOptions":{"rows":2}},{"displayName":"Temperature","name":"temperature","type":"number","default":0.8,"typeOptions":{"minValue":0,"maxValue":2,"numberPrecision":2},"description":"Controls randomness in responses. 
Lower values make output more focused."},{"displayName":"Output Randomness (Top P)","name":"top_p","default":0.7,"description":"The maximum cumulative probability of tokens to consider when sampling","type":"number","typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}},{"displayName":"Top K","name":"top_k","type":"number","default":40,"typeOptions":{"minValue":1},"description":"Controls diversity by limiting the number of top tokens to consider"},{"displayName":"Max Tokens","name":"num_predict","type":"number","default":1024,"typeOptions":{"minValue":1,"numberPrecision":0},"description":"Maximum number of tokens to generate in the completion"},{"displayName":"Frequency Penalty","name":"frequency_penalty","type":"number","default":0,"typeOptions":{"minValue":0,"numberPrecision":2},"description":"Adjusts the penalty for tokens that have already appeared in the generated text. Higher values discourage repetition."},{"displayName":"Presence Penalty","name":"presence_penalty","type":"number","default":0,"typeOptions":{"numberPrecision":2},"description":"Adjusts the penalty for tokens based on their presence in the generated text so far. Positive values penalize tokens that have already appeared, encouraging diversity."},{"displayName":"Repetition Penalty","name":"repeat_penalty","type":"number","default":1.1,"typeOptions":{"minValue":0,"numberPrecision":2},"description":"Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient."},{"displayName":"Context Length","name":"num_ctx","type":"number","default":4096,"typeOptions":{"minValue":1,"numberPrecision":0},"description":"Sets the size of the context window used to generate the next token"},{"displayName":"Repeat Last N","name":"repeat_last_n","type":"number","default":64,"typeOptions":{"minValue":-1,"numberPrecision":0},"description":"Sets how far back for the model to look back to prevent repetition. 
(0 = disabled, -1 = num_ctx)."},{"displayName":"Min P","name":"min_p","type":"number","default":0,"typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":3},"description":"Alternative to the top_p, and aims to ensure a balance of quality and variety. The parameter p represents the minimum probability for a token to be considered, relative to the probability of the most likely token."},{"displayName":"Seed","name":"seed","type":"number","default":0,"typeOptions":{"minValue":0,"numberPrecision":0},"description":"Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt."},{"displayName":"Stop Sequences","name":"stop","type":"string","default":"","description":"Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Separate multiple patterns with commas"},{"displayName":"Keep Alive","name":"keep_alive","type":"string","default":"5m","description":"Specifies the duration to keep the loaded model in memory after use. Format: 1h30m (1 hour 30 minutes)."},{"displayName":"Low VRAM Mode","name":"low_vram","type":"boolean","default":false,"description":"Whether to activate low VRAM mode, which reduces memory usage at the cost of slower generation speed. Useful for GPUs with limited memory."},{"displayName":"Main GPU ID","name":"main_gpu","type":"number","default":0,"typeOptions":{"minValue":0,"numberPrecision":0},"description":"Specifies the ID of the GPU to use for the main computation. Only change this if you have multiple GPUs."},{"displayName":"Context Batch Size","name":"num_batch","type":"number","default":512,"typeOptions":{"minValue":1,"numberPrecision":0},"description":"Sets the batch size for prompt processing. 
Larger batch sizes may improve generation speed but increase memory usage."},{"displayName":"Number of GPUs","name":"num_gpu","type":"number","default":-1,"typeOptions":{"minValue":-1,"numberPrecision":0},"description":"Specifies the number of GPUs to use for parallel processing. Set to -1 for auto-detection."},{"displayName":"Number of CPU Threads","name":"num_thread","type":"number","default":0,"typeOptions":{"minValue":0,"numberPrecision":0},"description":"Specifies the number of CPU threads to use for processing. Set to 0 for auto-detection."},{"displayName":"Penalize Newlines","name":"penalize_newline","type":"boolean","default":true,"description":"Whether the model will be less likely to generate newline characters, encouraging longer continuous sequences of text"},{"displayName":"Use Memory Locking","name":"use_mlock","type":"boolean","default":false,"description":"Whether to lock the model in memory to prevent swapping. This can improve performance but requires sufficient available memory."},{"displayName":"Use Memory Mapping","name":"use_mmap","type":"boolean","default":true,"description":"Whether to use memory mapping for loading the model. This can reduce memory usage but may impact performance."},{"displayName":"Load Vocabulary Only","name":"vocab_only","type":"boolean","default":false,"description":"Whether to only load the model vocabulary without the weights. Useful for quickly testing tokenization."},{"displayName":"Output Format","name":"format","type":"options","options":[{"name":"Default","value":""},{"name":"JSON","value":"json"}],"default":"","description":"Specifies the format of the API response"}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/Ollama/ollama.svg"},
|
|
8
|
-
{"displayName":"OpenAI","name":"openAi","group":["transform"],"defaultVersion":2.2,"subtitle":"={{((resource, operation) => {\n if (operation === 'deleteAssistant') {\n return 'Delete Assistant';\n }\n if (operation === 'deleteFile') {\n return 'Delete File';\n }\n if (operation === 'classify') {\n return 'Classify Text';\n }\n if (operation === 'message' && resource === 'text') {\n return 'Message Model';\n }\n const capitalize = (str) => {\n const chars = str.split('');\n chars[0] = chars[0].toUpperCase();\n return chars.join('');\n };\n if (['transcribe', 'translate'].includes(operation)) {\n resource = 'recording';\n }\n if (operation === 'list') {\n resource = resource + 's';\n }\n return `${capitalize(operation)} ${capitalize(resource)}`;\n})($parameter.resource, $parameter.operation)}}","description":"Message an assistant or GPT, analyze images, generate audio, etc.","codex":{"alias":["LangChain","ChatGPT","Sora","DallE","whisper","audio","transcribe","tts","assistant"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.openai/"}]}},"builderHint":{"message":"For text generation, reasoning and tools, use AI Agent with OpenAI Chat Model. 
This OpenAI node is for specialized operations: image generation (DALL-E), audio (Whisper, TTS), and video generation (Sora).","relatedNodes":[{"nodeType":"@n8n/n8n-nodes-langchain.agent","relationHint":"Prefer for most LLM tasks"},{"nodeType":"@n8n/n8n-nodes-langchain.lmChatOpenAi","relationHint":"Prefer for most LLM tasks"}]},"version":[2,2.1,2.2],"defaults":{"name":"OpenAI"},"inputs":"={{((resource, operation, hideTools, memory) => {\n if (resource === 'assistant' && operation === 'message') {\n const inputs = [\n { type: 'main' },\n { type: 'ai_tool', displayName: 'Tools' },\n ];\n if (memory !== 'threadId') {\n inputs.push({ type: 'ai_memory', displayName: 'Memory', maxConnections: 1 });\n }\n return inputs;\n }\n if (resource === 'text' && (operation === 'message' || operation === 'response')) {\n if (hideTools === 'hide') {\n return ['main'];\n }\n return [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n }\n return ['main'];\n})($parameter.resource, $parameter.operation, $parameter.hideTools, $parameter.memory ?? 
undefined)}}","outputs":["main"],"credentials":[{"name":"openAiApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Text","value":"text","builderHint":{"message":"For text generation, reasoning and tools, use AI Agent with OpenAI Chat Model instead of this resource."}},{"name":"Image","value":"image"},{"name":"Audio","value":"audio"},{"name":"File","value":"file"},{"name":"Conversation","value":"conversation"},{"name":"Video","value":"video"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Generate Audio","value":"generate","action":"Generate audio","description":"Creates audio from a text prompt"},{"name":"Transcribe a Recording","value":"transcribe","action":"Transcribe a recording","description":"Transcribes audio into text"},{"name":"Translate a Recording","value":"translate","action":"Translate a recording","description":"Translates audio into text in English"}],"default":"generate","displayOptions":{"show":{"resource":["audio"]}}},{"displayName":"OpenAI API limits the size of the audio file to 25 MB","name":"fileSizeLimitNotice","type":"notice","default":" ","displayOptions":{"show":{"resource":["audio"],"operation":["translate","transcribe"]}}},{"displayName":"Model","name":"model","type":"options","default":"tts-1","options":[{"name":"TTS-1","value":"tts-1"},{"name":"TTS-1-HD","value":"tts-1-hd"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Text Input","name":"input","type":"string","placeholder":"e.g. The quick brown fox jumped over the lazy dog","description":"The text to generate audio for. 
The maximum length is 4096 characters.","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Voice","name":"voice","type":"options","default":"alloy","description":"The voice to use when generating the audio","options":[{"name":"Alloy","value":"alloy"},{"name":"Echo","value":"echo"},{"name":"Fable","value":"fable"},{"name":"Nova","value":"nova"},{"name":"Onyx","value":"onyx"},{"name":"Shimmer","value":"shimmer"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Response Format","name":"response_format","type":"options","default":"mp3","options":[{"name":"MP3","value":"mp3"},{"name":"OPUS","value":"opus"},{"name":"AAC","value":"aac"},{"name":"FLAC","value":"flac"}]},{"displayName":"Audio Speed","name":"speed","type":"number","default":1,"typeOptions":{"minValue":0.25,"maxValue":4,"numberPrecision":1}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm","displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Language of the Audio File","name":"language","type":"string","description":"The language of the input audio. 
Supplying the input language in <a href=\"https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes\" target=\"_blank\">ISO-639-1</a> format will improve accuracy and latency.","default":""},{"displayName":"Output Randomness (Temperature)","name":"temperature","type":"number","default":0,"typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}}],"displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","placeholder":"e.g. data","description":"Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm","displayOptions":{"show":{"operation":["translate"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Output Randomness (Temperature)","name":"temperature","type":"number","default":0,"typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}}],"displayOptions":{"show":{"operation":["translate"],"resource":["audio"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Delete a File","value":"deleteFile","action":"Delete a file","description":"Delete a file from the server"},{"name":"List Files","value":"list","action":"List files","description":"Returns a list of files that belong to the user's organization"},{"name":"Upload a File","value":"upload","action":"Upload a file","description":"Upload a file that can be used across various endpoints"}],"default":"upload","displayOptions":{"show":{"resource":["file"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","placeholder":"e.g. 
data","description":"Name of the binary property which contains the file. The size of individual files can be a maximum of 512 MB or 2 million tokens for Assistants.","displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Purpose","name":"purpose","type":"options","default":"user_data","description":"The intended purpose of the uploaded file, the 'Fine-tuning' only supports .jsonl files","options":[{"name":"Assistants","value":"assistants"},{"name":"Fine-Tune","value":"fine-tune"},{"name":"Vision","value":"vision"},{"name":"User Data","value":"user_data"}]}],"displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"File","name":"fileId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"fileSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","validation":[{"type":"regex","properties":{"regex":"file-[a-zA-Z0-9]","errorMessage":"Not a valid File ID"}}],"placeholder":"e.g. 
file-1234567890"}],"displayOptions":{"show":{"operation":["deleteFile"],"resource":["file"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Purpose","name":"purpose","type":"options","default":"any","description":"Only return files with the given purpose","options":[{"name":"Any [Default]","value":"any"},{"name":"Assistants","value":"assistants"},{"name":"Fine-Tune","value":"fine-tune"},{"name":"Vision","value":"vision"},{"name":"User Data","value":"user_data"}]}],"displayOptions":{"show":{"operation":["list"],"resource":["file"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Take in images and answer questions about them"},{"name":"Generate an Image","value":"generate","action":"Generate an image","description":"Creates an image from a text prompt"},{"name":"Edit Image","value":"edit","action":"Edit image","description":"Edit an image"}],"default":"generate","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"model","type":"options","default":"dall-e-3","description":"The model to use for image generation","options":[{"name":"DALL·E 2","value":"dall-e-2"},{"name":"DALL·E 3","value":"dall-e-3"},{"name":"GPT Image 1","value":"gpt-image-1"}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":2.2}}],"operation":["generate"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":"gpt-image-1-mini"},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageGenerateModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
gpt-4"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":2.2}}],"operation":["generate"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","placeholder":"e.g. A cute cat eating a dinosaur","description":"A text description of the desired image(s). The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"displayOptions":{"show":{"@version":[{"_cnd":{"lt":2.2}}],"operation":["generate"],"resource":["image"]}},"options":[{"displayName":"Number of Images","name":"n","default":1,"description":"Number of images to generate","type":"number","typeOptions":{"minValue":1,"maxValue":10},"displayOptions":{"show":{"/model":["dall-e-2"]}}},{"displayName":"Quality","name":"dalleQuality","type":"options","description":"The quality of the image that will be generated, HD creates images with finer details and greater consistency across the image","options":[{"name":"HD","value":"hd"},{"name":"Standard","value":"standard"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"standard"},{"displayName":"Quality","name":"quality","type":"options","description":"The quality of the image that will be generated, High creates images with finer details and greater consistency across the 
image","options":[{"name":"High","value":"high"},{"name":"Medium","value":"medium"},{"name":"Low","value":"low"}],"displayOptions":{"show":{"/model":[{"_cnd":{"includes":"gpt-image"}}]}},"default":"medium"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"256x256","value":"256x256"},{"name":"512x512","value":"512x512"},{"name":"1024x1024","value":"1024x1024"}],"displayOptions":{"show":{"/model":["dall-e-2"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1792x1024","value":"1792x1024"},{"name":"1024x1792","value":"1024x1792"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1024x1536","value":"1024x1536"},{"name":"1536x1024","value":"1536x1024"}],"displayOptions":{"show":{"/model":[{"_cnd":{"includes":"gpt-image"}}]}},"default":"1024x1024"},{"displayName":"Style","name":"style","type":"options","options":[{"name":"Natural","value":"natural","description":"Produce more natural looking images"},{"name":"Vivid","value":"vivid","description":"Lean towards generating hyper-real and dramatic images"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"vivid"},{"displayName":"Respond with Image URL(s)","name":"returnImageUrls","type":"boolean","default":false,"description":"Whether to return image URL(s) instead of binary file(s)","displayOptions":{"hide":{"/model":[{"_cnd":{"includes":"gpt-image"}}]}}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in","displayOptions":{"show":{"returnImageUrls":[false]}}}]},{"displayName":"Options","name":"options","placeholder":"Add 
Option","type":"collection","default":{},"displayOptions":{"show":{"@version":[{"_cnd":{"gte":2.2}}],"operation":["generate"],"resource":["image"]}},"options":[{"displayName":"Number of Images","name":"n","default":1,"description":"Number of images to generate","type":"number","typeOptions":{"minValue":1,"maxValue":10},"displayOptions":{"show":{"/modelId":["dall-e-2"]}}},{"displayName":"Quality","name":"dalleQuality","type":"options","description":"The quality of the image that will be generated, HD creates images with finer details and greater consistency across the image","options":[{"name":"HD","value":"hd"},{"name":"Standard","value":"standard"}],"displayOptions":{"show":{"/modelId":["dall-e-3"]}},"default":"standard"},{"displayName":"Quality","name":"quality","type":"options","description":"The quality of the image that will be generated, High creates images with finer details and greater consistency across the image","options":[{"name":"High","value":"high"},{"name":"Medium","value":"medium"},{"name":"Low","value":"low"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}},"default":"medium"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"256x256","value":"256x256"},{"name":"512x512","value":"512x512"},{"name":"1024x1024","value":"1024x1024"}],"displayOptions":{"show":{"/modelId":["dall-e-2"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1792x1024","value":"1792x1024"},{"name":"1024x1792","value":"1024x1792"}],"displayOptions":{"show":{"/modelId":["dall-e-3"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1024x1536","value":"1024x1536"},{"name":"1536x1024","value":"1536x1024"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}},"default":"1024x1024"},{"displayName":"Style","name":"style","
type":"options","options":[{"name":"Natural","value":"natural","description":"Produce more natural looking images"},{"name":"Vivid","value":"vivid","description":"Lean towards generating hyper-real and dramatic images"}],"displayOptions":{"show":{"/modelId":["dall-e-3"]}},"default":"vivid"},{"displayName":"Respond with Image URL(s)","name":"returnImageUrls","type":"boolean","default":false,"description":"Whether to return image URL(s) instead of binary file(s)","displayOptions":{"hide":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in","displayOptions":{"show":{"returnImageUrls":[false]}}}]},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this image?","default":"What's in this image?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Image URL(s)","value":"url"},{"name":"Binary File(s)","value":"base64"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"URL(s)","name":"imageUrls","type":"string","placeholder":"e.g. 
https://example.com/image.jpeg","description":"URL(s) of the image(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image(s)","displayOptions":{"show":{"inputType":["base64"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Detail","name":"detail","type":"options","default":"auto","options":[{"name":"Auto","value":"auto","description":"Model will look at the image input size and decide if it should use the low or high setting"},{"name":"Low","value":"low","description":"Return faster responses and consume fewer tokens"},{"name":"High","value":"high","description":"Return more detailed responses, consumes more tokens"}]},{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Model","name":"model","type":"options","default":"gpt-image-1","description":"The model to use for image generation","options":[{"name":"DALL·E 2","value":"dall-e-2"},{"name":"GPT Image 
1","value":"gpt-image-1"}],"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","required":true,"default":"","description":"A text description of the desired image(s). Maximum 1000 characters for dall-e-2, 32000 characters for gpt-image-1.","placeholder":"A beautiful sunset over mountains","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Images","name":"images","type":"fixedCollection","placeholder":"Add Image","typeOptions":{"multipleValues":true,"multipleValueButtonText":"Add Image"},"default":{"values":[{"binaryPropertyName":"data"}]},"description":"Add one or more binary fields to include images with your prompt. Each image should be a png, webp, or jpg file less than 50MB. You can provide up to 16 images.","displayOptions":{"show":{"/model":["gpt-image-1"],"operation":["edit"],"resource":["image"]}},"options":[{"displayName":"Image","name":"values","values":[{"displayName":"Binary Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","description":"The name of the binary field containing the image data"}]}]},{"displayName":"Binary Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image. It should be a square png file less than 4MB.","displayOptions":{"show":{"/model":["dall-e-2"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Number of Images","name":"n","type":"number","default":1,"description":"The number of images to generate. 
Must be between 1 and 10.","typeOptions":{"minValue":1,"maxValue":10},"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Size","name":"size","type":"options","default":"1024x1024","description":"The size of the generated images","options":[{"name":"256x256","value":"256x256"},{"name":"512x512","value":"512x512"},{"name":"1024x1024","value":"1024x1024"},{"name":"1024x1536 (Portrait)","value":"1024x1536"},{"name":"1536x1024 (Landscape)","value":"1536x1024"},{"name":"Auto","value":"auto"}],"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Quality","name":"quality","type":"options","default":"auto","description":"The quality of the image that will be generated","options":[{"name":"Auto","value":"auto"},{"name":"High","value":"high"},{"name":"Medium","value":"medium"},{"name":"Low","value":"low"},{"name":"Standard","value":"standard"}],"displayOptions":{"show":{"/model":["gpt-image-1"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Response Format","name":"responseFormat","type":"options","default":"url","description":"The format in which the generated images are returned. URLs are only valid for 60 minutes after generation.","options":[{"name":"URL","value":"url"},{"name":"Base64 JSON","value":"b64_json"}],"displayOptions":{"show":{"/model":["dall-e-2"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Output Format","name":"outputFormat","type":"options","default":"png","description":"The format in which the generated images are returned. Only supported for gpt-image-1.","options":[{"name":"PNG","value":"png"},{"name":"JPEG","value":"jpeg"},{"name":"WebP","value":"webp"}],"displayOptions":{"show":{"/model":["gpt-image-1"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Output Compression","name":"outputCompression","type":"number","default":100,"description":"The compression level (0-100%) for the generated images. 
Only supported for gpt-image-1 with webp or jpeg output formats.","typeOptions":{"minValue":0,"maxValue":100},"displayOptions":{"show":{"/model":["gpt-image-1"],"outputFormat":["webp","jpeg"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"User","name":"user","type":"string","default":"","description":"A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse","placeholder":"user-12345"},{"displayName":"Background","name":"background","type":"options","default":"auto","description":"Allows to set transparency for the background of the generated image(s). Only supported for gpt-image-1.","options":[{"name":"Auto","value":"auto"},{"name":"Transparent","value":"transparent"},{"name":"Opaque","value":"opaque"}],"displayOptions":{"show":{"/model":["gpt-image-1"]}}},{"displayName":"Input Fidelity","name":"inputFidelity","type":"options","default":"low","description":"Control how much effort the model will exert to match the style and features of input images. Only supported for gpt-image-1.","options":[{"name":"Low","value":"low"},{"name":"High","value":"high"}],"displayOptions":{"show":{"/model":["gpt-image-1"]}}},{"displayName":"Image Mask","name":"imageMask","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image. An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. If there are multiple images provided, the mask will be applied on the first image. 
Must be a valid PNG file, less than 4MB, and have the same dimensions as image."}],"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"response","action":"Message a model","description":"Generate a model response with GPT 3, 4, 5, etc. using Responses API"},{"name":"Classify Text for Violations","value":"classify","action":"Classify text for violations","description":"Check whether content complies with usage policies"}],"default":"response","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Text Input","name":"input","type":"string","placeholder":"e.g. Sample text goes here","description":"The input text to classify if it violates the moderation policy","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["classify"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":false,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["classify"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Use Stable Model","name":"useStableModel","type":"boolean","default":false,"description":"Whether to use the stable version of the model instead of the latest version, accuracy may be slightly lower"}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":2.1}}],"operation":["classify"],"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
gpt-4"}],"displayOptions":{"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Messages","name":"responses","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"type":"text"}]},"options":[{"displayName":"Message","name":"values","values":[{"displayName":"Type","name":"type","type":"options","default":"text","options":[{"name":"Text","value":"text"},{"name":"Image","value":"image"},{"name":"File","value":"file"}]},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"},{"name":"System","value":"system","description":"Usually used to set the model's behavior or context for the next user message"}],"default":"user"},{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["text"]}}},{"displayName":"Image Type","name":"imageType","type":"options","default":"url","options":[{"name":"Image URL","value":"url"},{"name":"File ID","value":"fileId"},{"name":"File Data","value":"base64"}],"displayOptions":{"show":{"type":["image"]}}},{"displayName":"Image URL","name":"imageUrl","type":"string","default":"","placeholder":"e.g. https://example.com/image.jpeg","description":"URL of the image to be sent","displayOptions":{"show":{"type":["image"],"imageType":["url"]}}},{"displayName":"Image Data","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image(s)","displayOptions":{"show":{"type":["image"],"imageType":["base64"]}}},{"displayName":"File ID","name":"fileId","type":"string","default":"","description":"ID of the file to be sent","displayOptions":{"show":{"type":["image"],"imageType":["fileId"]}}},{"displayName":"Detail","name":"imageDetail","type":"options","default":"auto","description":"The detail level of the image to be sent to the model","options":[{"name":"Auto","value":"auto"},{"name":"Low","value":"low"},{"name":"High","value":"high"}],"displayOptions":{"show":{"type":["image"]}}},{"displayName":"File Type","name":"fileType","type":"options","default":"url","options":[{"name":"File URL","value":"url"},{"name":"File ID","value":"fileId"},{"name":"File Data","value":"base64"}],"displayOptions":{"show":{"type":["file"]}}},{"displayName":"File URL","name":"fileUrl","type":"string","default":"","placeholder":"e.g. https://example.com/file.pdf","description":"URL of the file to be sent. Accepts base64 encoded files as well.","displayOptions":{"show":{"type":["file"],"fileType":["url"]}}},{"displayName":"File ID","name":"fileId","type":"string","default":"","description":"ID of the file to be sent","displayOptions":{"show":{"type":["file"],"fileType":["fileId"]}}},{"displayName":"File Data","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the file","displayOptions":{"show":{"type":["file"],"fileType":["base64"]}}},{"displayName":"File Name","name":"fileName","type":"string","default":"","required":true,"displayOptions":{"show":{"type":["file"],"fileType":["base64"]}}}]}],"displayOptions":{"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Hide Tools","name":"hideTools","type":"hidden","default":"hide","displayOptions":{"show":{"modelId":["gpt-3.5-turbo-16k-0613","dall-e-3","text-embedding-3-large","dall-e-2","whisper-1","tts-1-hd-1106","tts-1-hd","gpt-4-0314","text-embedding-3-small","gpt-4-32k-0314","gpt-3.5-turbo-0301","gpt-4-vision-preview","gpt-3.5-turbo-16k","gpt-3.5-turbo-instruct-0914","tts-1","davinci-002","gpt-3.5-turbo-instruct","babbage-002","tts-1-1106","text-embedding-ada-002"],"operation":["response"],"resource":["text"]}}},{"displayName":"Connect your own custom n8n tools to this node on the canvas","name":"noticeTools","type":"notice","default":"","displayOptions":{"hide":{"hideTools":["hide"]},"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Built-in Tools","name":"builtInTools","placeholder":"Add Built-in Tool","type":"collection","default":{},"options":[{"displayName":"Web Search","name":"webSearch","type":"collection","default":{"searchContextSize":"medium"},"options":[{"displayName":"Search Context Size","description":"High level guidance for the amount of context window space to use for the 
search","name":"searchContextSize","type":"options","default":"medium","options":[{"name":"Low","value":"low"},{"name":"Medium","value":"medium"},{"name":"High","value":"high"}]},{"displayName":"Web Search Allowed Domains","name":"allowedDomains","type":"string","default":"","description":"Comma-separated list of domains to search. Only domains in this list will be searched.","placeholder":"e.g. google.com, wikipedia.org"},{"displayName":"Country","name":"country","type":"string","default":"","placeholder":"e.g. US, GB"},{"displayName":"City","name":"city","type":"string","default":"","placeholder":"e.g. New York, London"},{"displayName":"Region","name":"region","type":"string","default":"","placeholder":"e.g. New York, London"}]},{"displayName":"File Search","name":"fileSearch","type":"collection","default":{"vectorStoreIds":"[]"},"options":[{"displayName":"Vector Store IDs","name":"vectorStoreIds","description":"The vector store IDs to use for the file search. Vector stores are managed via OpenAI Dashboard.","type":"json","default":"[]","required":true},{"displayName":"Filters","name":"filters","type":"json","default":"{}"},{"displayName":"Max Results","name":"maxResults","type":"number","default":1,"typeOptions":{"minValue":1,"maxValue":50}}]},{"displayName":"Code Interpreter","name":"codeInterpreter","type":"boolean","default":true,"description":"Whether to allow the model to execute code in a sandboxed environment"}],"displayOptions":{"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Conversation ID","name":"conversationId","default":"","description":"The conversation that this response belongs to. 
Input items and output items from this response are automatically added to this conversation after this response completes.","type":"string"},{"displayName":"Include Additional Data","name":"include","default":[],"type":"multiOptions","description":"Specify additional output data to include in the model response","options":[{"name":"Code Interpreter Call Outputs","value":"code_interpreter_call.outputs"},{"name":"Computer Call Output Image URL","value":"computer_call_output.output.image_url"},{"name":"File Search Call Results","value":"file_search_call.results"},{"name":"Message Input Image URL","value":"message.input_image.image_url"},{"name":"Message Output Text Logprobs","value":"message.output_text.logprobs"},{"name":"Reasoning Encrypted Content","value":"reasoning.encrypted_content"},{"name":"Web Search Tool Call Sources","value":"web_search_call.action.sources"}]},{"displayName":"Instructions","name":"instructions","type":"string","default":"","description":"Instructions for the model to follow","typeOptions":{"rows":2}},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":16,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Max Tool Calls Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. Set to 0 for no limit."},{"displayName":"Max Built-in Tool Calls","name":"maxToolCalls","type":"number","default":15,"description":"The maximum number of total calls to built-in tools that can be processed in a response. This maximum number applies across all built-in tool calls, not per individual tool. 
Any further attempts to call a tool by the model will be ignored."},{"displayName":"Metadata","name":"metadata","type":"json","description":"Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters.","default":"{}"},{"displayName":"Parallel Tool Calls","name":"parallelToolCalls","type":"boolean","default":false,"description":"Whether to allow parallel tool calls. If true, the model can call multiple tools at once."},{"displayName":"Previous Response ID","name":"previousResponseId","type":"string","default":"","description":"The ID of the previous response to continue from. Cannot be used in conjunction with Conversation ID."},{"displayName":"Prompt","name":"promptConfig","type":"fixedCollection","description":"Configure the reusable prompt template configured via OpenAI Dashboard. 
<a href=\"https://platform.openai.com/docs/guides/prompt-engineering#reusable-prompts\">Learn more</a>.","default":{"promptOptions":[{"promptId":""}]},"options":[{"displayName":"Prompt","name":"promptOptions","values":[{"displayName":"Prompt ID","name":"promptId","type":"string","default":"","description":"The unique identifier of the prompt template to use"},{"displayName":"Version","name":"version","type":"string","default":"","description":"Optional version of the prompt template"},{"displayName":"Variables","name":"variables","type":"json","default":"{}","description":"Variables to be substituted into the prompt template"}]}]},{"displayName":"Prompt Cache Key","name":"promptCacheKey","type":"string","default":"","description":"Used by OpenAI to cache responses for similar requests to optimize your cache hit rates"},{"displayName":"Reasoning","name":"reasoning","type":"fixedCollection","default":{"reasoningOptions":[{"effort":"medium","summary":"none"}]},"options":[{"displayName":"Reasoning","name":"reasoningOptions","values":[{"displayName":"Effort","name":"effort","type":"options","default":"medium","options":[{"name":"Low","value":"low"},{"name":"Medium","value":"medium"},{"name":"High","value":"high"}]},{"displayName":"Summary","name":"summary","type":"options","default":"auto","description":"A summary of the reasoning performed by the model. This can be useful for debugging and understanding the model's reasoning process.","options":[{"name":"None","value":"none"},{"name":"Auto","value":"auto"},{"name":"Concise","value":"concise"},{"name":"Detailed","value":"detailed"}]}]}]},{"displayName":"Safety Identifier","name":"safetyIdentifier","type":"string","default":"","description":"A stable identifier used to help detect users of your application that may be violating OpenAI's usage policies. 
The IDs should be a string that uniquely identifies each user."},{"displayName":"Service Tier","name":"serviceTier","type":"options","default":"auto","description":"The service tier to use for the request","options":[{"name":"Auto","value":"auto"},{"name":"Flex","value":"flex"},{"name":"Default","value":"default"},{"name":"Priority","value":"priority"}]},{"displayName":"Store","name":"store","type":"boolean","default":true,"description":"Whether to store the generated model response for later retrieval via API"},{"displayName":"Output Format","name":"textFormat","type":"fixedCollection","default":{"textOptions":[{"type":"text"}]},"options":[{"displayName":"Text","name":"textOptions","values":[{"displayName":"Type","name":"type","type":"options","default":"","options":[{"name":"Text","value":"text"},{"name":"JSON Schema (recommended)","value":"json_schema"},{"name":"JSON Object","value":"json_object"}]},{"displayName":"Verbosity","name":"verbosity","type":"options","default":"medium","options":[{"name":"Low","value":"low"},{"name":"Medium","value":"medium"},{"name":"High","value":"high"}]},{"displayName":"Name","name":"name","type":"string","default":"my_schema","description":"The name of the response format. 
Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"All properties in the schema must be set to \"required\", when using \"strict\" mode.","name":"requiredNotice","type":"notice","default":"","displayOptions":{"show":{"strict":[true]}}},{"displayName":"Schema","name":"schema","type":"json","default":"{\n \"type\": \"object\",\n \"properties\": {\n \"message\": {\n \"type\": \"string\"\n }\n },\n \"additionalProperties\": false,\n \"required\": [\"message\"]\n}","description":"The schema of the response format","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"Description","name":"description","type":"string","default":"","description":"The description of the response format","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"Strict","name":"strict","type":"boolean","default":false,"description":"Whether to require that the AI will always generate responses that match the provided JSON Schema","displayOptions":{"show":{"type":["json_schema"]}}}]}]},{"displayName":"Top Logprobs","name":"topLogprobs","type":"number","default":0,"description":"An integer between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability","typeOptions":{"minValue":0,"maxValue":20}},{"displayName":"Output Randomness (Temperature)","name":"temperature","type":"number","default":1,"description":"What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. 
We generally recommend altering this or top_p but not both","typeOptions":{"minValue":0,"maxValue":2,"numberPrecision":1}},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Truncation","name":"truncation","type":"boolean","default":false,"description":"Whether to truncate the input to the model's context window size. When disabled will throw a 400 error instead."},{"displayName":"Background Mode","name":"backgroundMode","type":"fixedCollection","default":{"values":[{"backgroundMode":true}]},"options":[{"displayName":"Bakground","name":"values","values":[{"displayName":"Background Mode","name":"enabled","type":"boolean","default":false,"description":"Whether to run the model in background mode. If true, the model will run in background mode."},{"displayName":"Timeout","name":"timeout","type":"number","default":300,"description":"The timeout for the background mode in seconds. 
If 0, the timeout is infinite.","typeOptions":{"minValue":0,"maxValue":3600}}]}]}],"displayOptions":{"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Create","value":"create","action":"Create a conversation","description":"Create a conversation"},{"name":"Get","value":"get","action":"Get a conversation","description":"Get a conversation"},{"name":"Remove","value":"remove","action":"Remove a conversation","description":"Remove a conversation"},{"name":"Update","value":"update","action":"Update a conversation","description":"Update a conversation"}],"default":"create","displayOptions":{"show":{"resource":["conversation"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"type":"text"}]},"options":[{"displayName":"Message","name":"values","values":[{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"},{"name":"System","value":"system","description":"Usually used to set the model's behavior or context for the next user message"}],"default":"user"},{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be send","default":"","placeholder":"e.g. 
Hello, how can you help me?","typeOptions":{"rows":2},"displayOptions":{}}]}],"displayOptions":{"show":{"operation":["create"],"resource":["conversation"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Metadata","name":"metadata","type":"json","description":"Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters.","default":"{}"}],"displayOptions":{"show":{"operation":["create"],"resource":["conversation"]}}},{"displayName":"Conversation ID","name":"conversationId","type":"string","default":"","placeholder":"conv_1234567890","description":"The ID of the conversation to delete","required":true,"displayOptions":{"show":{"operation":["remove"],"resource":["conversation"]}}},{"displayName":"Conversation ID","name":"conversationId","type":"string","default":"","placeholder":"conv_1234567890","description":"The ID of the conversation to update","required":true,"displayOptions":{"show":{"operation":["update"],"resource":["conversation"]}}},{"displayName":"Metadata","name":"metadata","type":"json","description":"Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. 
Values are strings with a maximum length of 512 characters.","default":"{}","required":true,"displayOptions":{"show":{"operation":["update"],"resource":["conversation"]}}},{"displayName":"Conversation ID","name":"conversationId","type":"string","default":"","placeholder":"conv_1234567890","description":"The ID of the conversation to retrieve","required":true,"displayOptions":{"show":{"operation":["get"],"resource":["conversation"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Generate","value":"generate","action":"Generate a video","description":"Creates a video from a text prompt"}],"default":"generate","displayOptions":{"show":{"resource":["video"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"videoModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"A video of a cat playing with a ball","description":"The prompt to generate a video from","required":true,"typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Seconds","name":"seconds","type":"number","default":4,"description":"Clip duration in seconds","required":true,"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Size","name":"size","type":"options","default":"1280x720","description":"Output resolution formatted as width x height. 
1024x1792 and 1792x1024 are only supported by Sora 2 Pro.","options":[{"name":"720x1280","value":"720x1280"},{"name":"1280x720","value":"1280x720"},{"name":"1024x1792","value":"1024x1792"},{"name":"1792x1024","value":"1792x1024"}],"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Reference","description":"Optional image reference that guides generation","name":"binaryPropertyNameReference","type":"string","default":"data","placeholder":"e.g. data"},{"displayName":"Wait Timeout","name":"waitTime","type":"number","default":300,"description":"Time to wait for the video to be generated in seconds","typeOptions":{"minValue":5,"maxValue":7200}},{"displayName":"Output Field Name","name":"fileName","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/OpenAi/openAi.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/OpenAi/openAi.dark.svg"}},
|
|
9
|
-
{"displayName":"OpenAI","name":"openAi","group":["transform"],"defaultVersion":2.2,"subtitle":"={{((resource, operation) => {\n if (operation === 'deleteAssistant') {\n return 'Delete Assistant';\n }\n if (operation === 'deleteFile') {\n return 'Delete File';\n }\n if (operation === 'classify') {\n return 'Classify Text';\n }\n if (operation === 'message' && resource === 'text') {\n return 'Message Model';\n }\n const capitalize = (str) => {\n const chars = str.split('');\n chars[0] = chars[0].toUpperCase();\n return chars.join('');\n };\n if (['transcribe', 'translate'].includes(operation)) {\n resource = 'recording';\n }\n if (operation === 'list') {\n resource = resource + 's';\n }\n return `${capitalize(operation)} ${capitalize(resource)}`;\n})($parameter.resource, $parameter.operation)}}","description":"Message an assistant or GPT, analyze images, generate audio, etc.","codex":{"alias":["LangChain","ChatGPT","Sora","DallE","whisper","audio","transcribe","tts","assistant"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.openai/"}]}},"builderHint":{"message":"For text generation, reasoning and tools, use AI Agent with OpenAI Chat Model. 
This OpenAI node is for specialized operations: image generation (DALL-E), audio (Whisper, TTS), and video generation (Sora).","relatedNodes":[{"nodeType":"@n8n/n8n-nodes-langchain.agent","relationHint":"Prefer for most LLM tasks"},{"nodeType":"@n8n/n8n-nodes-langchain.lmChatOpenAi","relationHint":"Prefer for most LLM tasks"}]},"version":[1,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8],"defaults":{"name":"OpenAI"},"inputs":"={{((resource, operation, hideTools, memory) => {\n if (resource === 'assistant' && operation === 'message') {\n const inputs = [\n { type: 'main' },\n { type: 'ai_tool', displayName: 'Tools' },\n ];\n if (memory !== 'threadId') {\n inputs.push({ type: 'ai_memory', displayName: 'Memory', maxConnections: 1 });\n }\n return inputs;\n }\n if (resource === 'text' && (operation === 'message' || operation === 'response')) {\n if (hideTools === 'hide') {\n return ['main'];\n }\n return [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n }\n return ['main'];\n})($parameter.resource, $parameter.operation, $parameter.hideTools, $parameter.memory ?? 
undefined)}}","outputs":["main"],"credentials":[{"name":"openAiApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Assistant","value":"assistant"},{"name":"Text","value":"text"},{"name":"Image","value":"image"},{"name":"Audio","value":"audio"},{"name":"File","value":"file"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Create an Assistant","value":"create","action":"Create an assistant","description":"Create a new assistant"},{"name":"Delete an Assistant","value":"deleteAssistant","action":"Delete an assistant","description":"Delete an assistant from the account"},{"name":"List Assistants","value":"list","action":"List assistants","description":"List assistants in the organization"},{"name":"Message an Assistant","value":"message","action":"Message an assistant","description":"Send messages to an assistant"},{"name":"Update an Assistant","value":"update","action":"Update an assistant","description":"Update an existing assistant"}],"default":"message","displayOptions":{"show":{"resource":["assistant"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Name","name":"name","type":"string","default":"","description":"The name of the assistant. The maximum length is 256 characters.","placeholder":"e.g. My Assistant","required":true,"displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Description","name":"description","type":"string","default":"","description":"The description of the assistant. 
The maximum length is 512 characters.","placeholder":"e.g. My personal assistant","displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Instructions","name":"instructions","type":"string","description":"The system instructions that the assistant uses. The maximum length is 32768 characters.","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Code Interpreter","name":"codeInterpreter","type":"boolean","default":false,"description":"Whether to enable the code interpreter that allows the assistants to write and run Python code in a sandboxed execution environment, find more <a href=\"https://platform.openai.com/docs/assistants/tools/code-interpreter\" target=\"_blank\">here</a>","displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Knowledge Retrieval","name":"knowledgeRetrieval","type":"boolean","default":false,"description":"Whether to augments the assistant with knowledge from outside its model, such as proprietary product information or documents, find more <a href=\"https://platform.openai.com/docs/assistants/tools/knowledge-retrieval\" target=\"_blank\">here</a>","displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Files","name":"file_ids","type":"multiOptions","description":"The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant. 
You can use expression to pass file IDs as an array or comma-separated string.","typeOptions":{"loadOptionsMethod":"getFiles"},"default":[],"hint":"Add more files by using the 'Upload a File' operation","displayOptions":{"show":{"codeInterpreter":[true],"operation":["create"],"resource":["assistant"]},"hide":{"knowledgeRetrieval":[true]}}},{"displayName":"Files","name":"file_ids","type":"multiOptions","description":"The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant","typeOptions":{"loadOptionsMethod":"getFiles"},"default":[],"hint":"Add more files by using the 'Upload a File' operation","displayOptions":{"show":{"knowledgeRetrieval":[true],"operation":["create"],"resource":["assistant"]},"hide":{"codeInterpreter":[true]}}},{"displayName":"Files","name":"file_ids","type":"multiOptions","description":"The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant","typeOptions":{"loadOptionsMethod":"getFiles"},"default":[],"hint":"Add more files by using the 'Upload a File' operation","displayOptions":{"show":{"knowledgeRetrieval":[true],"codeInterpreter":[true],"operation":["create"],"resource":["assistant"]}}},{"displayName":"Add custom n8n tools when you <i>message</i> your assistant (rather than when creating it)","name":"noticeTools","type":"notice","default":"","displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Output Randomness (Temperature)","name":"temperature","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. 
We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Fail if Assistant Already Exists","name":"failIfExists","type":"boolean","default":false,"description":"Whether to fail an operation if the assistant with the same name already exists"}],"displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Assistant","name":"assistantId","type":"resourceLocator","description":"Assistant to respond to the message. You can add, modify or remove assistants in the <a href=\"https://platform.openai.com/playground?mode=assistant\" target=\"_blank\">playground</a>.","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"assistantSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. asst_abc123"}],"displayOptions":{"show":{"operation":["deleteAssistant"],"resource":["assistant"]}}},{"displayName":"Assistant","name":"assistantId","type":"resourceLocator","description":"Assistant to respond to the message. You can add, modify or remove assistants in the <a href=\"https://platform.openai.com/playground?mode=assistant\" target=\"_blank\">playground</a>.","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"assistantSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
asst_abc123"}],"displayOptions":{"show":{"operation":["message"],"resource":["assistant"]}}},{"displayName":"Source for Prompt (User Message)","name":"prompt","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"auto","description":"Looks for an input field called 'chatInput' that is coming from a directly connected Chat Trigger"},{"name":"Connected Guardrails Node","value":"guardrails","description":"Looks for an input field called 'guardrailsInput' that is coming from a directly connected Guardrails Node"},{"name":"Define below","value":"define","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"auto","displayOptions":{"show":{"operation":["message"],"resource":["assistant"]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2},"displayOptions":{"show":{"prompt":["define"],"operation":["message"],"resource":["assistant"]}}},{"displayName":"Memory","name":"memory","type":"options","options":[{"name":"Use memory connector","value":"connector","description":"Connect one of the supported memory nodes"},{"name":"Use thread ID","value":"threadId","description":"Specify the ID of the thread to continue"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.6}}],"operation":["message"],"resource":["assistant"]}},"default":"connector"},{"displayName":"Thread ID","name":"threadId","type":"string","default":"","placeholder":"","description":"The ID of the thread to continue, a new thread will be created if not specified","hint":"If the thread ID is empty or undefined a new thread will be created and included in the response","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.6}}],"memory":["threadId"],"operation":["message"],"resource":["assistant"]}}},{"displayName":"Connect your own custom n8n tools to this node on the 
canvas","name":"noticeTools","type":"notice","default":"","displayOptions":{"show":{"operation":["message"],"resource":["assistant"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Base URL","name":"baseURL","default":"https://api.openai.com/v1","description":"Override the default base URL for the API","type":"string","displayOptions":{"hide":{"@version":[{"_cnd":{"gte":1.8}}]}}},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Timeout","name":"timeout","default":10000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Preserve Original Tools","name":"preserveOriginalTools","type":"boolean","default":true,"description":"Whether to preserve the original tools of the assistant after the execution of this node, otherwise the tools will be replaced with the connected tools, if any, default is true","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}]}}}],"displayOptions":{"show":{"operation":["message"],"resource":["assistant"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["list"],"resource":["assistant"]}}},{"displayName":"Assistant","name":"assistantId","type":"resourceLocator","description":"Assistant to respond to the message. 
You can add, modify or remove assistants in the <a href=\"https://platform.openai.com/playground?mode=assistant\" target=\"_blank\">playground</a>.","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"assistantSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. asst_abc123"}],"displayOptions":{"show":{"operation":["update"],"resource":["assistant"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Code Interpreter","name":"codeInterpreter","type":"boolean","default":false,"description":"Whether to enable the code interpreter that allows the assistants to write and run Python code in a sandboxed execution environment, find more <a href=\"https://platform.openai.com/docs/assistants/tools/code-interpreter\" target=\"_blank\">here</a>"},{"displayName":"Description","name":"description","type":"string","default":"","description":"The description of the assistant. The maximum length is 512 characters.","placeholder":"e.g. My personal assistant"},{"displayName":"Files","name":"file_ids","type":"multiOptions","description":"The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant. You can use expression to pass file IDs as an array or comma-separated string.","typeOptions":{"loadOptionsMethod":"getFiles"},"default":[],"hint":"Add more files by using the 'Upload a File' operation, any existing files not selected here will be removed."},{"displayName":"Instructions","name":"instructions","type":"string","description":"The system instructions that the assistant uses. 
The maximum length is 32768 characters.","default":"","typeOptions":{"rows":2}},{"displayName":"Knowledge Retrieval","name":"knowledgeRetrieval","type":"boolean","default":false,"description":"Whether to augments the assistant with knowledge from outside its model, such as proprietary product information or documents, find more <a href=\"https://platform.openai.com/docs/assistants/tools/knowledge-retrieval\" target=\"_blank\">here</a>"},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":false,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}]},{"displayName":"Name","name":"name","type":"string","default":"","description":"The name of the assistant. The maximum length is 256 characters.","placeholder":"e.g. My Assistant"},{"displayName":"Remove All Custom Tools (Functions)","name":"removeCustomTools","type":"boolean","default":false,"description":"Whether to remove all custom tools (functions) from the assistant"},{"displayName":"Output Randomness (Temperature)","name":"temperature","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.","type":"number"}],"displayOptions":{"show":{"operation":["update"],"resource":["assistant"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Generate Audio","value":"generate","action":"Generate audio","description":"Creates audio from a text prompt"},{"name":"Transcribe a Recording","value":"transcribe","action":"Transcribe a recording","description":"Transcribes audio into text"},{"name":"Translate a Recording","value":"translate","action":"Translate a recording","description":"Translates audio into text in English"}],"default":"generate","displayOptions":{"show":{"resource":["audio"]}}},{"displayName":"OpenAI API limits the size of the audio file to 25 MB","name":"fileSizeLimitNotice","type":"notice","default":" ","displayOptions":{"show":{"resource":["audio"],"operation":["translate","transcribe"]}}},{"displayName":"Model","name":"model","type":"options","default":"tts-1","options":[{"name":"TTS-1","value":"tts-1"},{"name":"TTS-1-HD","value":"tts-1-hd"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Text Input","name":"input","type":"string","placeholder":"e.g. The quick brown fox jumped over the lazy dog","description":"The text to generate audio for. 
The maximum length is 4096 characters.","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Voice","name":"voice","type":"options","default":"alloy","description":"The voice to use when generating the audio","options":[{"name":"Alloy","value":"alloy"},{"name":"Echo","value":"echo"},{"name":"Fable","value":"fable"},{"name":"Nova","value":"nova"},{"name":"Onyx","value":"onyx"},{"name":"Shimmer","value":"shimmer"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Response Format","name":"response_format","type":"options","default":"mp3","options":[{"name":"MP3","value":"mp3"},{"name":"OPUS","value":"opus"},{"name":"AAC","value":"aac"},{"name":"FLAC","value":"flac"}]},{"displayName":"Audio Speed","name":"speed","type":"number","default":1,"typeOptions":{"minValue":0.25,"maxValue":4,"numberPrecision":1}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm","displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Language of the Audio File","name":"language","type":"string","description":"The language of the input audio. 
Supplying the input language in <a href=\"https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes\" target=\"_blank\">ISO-639-1</a> format will improve accuracy and latency.","default":""},{"displayName":"Output Randomness (Temperature)","name":"temperature","type":"number","default":0,"typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}}],"displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","placeholder":"e.g. data","description":"Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm","displayOptions":{"show":{"operation":["translate"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Output Randomness (Temperature)","name":"temperature","type":"number","default":0,"typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}}],"displayOptions":{"show":{"operation":["translate"],"resource":["audio"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Delete a File","value":"deleteFile","action":"Delete a file","description":"Delete a file from the server"},{"name":"List Files","value":"list","action":"List files","description":"Returns a list of files that belong to the user's organization"},{"name":"Upload a File","value":"upload","action":"Upload a file","description":"Upload a file that can be used across various endpoints"}],"default":"upload","displayOptions":{"show":{"resource":["file"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","placeholder":"e.g. 
data","description":"Name of the binary property which contains the file. The size of individual files can be a maximum of 512 MB or 2 million tokens for Assistants.","displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Purpose","name":"purpose","type":"options","default":"assistants","description":"The intended purpose of the uploaded file, the 'Fine-tuning' only supports .jsonl files","options":[{"name":"Assistants","value":"assistants"},{"name":"Fine-Tune","value":"fine-tune"}]}],"displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"File","name":"fileId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"fileSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","validation":[{"type":"regex","properties":{"regex":"file-[a-zA-Z0-9]","errorMessage":"Not a valid File ID"}}],"placeholder":"e.g. 
file-1234567890"}],"displayOptions":{"show":{"operation":["deleteFile"],"resource":["file"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Purpose","name":"purpose","type":"options","default":"any","description":"Only return files with the given purpose","options":[{"name":"Any [Default]","value":"any"},{"name":"Assistants","value":"assistants"},{"name":"Fine-Tune","value":"fine-tune"}]}],"displayOptions":{"show":{"operation":["list"],"resource":["file"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Take in images and answer questions about them"},{"name":"Generate an Image","value":"generate","action":"Generate an image","description":"Creates an image from a text prompt"}],"default":"generate","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"model","type":"options","default":"dall-e-3","description":"The model to use for image generation","options":[{"name":"DALL·E 2","value":"dall-e-2"},{"name":"DALL·E 3","value":"dall-e-3"},{"name":"GPT Image 1","value":"gpt-image-1"}],"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","placeholder":"e.g. A cute cat eating a dinosaur","description":"A text description of the desired image(s). 
The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Number of Images","name":"n","default":1,"description":"Number of images to generate","type":"number","typeOptions":{"minValue":1,"maxValue":10},"displayOptions":{"show":{"/model":["dall-e-2"]}}},{"displayName":"Quality","name":"dalleQuality","type":"options","description":"The quality of the image that will be generated, HD creates images with finer details and greater consistency across the image","options":[{"name":"HD","value":"hd"},{"name":"Standard","value":"standard"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"standard"},{"displayName":"Quality","name":"quality","type":"options","description":"The quality of the image that will be generated, High creates images with finer details and greater consistency across the 
image","options":[{"name":"High","value":"high"},{"name":"Medium","value":"medium"},{"name":"Low","value":"low"}],"displayOptions":{"show":{"/model":["gpt-image-1"]}},"default":"medium"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"256x256","value":"256x256"},{"name":"512x512","value":"512x512"},{"name":"1024x1024","value":"1024x1024"}],"displayOptions":{"show":{"/model":["dall-e-2"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1792x1024","value":"1792x1024"},{"name":"1024x1792","value":"1024x1792"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1024x1536","value":"1024x1536"},{"name":"1536x1024","value":"1536x1024"}],"displayOptions":{"show":{"/model":["gpt-image-1"]}},"default":"1024x1024"},{"displayName":"Style","name":"style","type":"options","options":[{"name":"Natural","value":"natural","description":"Produce more natural looking images"},{"name":"Vivid","value":"vivid","description":"Lean towards generating hyper-real and dramatic images"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"vivid"},{"displayName":"Respond with Image URL(s)","name":"returnImageUrls","type":"boolean","default":false,"description":"Whether to return image URL(s) instead of binary file(s)","displayOptions":{"hide":{"/model":["gpt-image-1"]}}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in","displayOptions":{"show":{"returnImageUrls":[false]}}}],"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From 
List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.4}}],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this image?","default":"What's in this image?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Image URL(s)","value":"url"},{"name":"Binary File(s)","value":"base64"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"URL(s)","name":"imageUrls","type":"string","placeholder":"e.g. https://example.com/image.jpeg","description":"URL(s) of the image(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image(s)","displayOptions":{"show":{"inputType":["base64"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Detail","name":"detail","type":"options","default":"auto","options":[{"name":"Auto","value":"auto","description":"Model will look at the image input size and decide if it should use the low or high setting"},{"name":"Low","value":"low","description":"Return faster responses and consume fewer tokens"},{"name":"High","value":"high","description":"Return more detailed responses, consumes more tokens"}]},{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Create a completion with GPT 3, 4, etc."},{"name":"Classify Text for Violations","value":"classify","action":"Classify text for violations","description":"Check whether content complies with usage policies"}],"default":"message","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Text Input","name":"input","type":"string","placeholder":"e.g. 
Sample text goes here","description":"The input text to classify if it is violates the moderation policy","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["classify"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":false,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["classify"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Use Stable Model","name":"useStableModel","type":"boolean","default":false,"description":"Whether to use the stable version of the model instead of the latest version, accuracy may be slightly lower"}],"displayOptions":{"show":{"operation":["classify"],"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"content":""}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be send","default":"","placeholder":"e.g. 
Hello, how can you help me?","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"},{"name":"System","value":"system","description":"Usually used to set the model's behavior or context for the next user message"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Output Content as JSON","name":"jsonOutput","type":"boolean","description":"Whether to attempt to return the response in JSON format. 
Compatible with GPT-4 Turbo and all GPT-3.5 Turbo models newer than gpt-3.5-turbo-1106.","default":false,"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Hide Tools","name":"hideTools","type":"hidden","default":"hide","displayOptions":{"show":{"modelId":["gpt-3.5-turbo-16k-0613","dall-e-3","text-embedding-3-large","dall-e-2","whisper-1","tts-1-hd-1106","tts-1-hd","gpt-4-0314","text-embedding-3-small","gpt-4-32k-0314","gpt-3.5-turbo-0301","gpt-4-vision-preview","gpt-3.5-turbo-16k","gpt-3.5-turbo-instruct-0914","tts-1","davinci-002","gpt-3.5-turbo-instruct","babbage-002","tts-1-1106","text-embedding-ada-002"],"@version":[{"_cnd":{"gte":1.2}}],"operation":["message"],"resource":["text"]}}},{"displayName":"Connect your own custom n8n tools to this node on the canvas","name":"noticeTools","type":"notice","default":"","displayOptions":{"hide":{"hideTools":["hide"]},"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequency_penalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":16,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Number of Completions","name":"n","default":1,"description":"How many completions to generate for each prompt. Note: Because this parameter generates many completions, it can quickly consume your token quota. 
Use carefully and ensure that you have reasonable settings for max_tokens and stop.","type":"number"},{"displayName":"Presence Penalty","name":"presence_penalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Output Randomness (Temperature)","name":"temperature","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Reasoning Effort","name":"reasoning_effort","default":"medium","description":"Controls the amount of reasoning tokens to use. 
A value of \"low\" will favor speed and economical token usage, \"high\" will favor more complete reasoning at the cost of more tokens generated and slower responses.","type":"options","options":[{"name":"Low","value":"low","description":"Favors speed and economical token usage"},{"name":"Medium","value":"medium","description":"Balance between speed and reasoning accuracy"},{"name":"High","value":"high","description":"Favors more complete reasoning at the cost of more tokens generated and slower responses"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"regex":"(^o1([-\\d]+)?$)|(^o[3-9].*)|(^gpt-5.*)"}}]}}},{"displayName":"Max Tool Calls Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. Set to 0 for no limit.","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.5}}]}}}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/OpenAi/openAi.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/OpenAi/openAi.dark.svg"}},
|
|
10
|
-
{"displayName":"Microsoft Agent 365 Trigger","name":"microsoftAgent365Trigger","group":["trigger"],"description":"Trigger for Microsoft Agent 365","codex":{"categories":["Core Nodes"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.microsoftAgent365Trigger/"}]}},"version":[1,1.1],"defaults":{"name":"Microsoft Agent 365"},"inputs":"={{\n\t\t\t\t((hasOutputParser, needsFallback) => {\n\t\t\t\t\tfunction getInputs(hasMainInput, hasOutputParser, needsFallback) {\n const getInputData = (inputs) => {\n return inputs.map(({ type, filter, displayName, required }) => {\n const input = {\n type,\n displayName,\n required,\n maxConnections: ['ai_languageModel', 'ai_memory', 'ai_outputParser'].includes(type)\n ? 1\n : undefined,\n };\n if (filter) {\n input.filter = filter;\n }\n return input;\n });\n };\n let specialInputs = [\n {\n type: 'ai_languageModel',\n displayName: 'Chat Model',\n required: true,\n filter: {\n excludedNodes: [\n '@n8n/n8n-nodes-langchain.lmCohere',\n '@n8n/n8n-nodes-langchain.lmOllama',\n 'n8n/n8n-nodes-langchain.lmOpenAi',\n '@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference',\n ],\n },\n },\n {\n type: 'ai_languageModel',\n displayName: 'Fallback Model',\n required: true,\n filter: {\n excludedNodes: [\n '@n8n/n8n-nodes-langchain.lmCohere',\n '@n8n/n8n-nodes-langchain.lmOllama',\n 'n8n/n8n-nodes-langchain.lmOpenAi',\n '@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference',\n ],\n },\n },\n {\n displayName: 'Memory',\n type: 'ai_memory',\n },\n {\n displayName: 'Tool',\n type: 'ai_tool',\n },\n {\n displayName: 'Output Parser',\n type: 'ai_outputParser',\n },\n ];\n if (hasOutputParser === false) {\n specialInputs = specialInputs.filter((input) => input.type !== 'ai_outputParser');\n }\n if (needsFallback === false) {\n specialInputs = specialInputs.filter((input) => input.displayName !== 'Fallback Model');\n }\n const mainInputs = hasMainInput ? 
['main'] : [];\n return [...mainInputs, ...getInputData(specialInputs)];\n};\n\t\t\t\t\treturn getInputs(false, hasOutputParser, needsFallback);\n\t\t\t\t})($parameter.hasOutputParser === undefined || $parameter.hasOutputParser === true, $parameter.needsFallback !== undefined && $parameter.needsFallback === true)\n\t\t\t}}","outputs":["main"],"triggerPanel":false,"webhooks":[{"name":"default","httpMethod":"POST","responseMode":"onReceived","path":"webhook","ndvHideMethod":true},{"name":"default","httpMethod":"HEAD","responseMode":"onReceived","path":"webhook","ndvHideUrl":true,"ndvHideMethod":true}],"credentials":[{"name":"microsoftAgent365Api","required":true}],"properties":[{"displayName":"This is an early preview for building Agents with Microsoft Agent 365 and n8n. You need to be part of the <a href=\"https://adoption.microsoft.com/copilot/frontier-program/\" target=\"_blank\">Frontier preview program</a> to get early access to Microsoft Agent 365. <a href=\"https://github.com/microsoft/Agent365-Samples/tree/main/nodejs/n8n/sample-agent\" target=\"_blank\">Learn more</a>","name":"previewNotice","type":"notice","default":""},{"displayName":"System Prompt","name":"systemPrompt","type":"string","placeholder":"e.g. 
You are a friendly assistant that helps people find a weather forecast for a given time and place.","default":"","typeOptions":{"rows":4}},{"displayName":"Connect an <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_outputParser'>output parser</a> on the canvas to specify the output format you require","name":"notice","type":"notice","default":"","displayOptions":{"show":{"hasOutputParser":[true]}}},{"displayName":"Enable Fallback Model","name":"needsFallback","type":"boolean","default":false,"noDataExpression":true,"displayOptions":{"show":{"@version":[{"_cnd":{"gte":2.1}}]}}},{"displayName":"Connect an additional language model on the canvas to use it as a fallback if the main model fails","name":"fallbackNotice","type":"notice","default":"","displayOptions":{"show":{"needsFallback":[true]}}},{"displayName":"Enable Microsoft
|
|
8
|
+
{"displayName":"OpenAI","name":"openAi","group":["transform"],"defaultVersion":2.3,"subtitle":"={{((resource, operation) => {\n if (operation === 'deleteAssistant') {\n return 'Delete Assistant';\n }\n if (operation === 'deleteFile') {\n return 'Delete File';\n }\n if (operation === 'classify') {\n return 'Classify Text';\n }\n if (operation === 'message' && resource === 'text') {\n return 'Message Model';\n }\n const capitalize = (str) => {\n const chars = str.split('');\n chars[0] = chars[0].toUpperCase();\n return chars.join('');\n };\n if (['transcribe', 'translate'].includes(operation)) {\n resource = 'recording';\n }\n if (operation === 'list') {\n resource = resource + 's';\n }\n return `${capitalize(operation)} ${capitalize(resource)}`;\n})($parameter.resource, $parameter.operation)}}","description":"Message an assistant or GPT, analyze images, generate audio, etc.","codex":{"alias":["LangChain","ChatGPT","Sora","DallE","whisper","audio","transcribe","tts","assistant"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.openai/"}]}},"builderHint":{"message":"For text generation, reasoning and tools, use AI Agent with OpenAI Chat Model. 
This OpenAI node is for specialized operations: image generation (DALL-E), audio (Whisper, TTS), and video generation (Sora).","relatedNodes":[{"nodeType":"@n8n/n8n-nodes-langchain.agent","relationHint":"Prefer for most LLM tasks"},{"nodeType":"@n8n/n8n-nodes-langchain.lmChatOpenAi","relationHint":"Prefer for most LLM tasks"}]},"version":[2,2.1,2.2,2.3],"defaults":{"name":"OpenAI"},"inputs":"={{((resource, operation, hideTools, memory) => {\n if (resource === 'assistant' && operation === 'message') {\n const inputs = [\n { type: 'main' },\n { type: 'ai_tool', displayName: 'Tools' },\n ];\n if (memory !== 'threadId') {\n inputs.push({ type: 'ai_memory', displayName: 'Memory', maxConnections: 1 });\n }\n return inputs;\n }\n if (resource === 'text' && (operation === 'message' || operation === 'response')) {\n if (hideTools === 'hide') {\n return ['main'];\n }\n return [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n }\n return ['main'];\n})($parameter.resource, $parameter.operation, $parameter.hideTools, $parameter.memory ?? 
undefined)}}","outputs":["main"],"credentials":[{"name":"openAiApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Text","value":"text","builderHint":{"message":"For text generation, reasoning and tools, use AI Agent with OpenAI Chat Model instead of this resource."}},{"name":"Image","value":"image"},{"name":"Audio","value":"audio"},{"name":"File","value":"file"},{"name":"Conversation","value":"conversation"},{"name":"Video","value":"video"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Generate Audio","value":"generate","action":"Generate audio","description":"Creates audio from a text prompt"},{"name":"Transcribe a Recording","value":"transcribe","action":"Transcribe a recording","description":"Transcribes audio into text"},{"name":"Translate a Recording","value":"translate","action":"Translate a recording","description":"Translates audio into text in English"}],"default":"generate","displayOptions":{"show":{"resource":["audio"]}}},{"displayName":"OpenAI API limits the size of the audio file to 25 MB","name":"fileSizeLimitNotice","type":"notice","default":" ","displayOptions":{"show":{"resource":["audio"],"operation":["translate","transcribe"]}}},{"displayName":"Model","name":"model","type":"options","default":"tts-1","options":[{"name":"TTS-1","value":"tts-1"},{"name":"TTS-1-HD","value":"tts-1-hd"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Text Input","name":"input","type":"string","placeholder":"e.g. The quick brown fox jumped over the lazy dog","description":"The text to generate audio for. 
The maximum length is 4096 characters.","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Voice","name":"voice","type":"options","default":"alloy","description":"The voice to use when generating the audio","options":[{"name":"Alloy","value":"alloy"},{"name":"Echo","value":"echo"},{"name":"Fable","value":"fable"},{"name":"Nova","value":"nova"},{"name":"Onyx","value":"onyx"},{"name":"Shimmer","value":"shimmer"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Response Format","name":"response_format","type":"options","default":"mp3","options":[{"name":"MP3","value":"mp3"},{"name":"OPUS","value":"opus"},{"name":"AAC","value":"aac"},{"name":"FLAC","value":"flac"}]},{"displayName":"Audio Speed","name":"speed","type":"number","default":1,"typeOptions":{"minValue":0.25,"maxValue":4,"numberPrecision":1}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm","displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Language of the Audio File","name":"language","type":"string","description":"The language of the input audio. 
Supplying the input language in <a href=\"https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes\" target=\"_blank\">ISO-639-1</a> format will improve accuracy and latency.","default":""},{"displayName":"Output Randomness (Temperature)","name":"temperature","type":"number","default":0,"typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}}],"displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","placeholder":"e.g. data","description":"Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm","displayOptions":{"show":{"operation":["translate"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Output Randomness (Temperature)","name":"temperature","type":"number","default":0,"typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}}],"displayOptions":{"show":{"operation":["translate"],"resource":["audio"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Delete a File","value":"deleteFile","action":"Delete a file","description":"Delete a file from the server"},{"name":"List Files","value":"list","action":"List files","description":"Returns a list of files that belong to the user's organization"},{"name":"Upload a File","value":"upload","action":"Upload a file","description":"Upload a file that can be used across various endpoints"}],"default":"upload","displayOptions":{"show":{"resource":["file"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","placeholder":"e.g. 
data","description":"Name of the binary property which contains the file. The size of individual files can be a maximum of 512 MB or 2 million tokens for Assistants.","displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Purpose","name":"purpose","type":"options","default":"user_data","description":"The intended purpose of the uploaded file, the 'Fine-tuning' only supports .jsonl files","options":[{"name":"Assistants","value":"assistants"},{"name":"Fine-Tune","value":"fine-tune"},{"name":"Vision","value":"vision"},{"name":"User Data","value":"user_data"}]}],"displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"File","name":"fileId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"fileSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","validation":[{"type":"regex","properties":{"regex":"file-[a-zA-Z0-9]","errorMessage":"Not a valid File ID"}}],"placeholder":"e.g. 
file-1234567890"}],"displayOptions":{"show":{"operation":["deleteFile"],"resource":["file"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Purpose","name":"purpose","type":"options","default":"any","description":"Only return files with the given purpose","options":[{"name":"Any [Default]","value":"any"},{"name":"Assistants","value":"assistants"},{"name":"Fine-Tune","value":"fine-tune"},{"name":"Vision","value":"vision"},{"name":"User Data","value":"user_data"}]}],"displayOptions":{"show":{"operation":["list"],"resource":["file"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Take in images and answer questions about them"},{"name":"Generate an Image","value":"generate","action":"Generate an image","description":"Creates an image from a text prompt"},{"name":"Edit Image","value":"edit","action":"Edit image","description":"Edit an image"}],"default":"generate","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"model","type":"options","default":"dall-e-3","description":"The model to use for image generation","options":[{"name":"DALL·E 2","value":"dall-e-2"},{"name":"DALL·E 3","value":"dall-e-3"},{"name":"GPT Image 1","value":"gpt-image-1"}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":2.2}}],"operation":["generate"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":"gpt-image-1-mini"},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageGenerateModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
gpt-4"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":2.2}}],"operation":["generate"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","placeholder":"e.g. A cute cat eating a dinosaur","description":"A text description of the desired image(s). The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"displayOptions":{"show":{"@version":[{"_cnd":{"lt":2.2}}],"operation":["generate"],"resource":["image"]}},"options":[{"displayName":"Number of Images","name":"n","default":1,"description":"Number of images to generate","type":"number","typeOptions":{"minValue":1,"maxValue":10},"displayOptions":{"show":{"/model":["dall-e-2"]}}},{"displayName":"Quality","name":"dalleQuality","type":"options","description":"The quality of the image that will be generated, HD creates images with finer details and greater consistency across the image","options":[{"name":"HD","value":"hd"},{"name":"Standard","value":"standard"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"standard"},{"displayName":"Quality","name":"quality","type":"options","description":"The quality of the image that will be generated, High creates images with finer details and greater consistency across the 
image","options":[{"name":"High","value":"high"},{"name":"Medium","value":"medium"},{"name":"Low","value":"low"}],"displayOptions":{"show":{"/model":[{"_cnd":{"includes":"gpt-image"}}]}},"default":"medium"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"256x256","value":"256x256"},{"name":"512x512","value":"512x512"},{"name":"1024x1024","value":"1024x1024"}],"displayOptions":{"show":{"/model":["dall-e-2"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1792x1024","value":"1792x1024"},{"name":"1024x1792","value":"1024x1792"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1024x1536","value":"1024x1536"},{"name":"1536x1024","value":"1536x1024"}],"displayOptions":{"show":{"/model":[{"_cnd":{"includes":"gpt-image"}}]}},"default":"1024x1024"},{"displayName":"Style","name":"style","type":"options","options":[{"name":"Natural","value":"natural","description":"Produce more natural looking images"},{"name":"Vivid","value":"vivid","description":"Lean towards generating hyper-real and dramatic images"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"vivid"},{"displayName":"Respond with Image URL(s)","name":"returnImageUrls","type":"boolean","default":false,"description":"Whether to return image URL(s) instead of binary file(s)","displayOptions":{"hide":{"/model":[{"_cnd":{"includes":"gpt-image"}}]}}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in","displayOptions":{"show":{"returnImageUrls":[false]}}}]},{"displayName":"Options","name":"options","placeholder":"Add 
Option","type":"collection","default":{},"displayOptions":{"show":{"@version":[{"_cnd":{"gte":2.2}}],"operation":["generate"],"resource":["image"]}},"options":[{"displayName":"Number of Images","name":"n","default":1,"description":"Number of images to generate","type":"number","typeOptions":{"minValue":1,"maxValue":10},"displayOptions":{"show":{"/modelId":["dall-e-2"]}}},{"displayName":"Quality","name":"dalleQuality","type":"options","description":"The quality of the image that will be generated, HD creates images with finer details and greater consistency across the image","options":[{"name":"HD","value":"hd"},{"name":"Standard","value":"standard"}],"displayOptions":{"show":{"/modelId":["dall-e-3"]}},"default":"standard"},{"displayName":"Quality","name":"quality","type":"options","description":"The quality of the image that will be generated, High creates images with finer details and greater consistency across the image","options":[{"name":"High","value":"high"},{"name":"Medium","value":"medium"},{"name":"Low","value":"low"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}},"default":"medium"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"256x256","value":"256x256"},{"name":"512x512","value":"512x512"},{"name":"1024x1024","value":"1024x1024"}],"displayOptions":{"show":{"/modelId":["dall-e-2"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1792x1024","value":"1792x1024"},{"name":"1024x1792","value":"1024x1792"}],"displayOptions":{"show":{"/modelId":["dall-e-3"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1024x1536","value":"1024x1536"},{"name":"1536x1024","value":"1536x1024"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}},"default":"1024x1024"},{"displayName":"Style","name":"style","
type":"options","options":[{"name":"Natural","value":"natural","description":"Produce more natural looking images"},{"name":"Vivid","value":"vivid","description":"Lean towards generating hyper-real and dramatic images"}],"displayOptions":{"show":{"/modelId":["dall-e-3"]}},"default":"vivid"},{"displayName":"Respond with Image URL(s)","name":"returnImageUrls","type":"boolean","default":false,"description":"Whether to return image URL(s) instead of binary file(s)","displayOptions":{"hide":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in","displayOptions":{"show":{"returnImageUrls":[false]}}}]},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this image?","default":"What's in this image?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Image URL(s)","value":"url"},{"name":"Binary File(s)","value":"base64"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"URL(s)","name":"imageUrls","type":"string","placeholder":"e.g. 
https://example.com/image.jpeg","description":"URL(s) of the image(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image(s)","displayOptions":{"show":{"inputType":["base64"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Detail","name":"detail","type":"options","default":"auto","options":[{"name":"Auto","value":"auto","description":"Model will look at the image input size and decide if it should use the low or high setting"},{"name":"Low","value":"low","description":"Return faster responses and consume fewer tokens"},{"name":"High","value":"high","description":"Return more detailed responses, consumes more tokens"}]},{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Model","name":"model","type":"options","default":"gpt-image-1","description":"The model to use for image generation","options":[{"name":"DALL·E 2","value":"dall-e-2"},{"name":"GPT Image 
1","value":"gpt-image-1"}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":2.3}}],"operation":["edit"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":"gpt-image-1"},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageGenerateModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":2.3}}],"operation":["edit"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","required":true,"default":"","description":"A text description of the desired image(s). Maximum 1000 characters for dall-e-2, 32000 characters for gpt-image-1.","placeholder":"A beautiful sunset over mountains","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Images","name":"images","type":"fixedCollection","placeholder":"Add Image","typeOptions":{"multipleValues":true,"multipleValueButtonText":"Add Image"},"default":{"values":[{"binaryPropertyName":"data"}]},"description":"Add one or more binary fields to include images with your prompt. Each image should be a png, webp, or jpg file less than 50MB. You can provide up to 16 images.","displayOptions":{"show":{"/model":["gpt-image-1"],"operation":["edit"],"resource":["image"]}},"options":[{"displayName":"Image","name":"values","values":[{"displayName":"Binary Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","description":"The name of the binary field containing the image data"}]}]},{"displayName":"Images","name":"images","type":"fixedCollection","placeholder":"Add Image","typeOptions":{"multipleValues":true,"multipleValueButtonText":"Add Image"},"default":{"values":[{"binaryPropertyName":"data"}]},"description":"Add one or more binary fields to include images with your prompt. Each image should be a png, webp, or jpg file less than 50MB. You can provide up to 16 images.","displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}],"operation":["edit"],"resource":["image"]}},"options":[{"displayName":"Image","name":"values","values":[{"displayName":"Binary Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","description":"The name of the binary field containing the image data"}]}]},{"displayName":"Binary Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image. It should be a square png file less than 4MB.","displayOptions":{"show":{"/model":["dall-e-2"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Binary Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image. It should be a square png file less than 4MB.","displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"dall-e"}}],"operation":["edit"],"resource":["image"]}}},{"displayName":"Number of Images","name":"n","type":"number","default":1,"description":"The number of images to generate. 
Must be between 1 and 10.","typeOptions":{"minValue":1,"maxValue":10},"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Size","name":"size","type":"options","default":"1024x1024","description":"The size of the generated images","options":[{"name":"256x256","value":"256x256"},{"name":"512x512","value":"512x512"},{"name":"1024x1024","value":"1024x1024"},{"name":"1024x1536 (Portrait)","value":"1024x1536"},{"name":"1536x1024 (Landscape)","value":"1536x1024"},{"name":"Auto","value":"auto"}],"displayOptions":{"show":{"operation":["edit"],"resource":["image"]}}},{"displayName":"Quality","name":"quality","type":"options","default":"auto","description":"The quality of the image that will be generated","options":[{"name":"Auto","value":"auto"},{"name":"High","value":"high"},{"name":"Medium","value":"medium"},{"name":"Low","value":"low"},{"name":"Standard","value":"standard"}],"displayOptions":{"show":{"/model":["gpt-image-1"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Quality","name":"quality","type":"options","default":"auto","description":"The quality of the image that will be generated","options":[{"name":"Auto","value":"auto"},{"name":"High","value":"high"},{"name":"Medium","value":"medium"},{"name":"Low","value":"low"},{"name":"Standard","value":"standard"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}],"operation":["edit"],"resource":["image"]}}},{"displayName":"Response Format","name":"responseFormat","type":"options","default":"url","description":"The format in which the generated images are returned. URLs are only valid for 60 minutes after generation.","options":[{"name":"URL","value":"url"},{"name":"Base64 JSON","value":"b64_json"}],"displayOptions":{"show":{"/model":["dall-e-2"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Response Format","name":"responseFormat","type":"options","default":"url","description":"The format in which the generated images are returned. 
URLs are only valid for 60 minutes after generation.","options":[{"name":"URL","value":"url"},{"name":"Base64 JSON","value":"b64_json"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"dall-e"}}],"operation":["edit"],"resource":["image"]}}},{"displayName":"Output Format","name":"outputFormat","type":"options","default":"png","description":"The format in which the generated images are returned. Only supported for gpt-image-1.","options":[{"name":"PNG","value":"png"},{"name":"JPEG","value":"jpeg"},{"name":"WebP","value":"webp"}],"displayOptions":{"show":{"/model":["gpt-image-1"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Output Format","name":"outputFormat","type":"options","default":"png","description":"The format in which the generated images are returned. Only supported for gpt-image-1.","options":[{"name":"PNG","value":"png"},{"name":"JPEG","value":"jpeg"},{"name":"WebP","value":"webp"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}],"operation":["edit"],"resource":["image"]}}},{"displayName":"Output Compression","name":"outputCompression","type":"number","default":100,"description":"The compression level (0-100%) for the generated images. Only supported for gpt-image-1 with webp or jpeg output formats.","typeOptions":{"minValue":0,"maxValue":100},"displayOptions":{"show":{"/model":["gpt-image-1"],"outputFormat":["webp","jpeg"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Output Compression","name":"outputCompression","type":"number","default":100,"description":"The compression level (0-100%) for the generated images. 
Only supported for gpt-image-1 with webp or jpeg output formats.","typeOptions":{"minValue":0,"maxValue":100},"displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}],"outputFormat":["webp","jpeg"],"operation":["edit"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"displayOptions":{"show":{"@version":[{"_cnd":{"lt":2.3}}],"operation":["edit"],"resource":["image"]}},"options":[{"displayName":"User","name":"user","type":"string","default":"","description":"A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse","placeholder":"user-12345"},{"displayName":"Background","name":"background","type":"options","default":"auto","description":"Allows to set transparency for the background of the generated image(s). Only supported for gpt-image-1.","options":[{"name":"Auto","value":"auto"},{"name":"Transparent","value":"transparent"},{"name":"Opaque","value":"opaque"}],"displayOptions":{"show":{"/model":["gpt-image-1"]}}},{"displayName":"Input Fidelity","name":"inputFidelity","type":"options","default":"low","description":"Control how much effort the model will exert to match the style and features of input images. Only supported for gpt-image-1.","options":[{"name":"Low","value":"low"},{"name":"High","value":"high"}],"displayOptions":{"show":{"/model":["gpt-image-1"]}}},{"displayName":"Image Mask","name":"imageMask","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image. An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. If there are multiple images provided, the mask will be applied on the first image. 
Must be a valid PNG file, less than 4MB, and have the same dimensions as image."}]},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"displayOptions":{"show":{"@version":[{"_cnd":{"gte":2.3}}],"operation":["edit"],"resource":["image"]}},"options":[{"displayName":"User","name":"user","type":"string","default":"","description":"A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse","placeholder":"user-12345"},{"displayName":"Background","name":"background","type":"options","default":"auto","description":"Allows to set transparency for the background of the generated image(s). Only supported for gpt-image-1.","options":[{"name":"Auto","value":"auto"},{"name":"Transparent","value":"transparent"},{"name":"Opaque","value":"opaque"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}}},{"displayName":"Input Fidelity","name":"inputFidelity","type":"options","default":"low","description":"Control how much effort the model will exert to match the style and features of input images. Only supported for gpt-image-1.","options":[{"name":"Low","value":"low"},{"name":"High","value":"high"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"includes":"gpt-image"}}]}}},{"displayName":"Image Mask","name":"imageMask","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image. An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. If there are multiple images provided, the mask will be applied on the first image. 
Must be a valid PNG file, less than 4MB, and have the same dimensions as image."}]},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"response","action":"Message a model","description":"Generate a model response with GPT 3, 4, 5, etc. using Responses API"},{"name":"Classify Text for Violations","value":"classify","action":"Classify text for violations","description":"Check whether content complies with usage policies"}],"default":"response","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Text Input","name":"input","type":"string","placeholder":"e.g. Sample text goes here","description":"The input text to classify if it violates the moderation policy","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["classify"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":false,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["classify"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Use Stable Model","name":"useStableModel","type":"boolean","default":false,"description":"Whether to use the stable version of the model instead of the latest version, accuracy may be slightly lower"}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":2.1}}],"operation":["classify"],"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
gpt-4"}],"displayOptions":{"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Messages","name":"responses","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"type":"text"}]},"options":[{"displayName":"Message","name":"values","values":[{"displayName":"Type","name":"type","type":"options","default":"text","options":[{"name":"Text","value":"text"},{"name":"Image","value":"image"},{"name":"File","value":"file"}]},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"},{"name":"System","value":"system","description":"Usually used to set the model's behavior or context for the next user message"}],"default":"user"},{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be sent","default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["text"]}}},{"displayName":"Image Type","name":"imageType","type":"options","default":"url","options":[{"name":"Image URL","value":"url"},{"name":"File ID","value":"fileId"},{"name":"File Data","value":"base64"}],"displayOptions":{"show":{"type":["image"]}}},{"displayName":"Image URL","name":"imageUrl","type":"string","default":"","placeholder":"e.g. https://example.com/image.jpeg","description":"URL of the image to be sent","displayOptions":{"show":{"type":["image"],"imageType":["url"]}}},{"displayName":"Image Data","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image(s)","displayOptions":{"show":{"type":["image"],"imageType":["base64"]}}},{"displayName":"File ID","name":"fileId","type":"string","default":"","description":"ID of the file to be sent","displayOptions":{"show":{"type":["image"],"imageType":["fileId"]}}},{"displayName":"Detail","name":"imageDetail","type":"options","default":"auto","description":"The detail level of the image to be sent to the model","options":[{"name":"Auto","value":"auto"},{"name":"Low","value":"low"},{"name":"High","value":"high"}],"displayOptions":{"show":{"type":["image"]}}},{"displayName":"File Type","name":"fileType","type":"options","default":"url","options":[{"name":"File URL","value":"url"},{"name":"File ID","value":"fileId"},{"name":"File Data","value":"base64"}],"displayOptions":{"show":{"type":["file"]}}},{"displayName":"File URL","name":"fileUrl","type":"string","default":"","placeholder":"e.g. https://example.com/file.pdf","description":"URL of the file to be sent. Accepts base64 encoded files as well.","displayOptions":{"show":{"type":["file"],"fileType":["url"]}}},{"displayName":"File ID","name":"fileId","type":"string","default":"","description":"ID of the file to be sent","displayOptions":{"show":{"type":["file"],"fileType":["fileId"]}}},{"displayName":"File Data","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the file","displayOptions":{"show":{"type":["file"],"fileType":["base64"]}}},{"displayName":"File Name","name":"fileName","type":"string","default":"","required":true,"displayOptions":{"show":{"type":["file"],"fileType":["base64"]}}}]}],"displayOptions":{"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Hide Tools","name":"hideTools","type":"hidden","default":"hide","displayOptions":{"show":{"modelId":["gpt-3.5-turbo-16k-0613","dall-e-3","text-embedding-3-large","dall-e-2","whisper-1","tts-1-hd-1106","tts-1-hd","gpt-4-0314","text-embedding-3-small","gpt-4-32k-0314","gpt-3.5-turbo-0301","gpt-4-vision-preview","gpt-3.5-turbo-16k","gpt-3.5-turbo-instruct-0914","tts-1","davinci-002","gpt-3.5-turbo-instruct","babbage-002","tts-1-1106","text-embedding-ada-002"],"operation":["response"],"resource":["text"]}}},{"displayName":"Connect your own custom n8n tools to this node on the canvas","name":"noticeTools","type":"notice","default":"","displayOptions":{"hide":{"hideTools":["hide"]},"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Built-in Tools","name":"builtInTools","placeholder":"Add Built-in Tool","type":"collection","default":{},"options":[{"displayName":"Web Search","name":"webSearch","type":"collection","default":{"searchContextSize":"medium"},"options":[{"displayName":"Search Context Size","description":"High level guidance for the amount of context window space to use for the 
search","name":"searchContextSize","type":"options","default":"medium","options":[{"name":"Low","value":"low"},{"name":"Medium","value":"medium"},{"name":"High","value":"high"}]},{"displayName":"Web Search Allowed Domains","name":"allowedDomains","type":"string","default":"","description":"Comma-separated list of domains to search. Only domains in this list will be searched.","placeholder":"e.g. google.com, wikipedia.org"},{"displayName":"Country","name":"country","type":"string","default":"","placeholder":"e.g. US, GB"},{"displayName":"City","name":"city","type":"string","default":"","placeholder":"e.g. New York, London"},{"displayName":"Region","name":"region","type":"string","default":"","placeholder":"e.g. New York, London"}]},{"displayName":"File Search","name":"fileSearch","type":"collection","default":{"vectorStoreIds":"[]"},"options":[{"displayName":"Vector Store IDs","name":"vectorStoreIds","description":"The vector store IDs to use for the file search. Vector stores are managed via OpenAI Dashboard.","type":"json","default":"[]","required":true},{"displayName":"Filters","name":"filters","type":"json","default":"{}"},{"displayName":"Max Results","name":"maxResults","type":"number","default":1,"typeOptions":{"minValue":1,"maxValue":50}}]},{"displayName":"Code Interpreter","name":"codeInterpreter","type":"boolean","default":true,"description":"Whether to allow the model to execute code in a sandboxed environment"}],"displayOptions":{"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Conversation ID","name":"conversationId","default":"","description":"The conversation that this response belongs to. 
Input items and output items from this response are automatically added to this conversation after this response completes.","type":"string"},{"displayName":"Include Additional Data","name":"include","default":[],"type":"multiOptions","description":"Specify additional output data to include in the model response","options":[{"name":"Code Interpreter Call Outputs","value":"code_interpreter_call.outputs"},{"name":"Computer Call Output Image URL","value":"computer_call_output.output.image_url"},{"name":"File Search Call Results","value":"file_search_call.results"},{"name":"Message Input Image URL","value":"message.input_image.image_url"},{"name":"Message Output Text Logprobs","value":"message.output_text.logprobs"},{"name":"Reasoning Encrypted Content","value":"reasoning.encrypted_content"},{"name":"Web Search Tool Call Sources","value":"web_search_call.action.sources"}]},{"displayName":"Instructions","name":"instructions","type":"string","default":"","description":"Instructions for the model to follow","typeOptions":{"rows":2}},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":16,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Max Tool Calls Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. Set to 0 for no limit."},{"displayName":"Max Built-in Tool Calls","name":"maxToolCalls","type":"number","default":15,"description":"The maximum number of total calls to built-in tools that can be processed in a response. This maximum number applies across all built-in tool calls, not per individual tool. 
Any further attempts to call a tool by the model will be ignored."},{"displayName":"Metadata","name":"metadata","type":"json","description":"Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters.","default":"{}"},{"displayName":"Parallel Tool Calls","name":"parallelToolCalls","type":"boolean","default":false,"description":"Whether to allow parallel tool calls. If true, the model can call multiple tools at once."},{"displayName":"Previous Response ID","name":"previousResponseId","type":"string","default":"","description":"The ID of the previous response to continue from. Cannot be used in conjunction with Conversation ID."},{"displayName":"Prompt","name":"promptConfig","type":"fixedCollection","description":"Configure the reusable prompt template configured via OpenAI Dashboard. 
<a href=\"https://platform.openai.com/docs/guides/prompt-engineering#reusable-prompts\">Learn more</a>.","default":{"promptOptions":[{"promptId":""}]},"options":[{"displayName":"Prompt","name":"promptOptions","values":[{"displayName":"Prompt ID","name":"promptId","type":"string","default":"","description":"The unique identifier of the prompt template to use"},{"displayName":"Version","name":"version","type":"string","default":"","description":"Optional version of the prompt template"},{"displayName":"Variables","name":"variables","type":"json","default":"{}","description":"Variables to be substituted into the prompt template"}]}]},{"displayName":"Prompt Cache Key","name":"promptCacheKey","type":"string","default":"","description":"Used by OpenAI to cache responses for similar requests to optimize your cache hit rates"},{"displayName":"Reasoning","name":"reasoning","type":"fixedCollection","default":{"reasoningOptions":[{"effort":"medium","summary":"none"}]},"options":[{"displayName":"Reasoning","name":"reasoningOptions","values":[{"displayName":"Effort","name":"effort","type":"options","default":"medium","options":[{"name":"Low","value":"low"},{"name":"Medium","value":"medium"},{"name":"High","value":"high"}]},{"displayName":"Summary","name":"summary","type":"options","default":"auto","description":"A summary of the reasoning performed by the model. This can be useful for debugging and understanding the model's reasoning process.","options":[{"name":"None","value":"none"},{"name":"Auto","value":"auto"},{"name":"Concise","value":"concise"},{"name":"Detailed","value":"detailed"}]}]}]},{"displayName":"Safety Identifier","name":"safetyIdentifier","type":"string","default":"","description":"A stable identifier used to help detect users of your application that may be violating OpenAI's usage policies. 
The IDs should be a string that uniquely identifies each user."},{"displayName":"Service Tier","name":"serviceTier","type":"options","default":"auto","description":"The service tier to use for the request","options":[{"name":"Auto","value":"auto"},{"name":"Flex","value":"flex"},{"name":"Default","value":"default"},{"name":"Priority","value":"priority"}]},{"displayName":"Store","name":"store","type":"boolean","default":true,"description":"Whether to store the generated model response for later retrieval via API"},{"displayName":"Output Format","name":"textFormat","type":"fixedCollection","default":{"textOptions":[{"type":"text"}]},"options":[{"displayName":"Text","name":"textOptions","values":[{"displayName":"Type","name":"type","type":"options","default":"","options":[{"name":"Text","value":"text"},{"name":"JSON Schema (recommended)","value":"json_schema"},{"name":"JSON Object","value":"json_object"}]},{"displayName":"Verbosity","name":"verbosity","type":"options","default":"medium","options":[{"name":"Low","value":"low"},{"name":"Medium","value":"medium"},{"name":"High","value":"high"}]},{"displayName":"Name","name":"name","type":"string","default":"my_schema","description":"The name of the response format. 
Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"All properties in the schema must be set to \"required\", when using \"strict\" mode.","name":"requiredNotice","type":"notice","default":"","displayOptions":{"show":{"strict":[true]}}},{"displayName":"Schema","name":"schema","type":"json","default":"{\n \"type\": \"object\",\n \"properties\": {\n \"message\": {\n \"type\": \"string\"\n }\n },\n \"additionalProperties\": false,\n \"required\": [\"message\"]\n}","description":"The schema of the response format","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"Description","name":"description","type":"string","default":"","description":"The description of the response format","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"Strict","name":"strict","type":"boolean","default":false,"description":"Whether to require that the AI will always generate responses that match the provided JSON Schema","displayOptions":{"show":{"type":["json_schema"]}}}]}]},{"displayName":"Top Logprobs","name":"topLogprobs","type":"number","default":0,"description":"An integer between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability","typeOptions":{"minValue":0,"maxValue":20}},{"displayName":"Output Randomness (Temperature)","name":"temperature","type":"number","default":1,"description":"What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. 
We generally recommend altering this or top_p but not both","typeOptions":{"minValue":0,"maxValue":2,"numberPrecision":1}},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Truncation","name":"truncation","type":"boolean","default":false,"description":"Whether to truncate the input to the model's context window size. When disabled will throw a 400 error instead."},{"displayName":"Background Mode","name":"backgroundMode","type":"fixedCollection","default":{"values":[{"backgroundMode":true}]},"options":[{"displayName":"Bakground","name":"values","values":[{"displayName":"Background Mode","name":"enabled","type":"boolean","default":false,"description":"Whether to run the model in background mode. If true, the model will run in background mode."},{"displayName":"Timeout","name":"timeout","type":"number","default":300,"description":"The timeout for the background mode in seconds. 
If 0, the timeout is infinite.","typeOptions":{"minValue":0,"maxValue":3600}}]}]}],"displayOptions":{"show":{"operation":["response"],"resource":["text"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Create","value":"create","action":"Create a conversation","description":"Create a conversation"},{"name":"Get","value":"get","action":"Get a conversation","description":"Get a conversation"},{"name":"Remove","value":"remove","action":"Remove a conversation","description":"Remove a conversation"},{"name":"Update","value":"update","action":"Update a conversation","description":"Update a conversation"}],"default":"create","displayOptions":{"show":{"resource":["conversation"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"type":"text"}]},"options":[{"displayName":"Message","name":"values","values":[{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"},{"name":"System","value":"system","description":"Usually used to set the model's behavior or context for the next user message"}],"default":"user"},{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be send","default":"","placeholder":"e.g. 
Hello, how can you help me?","typeOptions":{"rows":2},"displayOptions":{}}]}],"displayOptions":{"show":{"operation":["create"],"resource":["conversation"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Metadata","name":"metadata","type":"json","description":"Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters.","default":"{}"}],"displayOptions":{"show":{"operation":["create"],"resource":["conversation"]}}},{"displayName":"Conversation ID","name":"conversationId","type":"string","default":"","placeholder":"conv_1234567890","description":"The ID of the conversation to delete","required":true,"displayOptions":{"show":{"operation":["remove"],"resource":["conversation"]}}},{"displayName":"Conversation ID","name":"conversationId","type":"string","default":"","placeholder":"conv_1234567890","description":"The ID of the conversation to update","required":true,"displayOptions":{"show":{"operation":["update"],"resource":["conversation"]}}},{"displayName":"Metadata","name":"metadata","type":"json","description":"Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. 
Values are strings with a maximum length of 512 characters.","default":"{}","required":true,"displayOptions":{"show":{"operation":["update"],"resource":["conversation"]}}},{"displayName":"Conversation ID","name":"conversationId","type":"string","default":"","placeholder":"conv_1234567890","description":"The ID of the conversation to retrieve","required":true,"displayOptions":{"show":{"operation":["get"],"resource":["conversation"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Generate","value":"generate","action":"Generate a video","description":"Creates a video from a text prompt"}],"default":"generate","displayOptions":{"show":{"resource":["video"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"videoModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"A video of a cat playing with a ball","description":"The prompt to generate a video from","required":true,"typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Seconds","name":"seconds","type":"number","default":4,"description":"Clip duration in seconds","required":true,"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Size","name":"size","type":"options","default":"1280x720","description":"Output resolution formatted as width x height. 
1024x1792 and 1792x1024 are only supported by Sora 2 Pro.","options":[{"name":"720x1280","value":"720x1280"},{"name":"1280x720","value":"1280x720"},{"name":"1024x1792","value":"1024x1792"},{"name":"1792x1024","value":"1792x1024"}],"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Reference","description":"Optional image reference that guides generation","name":"binaryPropertyNameReference","type":"string","default":"data","placeholder":"e.g. data"},{"displayName":"Wait Timeout","name":"waitTime","type":"number","default":300,"description":"Time to wait for the video to be generated in seconds","typeOptions":{"minValue":5,"maxValue":7200}},{"displayName":"Output Field Name","name":"fileName","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["generate"],"resource":["video"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/OpenAi/openAi.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/OpenAi/openAi.dark.svg"}},
|
|
9
|
+
{"displayName":"OpenAI","name":"openAi","group":["transform"],"defaultVersion":2.3,"subtitle":"={{((resource, operation) => {\n if (operation === 'deleteAssistant') {\n return 'Delete Assistant';\n }\n if (operation === 'deleteFile') {\n return 'Delete File';\n }\n if (operation === 'classify') {\n return 'Classify Text';\n }\n if (operation === 'message' && resource === 'text') {\n return 'Message Model';\n }\n const capitalize = (str) => {\n const chars = str.split('');\n chars[0] = chars[0].toUpperCase();\n return chars.join('');\n };\n if (['transcribe', 'translate'].includes(operation)) {\n resource = 'recording';\n }\n if (operation === 'list') {\n resource = resource + 's';\n }\n return `${capitalize(operation)} ${capitalize(resource)}`;\n})($parameter.resource, $parameter.operation)}}","description":"Message an assistant or GPT, analyze images, generate audio, etc.","codex":{"alias":["LangChain","ChatGPT","Sora","DallE","whisper","audio","transcribe","tts","assistant"],"categories":["AI"],"subcategories":{"AI":["Agents","Miscellaneous","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.openai/"}]}},"builderHint":{"message":"For text generation, reasoning and tools, use AI Agent with OpenAI Chat Model. 
This OpenAI node is for specialized operations: image generation (DALL-E), audio (Whisper, TTS), and video generation (Sora).","relatedNodes":[{"nodeType":"@n8n/n8n-nodes-langchain.agent","relationHint":"Prefer for most LLM tasks"},{"nodeType":"@n8n/n8n-nodes-langchain.lmChatOpenAi","relationHint":"Prefer for most LLM tasks"}]},"version":[1,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8],"defaults":{"name":"OpenAI"},"inputs":"={{((resource, operation, hideTools, memory) => {\n if (resource === 'assistant' && operation === 'message') {\n const inputs = [\n { type: 'main' },\n { type: 'ai_tool', displayName: 'Tools' },\n ];\n if (memory !== 'threadId') {\n inputs.push({ type: 'ai_memory', displayName: 'Memory', maxConnections: 1 });\n }\n return inputs;\n }\n if (resource === 'text' && (operation === 'message' || operation === 'response')) {\n if (hideTools === 'hide') {\n return ['main'];\n }\n return [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];\n }\n return ['main'];\n})($parameter.resource, $parameter.operation, $parameter.hideTools, $parameter.memory ?? 
undefined)}}","outputs":["main"],"credentials":[{"name":"openAiApi","required":true}],"properties":[{"displayName":"Resource","name":"resource","type":"options","noDataExpression":true,"options":[{"name":"Assistant","value":"assistant"},{"name":"Text","value":"text"},{"name":"Image","value":"image"},{"name":"Audio","value":"audio"},{"name":"File","value":"file"}],"default":"text"},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Create an Assistant","value":"create","action":"Create an assistant","description":"Create a new assistant"},{"name":"Delete an Assistant","value":"deleteAssistant","action":"Delete an assistant","description":"Delete an assistant from the account"},{"name":"List Assistants","value":"list","action":"List assistants","description":"List assistants in the organization"},{"name":"Message an Assistant","value":"message","action":"Message an assistant","description":"Send messages to an assistant"},{"name":"Update an Assistant","value":"update","action":"Update an assistant","description":"Update an existing assistant"}],"default":"message","displayOptions":{"show":{"resource":["assistant"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Name","name":"name","type":"string","default":"","description":"The name of the assistant. The maximum length is 256 characters.","placeholder":"e.g. My Assistant","required":true,"displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Description","name":"description","type":"string","default":"","description":"The description of the assistant. 
The maximum length is 512 characters.","placeholder":"e.g. My personal assistant","displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Instructions","name":"instructions","type":"string","description":"The system instructions that the assistant uses. The maximum length is 32768 characters.","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Code Interpreter","name":"codeInterpreter","type":"boolean","default":false,"description":"Whether to enable the code interpreter that allows the assistants to write and run Python code in a sandboxed execution environment, find more <a href=\"https://platform.openai.com/docs/assistants/tools/code-interpreter\" target=\"_blank\">here</a>","displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Knowledge Retrieval","name":"knowledgeRetrieval","type":"boolean","default":false,"description":"Whether to augments the assistant with knowledge from outside its model, such as proprietary product information or documents, find more <a href=\"https://platform.openai.com/docs/assistants/tools/knowledge-retrieval\" target=\"_blank\">here</a>","displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Files","name":"file_ids","type":"multiOptions","description":"The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant. 
You can use expression to pass file IDs as an array or comma-separated string.","typeOptions":{"loadOptionsMethod":"getFiles"},"default":[],"hint":"Add more files by using the 'Upload a File' operation","displayOptions":{"show":{"codeInterpreter":[true],"operation":["create"],"resource":["assistant"]},"hide":{"knowledgeRetrieval":[true]}}},{"displayName":"Files","name":"file_ids","type":"multiOptions","description":"The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant","typeOptions":{"loadOptionsMethod":"getFiles"},"default":[],"hint":"Add more files by using the 'Upload a File' operation","displayOptions":{"show":{"knowledgeRetrieval":[true],"operation":["create"],"resource":["assistant"]},"hide":{"codeInterpreter":[true]}}},{"displayName":"Files","name":"file_ids","type":"multiOptions","description":"The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant","typeOptions":{"loadOptionsMethod":"getFiles"},"default":[],"hint":"Add more files by using the 'Upload a File' operation","displayOptions":{"show":{"knowledgeRetrieval":[true],"codeInterpreter":[true],"operation":["create"],"resource":["assistant"]}}},{"displayName":"Add custom n8n tools when you <i>message</i> your assistant (rather than when creating it)","name":"noticeTools","type":"notice","default":"","displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Output Randomness (Temperature)","name":"temperature","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. 
We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Fail if Assistant Already Exists","name":"failIfExists","type":"boolean","default":false,"description":"Whether to fail an operation if the assistant with the same name already exists"}],"displayOptions":{"show":{"operation":["create"],"resource":["assistant"]}}},{"displayName":"Assistant","name":"assistantId","type":"resourceLocator","description":"Assistant to respond to the message. You can add, modify or remove assistants in the <a href=\"https://platform.openai.com/playground?mode=assistant\" target=\"_blank\">playground</a>.","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"assistantSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. asst_abc123"}],"displayOptions":{"show":{"operation":["deleteAssistant"],"resource":["assistant"]}}},{"displayName":"Assistant","name":"assistantId","type":"resourceLocator","description":"Assistant to respond to the message. You can add, modify or remove assistants in the <a href=\"https://platform.openai.com/playground?mode=assistant\" target=\"_blank\">playground</a>.","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"assistantSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. 
asst_abc123"}],"displayOptions":{"show":{"operation":["message"],"resource":["assistant"]}}},{"displayName":"Source for Prompt (User Message)","name":"prompt","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"auto","description":"Looks for an input field called 'chatInput' that is coming from a directly connected Chat Trigger"},{"name":"Connected Guardrails Node","value":"guardrails","description":"Looks for an input field called 'guardrailsInput' that is coming from a directly connected Guardrails Node"},{"name":"Define below","value":"define","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"auto","displayOptions":{"show":{"operation":["message"],"resource":["assistant"]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2},"displayOptions":{"show":{"prompt":["define"],"operation":["message"],"resource":["assistant"]}}},{"displayName":"Memory","name":"memory","type":"options","options":[{"name":"Use memory connector","value":"connector","description":"Connect one of the supported memory nodes"},{"name":"Use thread ID","value":"threadId","description":"Specify the ID of the thread to continue"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.6}}],"operation":["message"],"resource":["assistant"]}},"default":"connector"},{"displayName":"Thread ID","name":"threadId","type":"string","default":"","placeholder":"","description":"The ID of the thread to continue, a new thread will be created if not specified","hint":"If the thread ID is empty or undefined a new thread will be created and included in the response","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.6}}],"memory":["threadId"],"operation":["message"],"resource":["assistant"]}}},{"displayName":"Connect your own custom n8n tools to this node on the 
canvas","name":"noticeTools","type":"notice","default":"","displayOptions":{"show":{"operation":["message"],"resource":["assistant"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Base URL","name":"baseURL","default":"https://api.openai.com/v1","description":"Override the default base URL for the API","type":"string","displayOptions":{"hide":{"@version":[{"_cnd":{"gte":1.8}}]}}},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Timeout","name":"timeout","default":10000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Preserve Original Tools","name":"preserveOriginalTools","type":"boolean","default":true,"description":"Whether to preserve the original tools of the assistant after the execution of this node, otherwise the tools will be replaced with the connected tools, if any, default is true","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}]}}}],"displayOptions":{"show":{"operation":["message"],"resource":["assistant"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["list"],"resource":["assistant"]}}},{"displayName":"Assistant","name":"assistantId","type":"resourceLocator","description":"Assistant to respond to the message. 
You can add, modify or remove assistants in the <a href=\"https://platform.openai.com/playground?mode=assistant\" target=\"_blank\">playground</a>.","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"assistantSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. asst_abc123"}],"displayOptions":{"show":{"operation":["update"],"resource":["assistant"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Code Interpreter","name":"codeInterpreter","type":"boolean","default":false,"description":"Whether to enable the code interpreter that allows the assistants to write and run Python code in a sandboxed execution environment, find more <a href=\"https://platform.openai.com/docs/assistants/tools/code-interpreter\" target=\"_blank\">here</a>"},{"displayName":"Description","name":"description","type":"string","default":"","description":"The description of the assistant. The maximum length is 512 characters.","placeholder":"e.g. My personal assistant"},{"displayName":"Files","name":"file_ids","type":"multiOptions","description":"The files to be used by the assistant, there can be a maximum of 20 files attached to the assistant. You can use expression to pass file IDs as an array or comma-separated string.","typeOptions":{"loadOptionsMethod":"getFiles"},"default":[],"hint":"Add more files by using the 'Upload a File' operation, any existing files not selected here will be removed."},{"displayName":"Instructions","name":"instructions","type":"string","description":"The system instructions that the assistant uses. 
The maximum length is 32768 characters.","default":"","typeOptions":{"rows":2}},{"displayName":"Knowledge Retrieval","name":"knowledgeRetrieval","type":"boolean","default":false,"description":"Whether to augments the assistant with knowledge from outside its model, such as proprietary product information or documents, find more <a href=\"https://platform.openai.com/docs/assistants/tools/knowledge-retrieval\" target=\"_blank\">here</a>"},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":false,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}]},{"displayName":"Name","name":"name","type":"string","default":"","description":"The name of the assistant. The maximum length is 256 characters.","placeholder":"e.g. My Assistant"},{"displayName":"Remove All Custom Tools (Functions)","name":"removeCustomTools","type":"boolean","default":false,"description":"Whether to remove all custom tools (functions) from the assistant"},{"displayName":"Output Randomness (Temperature)","name":"temperature","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.","type":"number"}],"displayOptions":{"show":{"operation":["update"],"resource":["assistant"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Generate Audio","value":"generate","action":"Generate audio","description":"Creates audio from a text prompt"},{"name":"Transcribe a Recording","value":"transcribe","action":"Transcribe a recording","description":"Transcribes audio into text"},{"name":"Translate a Recording","value":"translate","action":"Translate a recording","description":"Translates audio into text in English"}],"default":"generate","displayOptions":{"show":{"resource":["audio"]}}},{"displayName":"OpenAI API limits the size of the audio file to 25 MB","name":"fileSizeLimitNotice","type":"notice","default":" ","displayOptions":{"show":{"resource":["audio"],"operation":["translate","transcribe"]}}},{"displayName":"Model","name":"model","type":"options","default":"tts-1","options":[{"name":"TTS-1","value":"tts-1"},{"name":"TTS-1-HD","value":"tts-1-hd"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Text Input","name":"input","type":"string","placeholder":"e.g. The quick brown fox jumped over the lazy dog","description":"The text to generate audio for. 
The maximum length is 4096 characters.","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Voice","name":"voice","type":"options","default":"alloy","description":"The voice to use when generating the audio","options":[{"name":"Alloy","value":"alloy"},{"name":"Echo","value":"echo"},{"name":"Fable","value":"fable"},{"name":"Nova","value":"nova"},{"name":"Onyx","value":"onyx"},{"name":"Shimmer","value":"shimmer"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Response Format","name":"response_format","type":"options","default":"mp3","options":[{"name":"MP3","value":"mp3"},{"name":"OPUS","value":"opus"},{"name":"AAC","value":"aac"},{"name":"FLAC","value":"flac"}]},{"displayName":"Audio Speed","name":"speed","type":"number","default":1,"typeOptions":{"minValue":0.25,"maxValue":4,"numberPrecision":1}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in"}],"displayOptions":{"show":{"operation":["generate"],"resource":["audio"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm","displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Language of the Audio File","name":"language","type":"string","description":"The language of the input audio. 
Supplying the input language in <a href=\"https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes\" target=\"_blank\">ISO-639-1</a> format will improve accuracy and latency.","default":""},{"displayName":"Output Randomness (Temperature)","name":"temperature","type":"number","default":0,"typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}}],"displayOptions":{"show":{"operation":["transcribe"],"resource":["audio"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","placeholder":"e.g. data","description":"Name of the binary property which contains the audio file in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm","displayOptions":{"show":{"operation":["translate"],"resource":["audio"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Output Randomness (Temperature)","name":"temperature","type":"number","default":0,"typeOptions":{"minValue":0,"maxValue":1,"numberPrecision":1}}],"displayOptions":{"show":{"operation":["translate"],"resource":["audio"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Delete a File","value":"deleteFile","action":"Delete a file","description":"Delete a file from the server"},{"name":"List Files","value":"list","action":"List files","description":"Returns a list of files that belong to the user's organization"},{"name":"Upload a File","value":"upload","action":"Upload a file","description":"Upload a file that can be used across various endpoints"}],"default":"upload","displayOptions":{"show":{"resource":["file"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","hint":"The name of the input field containing the binary file data to be processed","placeholder":"e.g. 
data","description":"Name of the binary property which contains the file. The size of individual files can be a maximum of 512 MB or 2 million tokens for Assistants.","displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Purpose","name":"purpose","type":"options","default":"assistants","description":"The intended purpose of the uploaded file, the 'Fine-tuning' only supports .jsonl files","options":[{"name":"Assistants","value":"assistants"},{"name":"Fine-Tune","value":"fine-tune"}]}],"displayOptions":{"show":{"operation":["upload"],"resource":["file"]}}},{"displayName":"File","name":"fileId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"fileSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","validation":[{"type":"regex","properties":{"regex":"file-[a-zA-Z0-9]","errorMessage":"Not a valid File ID"}}],"placeholder":"e.g. 
file-1234567890"}],"displayOptions":{"show":{"operation":["deleteFile"],"resource":["file"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Purpose","name":"purpose","type":"options","default":"any","description":"Only return files with the given purpose","options":[{"name":"Any [Default]","value":"any"},{"name":"Assistants","value":"assistants"},{"name":"Fine-Tune","value":"fine-tune"}]}],"displayOptions":{"show":{"operation":["list"],"resource":["file"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Analyze Image","value":"analyze","action":"Analyze image","description":"Take in images and answer questions about them"},{"name":"Generate an Image","value":"generate","action":"Generate an image","description":"Creates an image from a text prompt"}],"default":"generate","displayOptions":{"show":{"resource":["image"]}}},{"displayName":"Model","name":"model","type":"options","default":"dall-e-3","description":"The model to use for image generation","options":[{"name":"DALL·E 2","value":"dall-e-2"},{"name":"DALL·E 3","value":"dall-e-3"},{"name":"GPT Image 1","value":"gpt-image-1"}],"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Prompt","name":"prompt","type":"string","placeholder":"e.g. A cute cat eating a dinosaur","description":"A text description of the desired image(s). 
The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Number of Images","name":"n","default":1,"description":"Number of images to generate","type":"number","typeOptions":{"minValue":1,"maxValue":10},"displayOptions":{"show":{"/model":["dall-e-2"]}}},{"displayName":"Quality","name":"dalleQuality","type":"options","description":"The quality of the image that will be generated, HD creates images with finer details and greater consistency across the image","options":[{"name":"HD","value":"hd"},{"name":"Standard","value":"standard"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"standard"},{"displayName":"Quality","name":"quality","type":"options","description":"The quality of the image that will be generated, High creates images with finer details and greater consistency across the 
image","options":[{"name":"High","value":"high"},{"name":"Medium","value":"medium"},{"name":"Low","value":"low"}],"displayOptions":{"show":{"/model":["gpt-image-1"]}},"default":"medium"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"256x256","value":"256x256"},{"name":"512x512","value":"512x512"},{"name":"1024x1024","value":"1024x1024"}],"displayOptions":{"show":{"/model":["dall-e-2"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1792x1024","value":"1792x1024"},{"name":"1024x1792","value":"1024x1792"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"1024x1024"},{"displayName":"Resolution","name":"size","type":"options","options":[{"name":"1024x1024","value":"1024x1024"},{"name":"1024x1536","value":"1024x1536"},{"name":"1536x1024","value":"1536x1024"}],"displayOptions":{"show":{"/model":["gpt-image-1"]}},"default":"1024x1024"},{"displayName":"Style","name":"style","type":"options","options":[{"name":"Natural","value":"natural","description":"Produce more natural looking images"},{"name":"Vivid","value":"vivid","description":"Lean towards generating hyper-real and dramatic images"}],"displayOptions":{"show":{"/model":["dall-e-3"]}},"default":"vivid"},{"displayName":"Respond with Image URL(s)","name":"returnImageUrls","type":"boolean","default":false,"description":"Whether to return image URL(s) instead of binary file(s)","displayOptions":{"hide":{"/model":["gpt-image-1"]}}},{"displayName":"Put Output in Field","name":"binaryPropertyOutput","type":"string","default":"data","hint":"The name of the output field to put the binary file data in","displayOptions":{"show":{"returnImageUrls":[false]}}}],"displayOptions":{"show":{"operation":["generate"],"resource":["image"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From 
List","name":"list","type":"list","typeOptions":{"searchListMethod":"imageModelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.4}}],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Text Input","name":"text","type":"string","placeholder":"e.g. What's in this image?","default":"What's in this image?","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Type","name":"inputType","type":"options","default":"url","options":[{"name":"Image URL(s)","value":"url"},{"name":"Binary File(s)","value":"base64"}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"URL(s)","name":"imageUrls","type":"string","placeholder":"e.g. https://example.com/image.jpeg","description":"URL(s) of the image(s) to analyze, multiple URLs can be added separated by comma","default":"","displayOptions":{"show":{"inputType":["url"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Input Data Field Name","name":"binaryPropertyName","type":"string","default":"data","placeholder":"e.g. 
data","hint":"The name of the input field containing the binary file data to be processed","description":"Name of the binary property which contains the image(s)","displayOptions":{"show":{"inputType":["base64"],"operation":["analyze"],"resource":["image"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to simplify the response or not","displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Detail","name":"detail","type":"options","default":"auto","options":[{"name":"Auto","value":"auto","description":"Model will look at the image input size and decide if it should use the low or high setting"},{"name":"Low","value":"low","description":"Return faster responses and consume fewer tokens"},{"name":"High","value":"high","description":"Return more detailed responses, consumes more tokens"}]},{"displayName":"Length of Description (Max Tokens)","description":"Fewer tokens will result in shorter, less detailed image description","name":"maxTokens","type":"number","default":300,"typeOptions":{"minValue":1}}],"displayOptions":{"show":{"operation":["analyze"],"resource":["image"]}}},{"displayName":"Operation","name":"operation","type":"options","noDataExpression":true,"options":[{"name":"Message a Model","value":"message","action":"Message a model","description":"Create a completion with GPT 3, 4, etc."},{"name":"Classify Text for Violations","value":"classify","action":"Classify text for violations","description":"Check whether content complies with usage policies"}],"default":"message","displayOptions":{"show":{"resource":["text"]}}},{"displayName":"Text Input","name":"input","type":"string","placeholder":"e.g. 
Sample text goes here","description":"The input text to classify if it is violates the moderation policy","default":"","typeOptions":{"rows":2},"displayOptions":{"show":{"operation":["classify"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":false,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["classify"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Use Stable Model","name":"useStableModel","type":"boolean","default":false,"description":"Whether to use the stable version of the model instead of the latest version, accuracy may be slightly lower"}],"displayOptions":{"show":{"operation":["classify"],"resource":["text"]}}},{"displayName":"Model","name":"modelId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"modelSearch","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"e.g. gpt-4"}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Messages","name":"messages","type":"fixedCollection","typeOptions":{"sortable":true,"multipleValues":true},"placeholder":"Add Message","default":{"values":[{"content":""}]},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Prompt","name":"content","type":"string","description":"The content of the message to be send","default":"","placeholder":"e.g. 
Hello, how can you help me?","typeOptions":{"rows":2}},{"displayName":"Role","name":"role","type":"options","description":"Role in shaping the model's response, it tells the model how it should behave and interact with the user","options":[{"name":"User","value":"user","description":"Send a message as a user and get a response from the model"},{"name":"Assistant","value":"assistant","description":"Tell the model to adopt a specific tone or personality"},{"name":"System","value":"system","description":"Usually used to set the model's behavior or context for the next user message"}],"default":"user"}]}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Simplify Output","name":"simplify","type":"boolean","default":true,"description":"Whether to return a simplified version of the response instead of the raw data","displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Output Content as JSON","name":"jsonOutput","type":"boolean","description":"Whether to attempt to return the response in JSON format. 
Compatible with GPT-4 Turbo and all GPT-3.5 Turbo models newer than gpt-3.5-turbo-1106.","default":false,"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Hide Tools","name":"hideTools","type":"hidden","default":"hide","displayOptions":{"show":{"modelId":["gpt-3.5-turbo-16k-0613","dall-e-3","text-embedding-3-large","dall-e-2","whisper-1","tts-1-hd-1106","tts-1-hd","gpt-4-0314","text-embedding-3-small","gpt-4-32k-0314","gpt-3.5-turbo-0301","gpt-4-vision-preview","gpt-3.5-turbo-16k","gpt-3.5-turbo-instruct-0914","tts-1","davinci-002","gpt-3.5-turbo-instruct","babbage-002","tts-1-1106","text-embedding-ada-002"],"@version":[{"_cnd":{"gte":1.2}}],"operation":["message"],"resource":["text"]}}},{"displayName":"Connect your own custom n8n tools to this node on the canvas","name":"noticeTools","type":"notice","default":"","displayOptions":{"hide":{"hideTools":["hide"]},"show":{"operation":["message"],"resource":["text"]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequency_penalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":16,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Number of Completions","name":"n","default":1,"description":"How many completions to generate for each prompt. Note: Because this parameter generates many completions, it can quickly consume your token quota. 
Use carefully and ensure that you have reasonable settings for max_tokens and stop.","type":"number"},{"displayName":"Presence Penalty","name":"presence_penalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Output Randomness (Temperature)","name":"temperature","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Output Randomness (Top P)","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Reasoning Effort","name":"reasoning_effort","default":"medium","description":"Controls the amount of reasoning tokens to use. 
A value of \"low\" will favor speed and economical token usage, \"high\" will favor more complete reasoning at the cost of more tokens generated and slower responses.","type":"options","options":[{"name":"Low","value":"low","description":"Favors speed and economical token usage"},{"name":"Medium","value":"medium","description":"Balance between speed and reasoning accuracy"},{"name":"High","value":"high","description":"Favors more complete reasoning at the cost of more tokens generated and slower responses"}],"displayOptions":{"show":{"/modelId":[{"_cnd":{"regex":"(^o1([-\\d]+)?$)|(^o[3-9].*)|(^gpt-5.*)"}}]}}},{"displayName":"Max Tool Calls Iterations","name":"maxToolsIterations","type":"number","default":15,"description":"The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. Set to 0 for no limit.","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.5}}]}}}],"displayOptions":{"show":{"operation":["message"],"resource":["text"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/OpenAi/openAi.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/OpenAi/openAi.dark.svg"}},
|
|
10
|
+
{"displayName":"Microsoft Agent 365 Trigger","name":"microsoftAgent365Trigger","group":["trigger"],"description":"Trigger for Microsoft Agent 365","codex":{"categories":["Core Nodes"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.microsoftAgent365Trigger/"}]}},"version":[1,1.1],"defaults":{"name":"Microsoft Agent 365"},"inputs":"={{\n\t\t\t\t((hasOutputParser, needsFallback) => {\n\t\t\t\t\tfunction getInputs(hasMainInput, hasOutputParser, needsFallback) {\n const getInputData = (inputs) => {\n return inputs.map(({ type, filter, displayName, required }) => {\n const input = {\n type,\n displayName,\n required,\n maxConnections: ['ai_languageModel', 'ai_memory', 'ai_outputParser'].includes(type)\n ? 1\n : undefined,\n };\n if (filter) {\n input.filter = filter;\n }\n return input;\n });\n };\n let specialInputs = [\n {\n type: 'ai_languageModel',\n displayName: 'Chat Model',\n required: true,\n filter: {\n excludedNodes: [\n '@n8n/n8n-nodes-langchain.lmCohere',\n '@n8n/n8n-nodes-langchain.lmOllama',\n 'n8n/n8n-nodes-langchain.lmOpenAi',\n '@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference',\n ],\n },\n },\n {\n type: 'ai_languageModel',\n displayName: 'Fallback Model',\n required: true,\n filter: {\n excludedNodes: [\n '@n8n/n8n-nodes-langchain.lmCohere',\n '@n8n/n8n-nodes-langchain.lmOllama',\n 'n8n/n8n-nodes-langchain.lmOpenAi',\n '@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference',\n ],\n },\n },\n {\n displayName: 'Memory',\n type: 'ai_memory',\n },\n {\n displayName: 'Tool',\n type: 'ai_tool',\n },\n {\n displayName: 'Output Parser',\n type: 'ai_outputParser',\n },\n ];\n if (hasOutputParser === false) {\n specialInputs = specialInputs.filter((input) => input.type !== 'ai_outputParser');\n }\n if (needsFallback === false) {\n specialInputs = specialInputs.filter((input) => input.displayName !== 'Fallback Model');\n }\n const mainInputs = hasMainInput ? 
['main'] : [];\n return [...mainInputs, ...getInputData(specialInputs)];\n};\n\t\t\t\t\treturn getInputs(false, hasOutputParser, needsFallback);\n\t\t\t\t})($parameter.hasOutputParser === undefined || $parameter.hasOutputParser === true, $parameter.needsFallback !== undefined && $parameter.needsFallback === true)\n\t\t\t}}","outputs":["main"],"triggerPanel":false,"webhooks":[{"name":"default","httpMethod":"POST","responseMode":"onReceived","path":"webhook","ndvHideMethod":true},{"name":"default","httpMethod":"HEAD","responseMode":"onReceived","path":"webhook","ndvHideUrl":true,"ndvHideMethod":true}],"credentials":[{"name":"microsoftAgent365Api","required":true}],"properties":[{"displayName":"This is an early preview for building Agents with Microsoft Agent 365 and n8n. You need to be part of the <a href=\"https://adoption.microsoft.com/copilot/frontier-program/\" target=\"_blank\">Frontier preview program</a> to get early access to Microsoft Agent 365. <a href=\"https://github.com/microsoft/Agent365-Samples/tree/main/nodejs/n8n/sample-agent\" target=\"_blank\">Learn more</a>","name":"previewNotice","type":"notice","default":""},{"displayName":"System Prompt","name":"systemPrompt","type":"string","placeholder":"e.g. 
You are a friendly assistant that helps people find a weather forecast for a given time and place.","default":"","typeOptions":{"rows":4}},{"displayName":"Connect an <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_outputParser'>output parser</a> on the canvas to specify the output format you require","name":"notice","type":"notice","default":"","displayOptions":{"show":{"hasOutputParser":[true]}}},{"displayName":"Enable Fallback Model","name":"needsFallback","type":"boolean","default":false,"noDataExpression":true,"displayOptions":{"show":{"@version":[{"_cnd":{"gte":2.1}}]}}},{"displayName":"Connect an additional language model on the canvas to use it as a fallback if the main model fails","name":"fallbackNotice","type":"notice","default":"","displayOptions":{"show":{"needsFallback":[true]}}},{"displayName":"Enable Microsoft Work IQ Tools for A365","name":"useMcpTools","type":"boolean","default":false,"description":"Whether to allow the agent to use Microsoft MCP tools like Calendar, Email, and OneDrive to assist in completing tasks. 
Requires appropriate permissions in your Microsoft account."},{"displayName":"Tools to Include","name":"include","type":"options","default":"all","displayOptions":{"show":{"useMcpTools":[true]}},"options":[{"name":"All","value":"all"},{"name":"Selected","value":"selected"}]},{"displayName":"Tools to Include","name":"includeTools","type":"multiOptions","default":[],"noDataExpression":true,"options":[{"name":"Admin 365","value":"mcp_Admin365_GraphTools"},{"name":"Admin Tools","value":"mcp_AdminTools"},{"name":"Calendar","value":"mcp_CalendarTools"},{"name":"DA Search","value":"mcp_DASearch"},{"name":"Excel","value":"mcp_ExcelServer"},{"name":"Knowledge","value":"mcp_KnowledgeTools"},{"name":"M365 Copilot","value":"mcp_M365Copilot"},{"name":"Mail","value":"mcp_MailTools"},{"name":"OneDrive","value":"mcp_OneDriveRemoteServer"},{"name":"OneDrive & SharePoint","value":"mcp_ODSPRemoteServer"},{"name":"Planner","value":"mcp_PlannerServer"},{"name":"SharePoint","value":"mcp_SharePointRemoteServer"},{"name":"SharePoint Lists","value":"mcp_SharePointListsTools"},{"name":"Task Personalization","value":"mcp_TaskPersonalizationServer"},{"name":"Teams","value":"mcp_TeamsServer"},{"name":"Teams Canary","value":"mcp_TeamsCanaryServer"},{"name":"Teams V1","value":"mcp_TeamsServerV1"},{"name":"Web Search","value":"mcp_WebSearchTools"},{"name":"Windows 365 Computer Use","value":"mcp_W365ComputerUse"},{"name":"Word","value":"mcp_WordServer"}],"displayOptions":{"show":{"useMcpTools":[true],"include":["selected"]}}},{"displayName":"Require Specific Output Format","name":"hasOutputParser","type":"boolean","default":false,"noDataExpression":true},{"displayName":"Options","name":"options","type":"collection","default":{},"placeholder":"Add Option","options":[{"displayName":"Max Iterations","name":"maxIterations","type":"number","default":10,"description":"The maximum number of iterations the agent will run before stopping"},{"displayName":"Welcome 
Message","name":"welcomeMessage","type":"string","placeholder":"e.g. Hello! I'm here to help you!","default":"Hello! I'm here to help you!"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/Microsoft/Agent365.svg"},
|
|
11
11
|
{"displayName":"AI Agent","name":"agent","icon":"node:ai-agent","iconColor":"black","group":["transform"],"description":"Generates an action plan and executes it. Can use external tools.","codex":{"alias":["LangChain","Chat","Conversational","Plan and Execute","ReAct","Tools"],"categories":["AI"],"subcategories":{"AI":["Agents","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.agent/"}]}},"defaultVersion":3.1,"builderHint":{"relatedNodes":[{"nodeType":"n8n-nodes-base.aggregate","relationHint":"Use to combine multiple items together before the agent"},{"nodeType":"@n8n/n8n-nodes-langchain.outputParserStructured","relationHint":"Attach for structured output; reference fields as $json.output.fieldName for use in subsequent nodes (conditions, storing data)"},{"nodeType":"@n8n/n8n-nodes-langchain.agentTool","relationHint":"For multi-agent systems using orchestrator pattern"},{"nodeType":"@n8n/n8n-nodes-langchain.memoryBufferWindow","relationHint":"Required for conversational workflows - connect memory to every agent that needs to recall previous messages in the conversation"}],"inputs":{"ai_languageModel":{"required":true},"ai_memory":{"required":false},"ai_tool":{"required":false},"ai_outputParser":{"required":false,"displayOptions":{"show":{"hasOutputParser":[true]}}}}},"version":[3,3.1],"defaults":{"name":"AI Agent","color":"#404040"},"inputs":"={{\n\t\t\t\t((hasOutputParser, needsFallback) => {\n\t\t\t\t\tfunction getInputs(hasMainInput, hasOutputParser, needsFallback) {\n const getInputData = (inputs) => {\n return inputs.map(({ type, filter, displayName, required }) => {\n const input = {\n type,\n displayName,\n required,\n maxConnections: ['ai_languageModel', 'ai_memory', 'ai_outputParser'].includes(type)\n ? 
1\n : undefined,\n };\n if (filter) {\n input.filter = filter;\n }\n return input;\n });\n };\n let specialInputs = [\n {\n type: 'ai_languageModel',\n displayName: 'Chat Model',\n required: true,\n filter: {\n excludedNodes: [\n '@n8n/n8n-nodes-langchain.lmCohere',\n '@n8n/n8n-nodes-langchain.lmOllama',\n 'n8n/n8n-nodes-langchain.lmOpenAi',\n '@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference',\n ],\n },\n },\n {\n type: 'ai_languageModel',\n displayName: 'Fallback Model',\n required: true,\n filter: {\n excludedNodes: [\n '@n8n/n8n-nodes-langchain.lmCohere',\n '@n8n/n8n-nodes-langchain.lmOllama',\n 'n8n/n8n-nodes-langchain.lmOpenAi',\n '@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference',\n ],\n },\n },\n {\n displayName: 'Memory',\n type: 'ai_memory',\n },\n {\n displayName: 'Tool',\n type: 'ai_tool',\n },\n {\n displayName: 'Output Parser',\n type: 'ai_outputParser',\n },\n ];\n if (hasOutputParser === false) {\n specialInputs = specialInputs.filter((input) => input.type !== 'ai_outputParser');\n }\n if (needsFallback === false) {\n specialInputs = specialInputs.filter((input) => input.displayName !== 'Fallback Model');\n }\n const mainInputs = hasMainInput ? 
['main'] : [];\n return [...mainInputs, ...getInputData(specialInputs)];\n};\n\t\t\t\t\treturn getInputs(true, hasOutputParser, needsFallback);\n\t\t\t\t})($parameter.hasOutputParser === undefined || $parameter.hasOutputParser === true, $parameter.needsFallback !== undefined && $parameter.needsFallback === true)\n\t\t\t}}","outputs":["main"],"properties":[{"displayName":"Tip: Get a feel for agents with our quick <a href=\"https://docs.n8n.io/advanced-ai/intro-tutorial/\" target=\"_blank\">tutorial</a> or see an <a href=\"/workflows/templates/1954\" target=\"_blank\">example</a> of how this node works","name":"aiAgentStarterCallout","type":"callout","default":""},{"displayName":"Source for Prompt (User Message)","name":"promptType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"auto","description":"Looks for an input field called 'chatInput' that is coming from a directly connected Chat Trigger"},{"name":"Connected Guardrails Node","value":"guardrails","description":"Looks for an input field called 'guardrailsInput' that is coming from a directly connected Guardrails Node"},{"name":"Define below","value":"define","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"auto","displayOptions":{"show":{"@version":[{"_cnd":{"lt":3.1}}]}}},{"displayName":"Source for Prompt (User Message)","name":"promptType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"auto","description":"Looks for an input field called 'chatInput' that is coming from a directly connected Chat Trigger"},{"name":"Define below","value":"define","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"auto","builderHint":{"message":"Use 'auto' when following a chat trigger, 'define' when custom prompt needed"},"displayOptions":{"show":{"@version":[{"_cnd":{"gte":3.1}}]}}},{"displayName":"Prompt (User 
Message)","name":"text","type":"string","required":true,"default":"={{ $json.guardrailsInput }}","typeOptions":{"rows":2},"disabledOptions":{"show":{"promptType":["guardrails"]}},"displayOptions":{"show":{"promptType":["guardrails"]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","required":true,"default":"={{ $json.chatInput }}","typeOptions":{"rows":2},"disabledOptions":{"show":{"promptType":["auto"]}},"displayOptions":{"show":{"promptType":["auto"]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","required":true,"default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2},"builderHint":{"placeholderSupported":false,"message":"Use expressions to include dynamic data from previous nodes (e.g., \"={{ $json.input }}\"). Static text prompts ignore incoming data."},"displayOptions":{"show":{"promptType":["define"]}}},{"displayName":"Require Specific Output Format","name":"hasOutputParser","type":"boolean","default":false,"noDataExpression":true},{"displayName":"Connect an <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_outputParser'>output parser</a> on the canvas to specify the output format you require","name":"notice","type":"notice","default":"","displayOptions":{"show":{"hasOutputParser":[true]}}},{"displayName":"Enable Fallback Model","name":"needsFallback","type":"boolean","default":false,"noDataExpression":true},{"displayName":"Connect an additional language model on the canvas to use it as a fallback if the main model fails","name":"fallbackNotice","type":"notice","default":"","displayOptions":{"show":{"needsFallback":[true]}}},{"displayName":"Options","name":"options","type":"collection","default":{},"placeholder":"Add Option","options":[{"displayName":"System Message","name":"systemMessage","type":"string","default":"You are a helpful assistant","description":"The message that will be sent to the agent before the conversation 
starts","builderHint":{"message":"Must include: agent's purpose, exact names of connected tools, and response instructions"},"typeOptions":{"rows":6}},{"displayName":"Max Iterations","name":"maxIterations","type":"number","default":10,"description":"The maximum number of iterations the agent will run before stopping"},{"displayName":"Return Intermediate Steps","name":"returnIntermediateSteps","type":"boolean","default":false,"description":"Whether or not the output should include intermediate steps the agent took"},{"displayName":"Automatically Passthrough Binary Images","name":"passthroughBinaryImages","type":"boolean","default":true,"description":"Whether or not binary images should be automatically passed through to the agent as image type messages"},{"displayName":"Tracing Metadata","name":"tracingMetadata","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add Metadata","description":"Custom metadata added to tracing events","options":[{"displayName":"Metadata","name":"values","values":[{"displayName":"Key","name":"key","type":"string","default":""},{"displayName":"Type","name":"type","type":"options","description":"The field value 
type","options":[{"name":"Array","value":"arrayValue"},{"name":"Boolean","value":"booleanValue"},{"name":"Number","value":"numberValue"},{"name":"Object","value":"objectValue"},{"name":"String","value":"stringValue"}],"default":"stringValue"},{"displayName":"Value","name":"stringValue","type":"string","default":"","displayOptions":{"show":{"type":["stringValue"]}}},{"displayName":"Value","name":"numberValue","type":"string","default":"","displayOptions":{"show":{"type":["numberValue"]}},"validateType":"number"},{"displayName":"Value","name":"booleanValue","type":"options","default":"true","options":[{"name":"True","value":"true"},{"name":"False","value":"false"}],"displayOptions":{"show":{"type":["booleanValue"]}}},{"displayName":"Value","name":"arrayValue","type":"string","default":"","placeholder":"e.g. [ arrayItem1, arrayItem2, arrayItem3 ]","displayOptions":{"show":{"type":["arrayValue"]}},"validateType":"array"},{"displayName":"Value","name":"objectValue","type":"json","default":"={}","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["objectValue"]}},"validateType":"object"}]}]},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. 
Defaults to true."},{"displayName":"Enable Streaming","name":"enableStreaming","type":"boolean","default":true,"description":"Whether this agent will stream the response in real-time as it generates text"},{"displayName":"Batch Processing","name":"batching","type":"collection","placeholder":"Add Batch Processing Option","description":"Batch processing options for rate limiting","default":{},"options":[{"displayName":"Batch Size","name":"batchSize","default":1,"type":"number","description":"How many items to process in parallel. This is useful for rate limiting, but might impact the log output ordering."},{"displayName":"Delay Between Batches","name":"delayBetweenBatches","default":0,"type":"number","description":"Delay in milliseconds between batches. This is useful for rate limiting."}]},{"displayName":"Max Tokens To Read From Memory","name":"maxTokensFromMemory","type":"hidden","default":0,"description":"The maximum number of tokens to read from the chat memory history. Set to 0 to read all history."}]}],"hints":[{"message":"You are using streaming responses. Make sure to set the response mode to \"Streaming Response\" on the connected trigger node.","type":"warning","location":"outputPane","whenToDisplay":"afterExecution","displayCondition":"={{ $parameter[\"enableStreaming\"] === true }}"}]},
|
|
12
12
|
{"displayName":"AI Agent","name":"agent","icon":"node:ai-agent","iconColor":"black","group":["transform"],"description":"Generates an action plan and executes it. Can use external tools.","codex":{"alias":["LangChain","Chat","Conversational","Plan and Execute","ReAct","Tools"],"categories":["AI"],"subcategories":{"AI":["Agents","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.agent/"}]}},"defaultVersion":3.1,"builderHint":{"relatedNodes":[{"nodeType":"n8n-nodes-base.aggregate","relationHint":"Use to combine multiple items together before the agent"},{"nodeType":"@n8n/n8n-nodes-langchain.outputParserStructured","relationHint":"Attach for structured output; reference fields as $json.output.fieldName for use in subsequent nodes (conditions, storing data)"},{"nodeType":"@n8n/n8n-nodes-langchain.agentTool","relationHint":"For multi-agent systems using orchestrator pattern"},{"nodeType":"@n8n/n8n-nodes-langchain.memoryBufferWindow","relationHint":"Required for conversational workflows - connect memory to every agent that needs to recall previous messages in the conversation"}],"inputs":{"ai_languageModel":{"required":true},"ai_memory":{"required":false},"ai_tool":{"required":false},"ai_outputParser":{"required":false,"displayOptions":{"show":{"hasOutputParser":[true]}}}}},"version":[2,2.1,2.2],"defaults":{"name":"AI Agent","color":"#404040"},"inputs":"={{\n\t\t\t\t((hasOutputParser, needsFallback) => {\n\t\t\t\t\tfunction getInputs(hasMainInput, hasOutputParser, needsFallback) {\n const getInputData = (inputs) => {\n return inputs.map(({ type, filter, displayName, required }) => {\n const input = {\n type,\n displayName,\n required,\n maxConnections: ['ai_languageModel', 'ai_memory', 'ai_outputParser'].includes(type)\n ? 
1\n : undefined,\n };\n if (filter) {\n input.filter = filter;\n }\n return input;\n });\n };\n let specialInputs = [\n {\n type: 'ai_languageModel',\n displayName: 'Chat Model',\n required: true,\n filter: {\n excludedNodes: [\n '@n8n/n8n-nodes-langchain.lmCohere',\n '@n8n/n8n-nodes-langchain.lmOllama',\n 'n8n/n8n-nodes-langchain.lmOpenAi',\n '@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference',\n ],\n },\n },\n {\n type: 'ai_languageModel',\n displayName: 'Fallback Model',\n required: true,\n filter: {\n excludedNodes: [\n '@n8n/n8n-nodes-langchain.lmCohere',\n '@n8n/n8n-nodes-langchain.lmOllama',\n 'n8n/n8n-nodes-langchain.lmOpenAi',\n '@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference',\n ],\n },\n },\n {\n displayName: 'Memory',\n type: 'ai_memory',\n },\n {\n displayName: 'Tool',\n type: 'ai_tool',\n },\n {\n displayName: 'Output Parser',\n type: 'ai_outputParser',\n },\n ];\n if (hasOutputParser === false) {\n specialInputs = specialInputs.filter((input) => input.type !== 'ai_outputParser');\n }\n if (needsFallback === false) {\n specialInputs = specialInputs.filter((input) => input.displayName !== 'Fallback Model');\n }\n const mainInputs = hasMainInput ? 
['main'] : [];\n return [...mainInputs, ...getInputData(specialInputs)];\n};\n\t\t\t\t\treturn getInputs(true, hasOutputParser, needsFallback);\n\t\t\t\t})($parameter.hasOutputParser === undefined || $parameter.hasOutputParser === true, $parameter.needsFallback !== undefined && $parameter.needsFallback === true)\n\t\t\t}}","outputs":["main"],"properties":[{"displayName":"Tip: Get a feel for agents with our quick <a href=\"https://docs.n8n.io/advanced-ai/intro-tutorial/\" target=\"_blank\">tutorial</a> or see an <a href=\"/workflows/templates/1954\" target=\"_blank\">example</a> of how this node works","name":"aiAgentStarterCallout","type":"callout","default":""},{"displayName":"Source for Prompt (User Message)","name":"promptType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"auto","description":"Looks for an input field called 'chatInput' that is coming from a directly connected Chat Trigger"},{"name":"Connected Guardrails Node","value":"guardrails","description":"Looks for an input field called 'guardrailsInput' that is coming from a directly connected Guardrails Node"},{"name":"Define below","value":"define","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"auto"},{"displayName":"Prompt (User Message)","name":"text","type":"string","required":true,"default":"={{ $json.guardrailsInput }}","typeOptions":{"rows":2},"disabledOptions":{"show":{"promptType":["guardrails"]}},"displayOptions":{"show":{"promptType":["guardrails"]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","required":true,"default":"={{ $json.chatInput }}","typeOptions":{"rows":2},"disabledOptions":{"show":{"promptType":["auto"]}},"displayOptions":{"show":{"promptType":["auto"]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","required":true,"default":"","placeholder":"e.g. 
Hello, how can you help me?","typeOptions":{"rows":2},"builderHint":{"placeholderSupported":false,"message":"Use expressions to include dynamic data from previous nodes (e.g., \"={{ $json.input }}\"). Static text prompts ignore incoming data."},"displayOptions":{"show":{"promptType":["define"]}}},{"displayName":"Require Specific Output Format","name":"hasOutputParser","type":"boolean","default":false,"noDataExpression":true},{"displayName":"Connect an <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_outputParser'>output parser</a> on the canvas to specify the output format you require","name":"notice","type":"notice","default":"","displayOptions":{"show":{"hasOutputParser":[true]}}},{"displayName":"Enable Fallback Model","name":"needsFallback","type":"boolean","default":false,"noDataExpression":true,"displayOptions":{"show":{"@version":[{"_cnd":{"gte":2.1}}]}}},{"displayName":"Connect an additional language model on the canvas to use it as a fallback if the main model fails","name":"fallbackNotice","type":"notice","default":"","displayOptions":{"show":{"needsFallback":[true]}}},{"displayName":"Options","name":"options","type":"collection","default":{},"placeholder":"Add Option","options":[{"displayName":"System Message","name":"systemMessage","type":"string","default":"You are a helpful assistant","description":"The message that will be sent to the agent before the conversation starts","builderHint":{"message":"Must include: agent's purpose, exact names of connected tools, and response instructions"},"typeOptions":{"rows":6}},{"displayName":"Max Iterations","name":"maxIterations","type":"number","default":10,"description":"The maximum number of iterations the agent will run before stopping"},{"displayName":"Return Intermediate Steps","name":"returnIntermediateSteps","type":"boolean","default":false,"description":"Whether or not the output should include intermediate steps the agent took"},{"displayName":"Automatically Passthrough 
Binary Images","name":"passthroughBinaryImages","type":"boolean","default":true,"description":"Whether or not binary images should be automatically passed through to the agent as image type messages"},{"displayName":"Tracing Metadata","name":"tracingMetadata","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add Metadata","description":"Custom metadata added to tracing events","options":[{"displayName":"Metadata","name":"values","values":[{"displayName":"Key","name":"key","type":"string","default":""},{"displayName":"Type","name":"type","type":"options","description":"The field value type","options":[{"name":"Array","value":"arrayValue"},{"name":"Boolean","value":"booleanValue"},{"name":"Number","value":"numberValue"},{"name":"Object","value":"objectValue"},{"name":"String","value":"stringValue"}],"default":"stringValue"},{"displayName":"Value","name":"stringValue","type":"string","default":"","displayOptions":{"show":{"type":["stringValue"]}}},{"displayName":"Value","name":"numberValue","type":"string","default":"","displayOptions":{"show":{"type":["numberValue"]}},"validateType":"number"},{"displayName":"Value","name":"booleanValue","type":"options","default":"true","options":[{"name":"True","value":"true"},{"name":"False","value":"false"}],"displayOptions":{"show":{"type":["booleanValue"]}}},{"displayName":"Value","name":"arrayValue","type":"string","default":"","placeholder":"e.g. 
[ arrayItem1, arrayItem2, arrayItem3 ]","displayOptions":{"show":{"type":["arrayValue"]}},"validateType":"array"},{"displayName":"Value","name":"objectValue","type":"json","default":"={}","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["objectValue"]}},"validateType":"object"}]}]},{"displayName":"Batch Processing","name":"batching","type":"collection","placeholder":"Add Batch Processing Option","description":"Batch processing options for rate limiting","default":{},"options":[{"displayName":"Batch Size","name":"batchSize","default":1,"type":"number","description":"How many items to process in parallel. This is useful for rate limiting, but might impact the log output ordering."},{"displayName":"Delay Between Batches","name":"delayBetweenBatches","default":0,"type":"number","description":"Delay in milliseconds between batches. This is useful for rate limiting."}]},{"displayName":"Enable Streaming","name":"enableStreaming","type":"boolean","default":true,"description":"Whether this agent will stream the response in real-time as it generates text"}],"displayOptions":{"hide":{"@version":[{"_cnd":{"lt":2.2}}]}}},{"displayName":"Options","name":"options","type":"collection","default":{},"placeholder":"Add Option","options":[{"displayName":"System Message","name":"systemMessage","type":"string","default":"You are a helpful assistant","description":"The message that will be sent to the agent before the conversation starts","builderHint":{"message":"Must include: agent's purpose, exact names of connected tools, and response instructions"},"typeOptions":{"rows":6}},{"displayName":"Max Iterations","name":"maxIterations","type":"number","default":10,"description":"The maximum number of iterations the agent will run before stopping"},{"displayName":"Return Intermediate Steps","name":"returnIntermediateSteps","type":"boolean","default":false,"description":"Whether or not the output should include intermediate steps the agent took"},{"displayName":"Automatically 
Passthrough Binary Images","name":"passthroughBinaryImages","type":"boolean","default":true,"description":"Whether or not binary images should be automatically passed through to the agent as image type messages"},{"displayName":"Tracing Metadata","name":"tracingMetadata","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add Metadata","description":"Custom metadata added to tracing events","options":[{"displayName":"Metadata","name":"values","values":[{"displayName":"Key","name":"key","type":"string","default":""},{"displayName":"Type","name":"type","type":"options","description":"The field value type","options":[{"name":"Array","value":"arrayValue"},{"name":"Boolean","value":"booleanValue"},{"name":"Number","value":"numberValue"},{"name":"Object","value":"objectValue"},{"name":"String","value":"stringValue"}],"default":"stringValue"},{"displayName":"Value","name":"stringValue","type":"string","default":"","displayOptions":{"show":{"type":["stringValue"]}}},{"displayName":"Value","name":"numberValue","type":"string","default":"","displayOptions":{"show":{"type":["numberValue"]}},"validateType":"number"},{"displayName":"Value","name":"booleanValue","type":"options","default":"true","options":[{"name":"True","value":"true"},{"name":"False","value":"false"}],"displayOptions":{"show":{"type":["booleanValue"]}}},{"displayName":"Value","name":"arrayValue","type":"string","default":"","placeholder":"e.g. 
[ arrayItem1, arrayItem2, arrayItem3 ]","displayOptions":{"show":{"type":["arrayValue"]}},"validateType":"array"},{"displayName":"Value","name":"objectValue","type":"json","default":"={}","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["objectValue"]}},"validateType":"object"}]}]},{"displayName":"Batch Processing","name":"batching","type":"collection","placeholder":"Add Batch Processing Option","description":"Batch processing options for rate limiting","default":{},"options":[{"displayName":"Batch Size","name":"batchSize","default":1,"type":"number","description":"How many items to process in parallel. This is useful for rate limiting, but might impact the log output ordering."},{"displayName":"Delay Between Batches","name":"delayBetweenBatches","default":0,"type":"number","description":"Delay in milliseconds between batches. This is useful for rate limiting."}]}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":2.2}}]}}}],"hints":[{"message":"You are using streaming responses. Make sure to set the response mode to \"Streaming Response\" on the connected trigger node.","type":"warning","location":"outputPane","whenToDisplay":"afterExecution","displayCondition":"={{ $parameter[\"enableStreaming\"] === true }}"}]},
|
|
13
13
|
{"version":[1,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8,1.9],"displayName":"AI Agent","name":"agent","icon":"node:ai-agent","iconColor":"black","group":["transform"],"description":"Generates an action plan and executes it. Can use external tools.","codex":{"alias":["LangChain","Chat","Conversational","Plan and Execute","ReAct","Tools"],"categories":["AI"],"subcategories":{"AI":["Agents","Root Nodes"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.agent/"}]}},"defaultVersion":3.1,"builderHint":{"relatedNodes":[{"nodeType":"n8n-nodes-base.aggregate","relationHint":"Use to combine multiple items together before the agent"},{"nodeType":"@n8n/n8n-nodes-langchain.outputParserStructured","relationHint":"Attach for structured output; reference fields as $json.output.fieldName for use in subsequent nodes (conditions, storing data)"},{"nodeType":"@n8n/n8n-nodes-langchain.agentTool","relationHint":"For multi-agent systems using orchestrator pattern"},{"nodeType":"@n8n/n8n-nodes-langchain.memoryBufferWindow","relationHint":"Required for conversational workflows - connect memory to every agent that needs to recall previous messages in the conversation"}],"inputs":{"ai_languageModel":{"required":true},"ai_memory":{"required":false},"ai_tool":{"required":false},"ai_outputParser":{"required":false,"displayOptions":{"show":{"hasOutputParser":[true]}}}}},"defaults":{"name":"AI Agent","color":"#404040"},"inputs":"={{\n\t\t\t\t((agent, hasOutputParser) => {\n\t\t\t\t\tfunction getInputs(agent, hasOutputParser) {\n const getInputData = (inputs) => {\n const displayNames = {\n ai_languageModel: 'Model',\n ai_memory: 'Memory',\n ai_tool: 'Tool',\n ai_outputParser: 'Output Parser',\n };\n return inputs.map(({ type, filter }) => {\n const isModelType = type === 'ai_languageModel';\n let displayName = type in displayNames ? 
displayNames[type] : undefined;\n if (isModelType &&\n ['openAiFunctionsAgent', 'toolsAgent', 'conversationalAgent'].includes(agent)) {\n displayName = 'Chat Model';\n }\n const input = {\n type,\n displayName,\n required: isModelType,\n maxConnections: ['ai_languageModel', 'ai_memory', 'ai_outputParser'].includes(type)\n ? 1\n : undefined,\n };\n if (filter) {\n input.filter = filter;\n }\n return input;\n });\n };\n let specialInputs = [];\n if (agent === 'conversationalAgent') {\n specialInputs = [\n {\n type: 'ai_languageModel',\n filter: {\n nodes: [\n '@n8n/n8n-nodes-langchain.lmChatAnthropic',\n '@n8n/n8n-nodes-langchain.lmChatAwsBedrock',\n '@n8n/n8n-nodes-langchain.lmChatGroq',\n '@n8n/n8n-nodes-langchain.lmChatLemonade',\n '@n8n/n8n-nodes-langchain.lmChatOllama',\n '@n8n/n8n-nodes-langchain.lmChatOpenAi',\n '@n8n/n8n-nodes-langchain.lmChatGoogleGemini',\n '@n8n/n8n-nodes-langchain.lmChatGoogleVertex',\n '@n8n/n8n-nodes-langchain.lmChatMistralCloud',\n '@n8n/n8n-nodes-langchain.lmChatMinimax',\n '@n8n/n8n-nodes-langchain.lmChatMoonshot',\n '@n8n/n8n-nodes-langchain.lmChatAzureOpenAi',\n '@n8n/n8n-nodes-langchain.lmChatDeepSeek',\n '@n8n/n8n-nodes-langchain.lmChatOpenRouter',\n '@n8n/n8n-nodes-langchain.lmChatVercelAiGateway',\n '@n8n/n8n-nodes-langchain.lmChatXAiGrok',\n '@n8n/n8n-nodes-langchain.modelSelector',\n ],\n },\n },\n {\n type: 'ai_memory',\n },\n {\n type: 'ai_tool',\n },\n {\n type: 'ai_outputParser',\n },\n ];\n }\n else if (agent === 'toolsAgent') {\n specialInputs = [\n {\n type: 'ai_languageModel',\n filter: {\n nodes: [\n '@n8n/n8n-nodes-langchain.lmChatAnthropic',\n '@n8n/n8n-nodes-langchain.lmChatAzureOpenAi',\n '@n8n/n8n-nodes-langchain.lmChatAwsBedrock',\n '@n8n/n8n-nodes-langchain.lmChatLemonade',\n '@n8n/n8n-nodes-langchain.lmChatMistralCloud',\n '@n8n/n8n-nodes-langchain.lmChatMinimax',\n '@n8n/n8n-nodes-langchain.lmChatMoonshot',\n '@n8n/n8n-nodes-langchain.lmChatOllama',\n '@n8n/n8n-nodes-langchain.lmChatOpenAi',\n 
'@n8n/n8n-nodes-langchain.lmChatGroq',\n '@n8n/n8n-nodes-langchain.lmChatGoogleVertex',\n '@n8n/n8n-nodes-langchain.lmChatGoogleGemini',\n '@n8n/n8n-nodes-langchain.lmChatDeepSeek',\n '@n8n/n8n-nodes-langchain.lmChatOpenRouter',\n '@n8n/n8n-nodes-langchain.lmChatVercelAiGateway',\n '@n8n/n8n-nodes-langchain.lmChatXAiGrok',\n ],\n },\n },\n {\n type: 'ai_memory',\n },\n {\n type: 'ai_tool',\n required: true,\n },\n {\n type: 'ai_outputParser',\n },\n ];\n }\n else if (agent === 'openAiFunctionsAgent') {\n specialInputs = [\n {\n type: 'ai_languageModel',\n filter: {\n nodes: [\n '@n8n/n8n-nodes-langchain.lmChatOpenAi',\n '@n8n/n8n-nodes-langchain.lmChatAzureOpenAi',\n ],\n },\n },\n {\n type: 'ai_memory',\n },\n {\n type: 'ai_tool',\n required: true,\n },\n {\n type: 'ai_outputParser',\n },\n ];\n }\n else if (agent === 'reActAgent') {\n specialInputs = [\n {\n type: 'ai_languageModel',\n },\n {\n type: 'ai_tool',\n },\n {\n type: 'ai_outputParser',\n },\n ];\n }\n else if (agent === 'sqlAgent') {\n specialInputs = [\n {\n type: 'ai_languageModel',\n },\n {\n type: 'ai_memory',\n },\n ];\n }\n else if (agent === 'planAndExecuteAgent') {\n specialInputs = [\n {\n type: 'ai_languageModel',\n },\n {\n type: 'ai_tool',\n },\n {\n type: 'ai_outputParser',\n },\n ];\n }\n if (hasOutputParser === false) {\n specialInputs = specialInputs.filter((input) => input.type !== 'ai_outputParser');\n }\n return ['main', ...getInputData(specialInputs)];\n};\n\t\t\t\t\treturn getInputs(agent, hasOutputParser)\n\t\t\t\t})($parameter.agent, $parameter.hasOutputParser === undefined || $parameter.hasOutputParser === true)\n\t\t\t}}","outputs":["main"],"credentials":[{"name":"mySql","required":true,"testedBy":"mysqlConnectionTest","displayOptions":{"show":{"agent":["sqlAgent"],"/dataSource":["mysql"]}}},{"name":"postgres","required":true,"displayOptions":{"show":{"agent":["sqlAgent"],"/dataSource":["postgres"]}}}],"properties":[{"displayName":"Tip: Get a feel for agents with our quick <a 
href=\"https://docs.n8n.io/advanced-ai/intro-tutorial/\" target=\"_blank\">tutorial</a> or see an <a href=\"/templates/1954\" target=\"_blank\">example</a> of how this node works","name":"aiAgentStarterCallout","type":"callout","default":"","displayOptions":{"show":{"agent":["conversationalAgent","toolsAgent"]}}},{"displayName":"This node is using Agent that has been deprecated. Please switch to using 'Tools Agent' instead.","name":"deprecated","type":"notice","default":"","displayOptions":{"show":{"agent":["conversationalAgent","openAiFunctionsAgent","planAndExecuteAgent","reActAgent","sqlAgent"]}}},{"displayName":"Agent","name":"agent","type":"options","noDataExpression":true,"options":[{"name":"Conversational Agent","value":"conversationalAgent","description":"Describes tools in the system prompt and parses JSON responses for tool calls. More flexible but potentially less reliable than the Tools Agent. Suitable for simpler interactions or with models not supporting structured schemas."},{"name":"OpenAI Functions Agent","value":"openAiFunctionsAgent","description":"Leverages OpenAI's function calling capabilities to precisely select and execute tools. Excellent for tasks requiring structured outputs when working with OpenAI models."},{"name":"Plan and Execute Agent","value":"planAndExecuteAgent","description":"Creates a high-level plan for complex tasks and then executes each step. Suitable for multi-stage problems or when a strategic approach is needed."},{"name":"ReAct Agent","value":"reActAgent","description":"Combines reasoning and action in an iterative process. Effective for tasks that require careful analysis and step-by-step problem-solving."},{"name":"SQL Agent","value":"sqlAgent","description":"Specializes in interacting with SQL databases. 
Ideal for data analysis tasks, generating queries, or extracting insights from structured data."}],"default":"conversationalAgent","displayOptions":{"show":{"@version":[{"_cnd":{"lte":1.5}}]}}},{"displayName":"Agent","name":"agent","type":"options","noDataExpression":true,"options":[{"name":"Tools Agent","value":"toolsAgent","description":"Utilizes structured tool schemas for precise and reliable tool selection and execution. Recommended for complex tasks requiring accurate and consistent tool usage, but only usable with models that support tool calling."},{"name":"Conversational Agent","value":"conversationalAgent","description":"Describes tools in the system prompt and parses JSON responses for tool calls. More flexible but potentially less reliable than the Tools Agent. Suitable for simpler interactions or with models not supporting structured schemas."},{"name":"OpenAI Functions Agent","value":"openAiFunctionsAgent","description":"Leverages OpenAI's function calling capabilities to precisely select and execute tools. Excellent for tasks requiring structured outputs when working with OpenAI models."},{"name":"Plan and Execute Agent","value":"planAndExecuteAgent","description":"Creates a high-level plan for complex tasks and then executes each step. Suitable for multi-stage problems or when a strategic approach is needed."},{"name":"ReAct Agent","value":"reActAgent","description":"Combines reasoning and action in an iterative process. Effective for tasks that require careful analysis and step-by-step problem-solving."},{"name":"SQL Agent","value":"sqlAgent","description":"Specializes in interacting with SQL databases. 
Ideal for data analysis tasks, generating queries, or extracting insights from structured data."}],"default":"toolsAgent","displayOptions":{"show":{"@version":[{"_cnd":{"between":{"from":1.6,"to":1.7}}}]}}},{"displayName":"Agent","name":"agent","type":"hidden","noDataExpression":true,"options":[{"name":"Tools Agent","value":"toolsAgent","description":"Utilizes structured tool schemas for precise and reliable tool selection and execution. Recommended for complex tasks requiring accurate and consistent tool usage, but only usable with models that support tool calling."},{"name":"Conversational Agent","value":"conversationalAgent","description":"Describes tools in the system prompt and parses JSON responses for tool calls. More flexible but potentially less reliable than the Tools Agent. Suitable for simpler interactions or with models not supporting structured schemas."},{"name":"OpenAI Functions Agent","value":"openAiFunctionsAgent","description":"Leverages OpenAI's function calling capabilities to precisely select and execute tools. Excellent for tasks requiring structured outputs when working with OpenAI models."},{"name":"Plan and Execute Agent","value":"planAndExecuteAgent","description":"Creates a high-level plan for complex tasks and then executes each step. Suitable for multi-stage problems or when a strategic approach is needed."},{"name":"ReAct Agent","value":"reActAgent","description":"Combines reasoning and action in an iterative process. Effective for tasks that require careful analysis and step-by-step problem-solving."},{"name":"SQL Agent","value":"sqlAgent","description":"Specializes in interacting with SQL databases. 
Ideal for data analysis tasks, generating queries, or extracting insights from structured data."}],"default":"toolsAgent","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.8}}]}}},{"displayName":"Source for Prompt (User Message)","name":"promptType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"auto","description":"Looks for an input field called 'chatInput' that is coming from a directly connected Chat Trigger"},{"name":"Connected Guardrails Node","value":"guardrails","description":"Looks for an input field called 'guardrailsInput' that is coming from a directly connected Guardrails Node"},{"name":"Define below","value":"define","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"auto","displayOptions":{"hide":{"@version":[{"_cnd":{"lte":1.2}}],"agent":["sqlAgent"]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","required":true,"default":"={{ $json.guardrailsInput }}","typeOptions":{"rows":2},"disabledOptions":{"show":{"promptType":["guardrails"]}},"displayOptions":{"show":{"promptType":["guardrails"],"@version":[{"_cnd":{"gte":1.7}}]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","required":true,"default":"={{ $json.chatInput }}","typeOptions":{"rows":2},"disabledOptions":{"show":{"promptType":["auto"]}},"displayOptions":{"show":{"promptType":["auto"],"@version":[{"_cnd":{"gte":1.7}}]},"hide":{"agent":["sqlAgent"]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","required":true,"default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2},"builderHint":{"placeholderSupported":false,"message":"Use expressions to include dynamic data from previous nodes (e.g., \"={{ $json.input }}\"). 
Static text prompts ignore incoming data."},"displayOptions":{"show":{"promptType":["define"]},"hide":{"agent":["sqlAgent"]}}},{"displayName":"For more reliable structured output parsing, consider using the Tools agent","name":"notice","type":"notice","default":"","displayOptions":{"show":{"hasOutputParser":[true],"agent":["conversationalAgent","reActAgent","planAndExecuteAgent","openAiFunctionsAgent"]}}},{"displayName":"Require Specific Output Format","name":"hasOutputParser","type":"boolean","default":false,"noDataExpression":true,"displayOptions":{"hide":{"@version":[{"_cnd":{"lte":1.2}}],"agent":["sqlAgent"]}}},{"displayName":"Connect an <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_outputParser'>output parser</a> on the canvas to specify the output format you require","name":"notice","type":"notice","default":"","displayOptions":{"show":{"hasOutputParser":[true],"agent":["toolsAgent"]}}},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"agent":["toolsAgent"]}},"default":{},"placeholder":"Add Option","options":[{"displayName":"System Message","name":"systemMessage","type":"string","default":"You are a helpful assistant","description":"The message that will be sent to the agent before the conversation starts","builderHint":{"message":"Must include: agent's purpose, exact names of connected tools, and response instructions"},"typeOptions":{"rows":6}},{"displayName":"Max Iterations","name":"maxIterations","type":"number","default":10,"description":"The maximum number of iterations the agent will run before stopping"},{"displayName":"Return Intermediate Steps","name":"returnIntermediateSteps","type":"boolean","default":false,"description":"Whether or not the output should include intermediate steps the agent took"},{"displayName":"Automatically Passthrough Binary Images","name":"passthroughBinaryImages","type":"boolean","default":true,"description":"Whether or not binary images should be 
automatically passed through to the agent as image type messages"},{"displayName":"Tracing Metadata","name":"tracingMetadata","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add Metadata","description":"Custom metadata added to tracing events","options":[{"displayName":"Metadata","name":"values","values":[{"displayName":"Key","name":"key","type":"string","default":""},{"displayName":"Type","name":"type","type":"options","description":"The field value type","options":[{"name":"Array","value":"arrayValue"},{"name":"Boolean","value":"booleanValue"},{"name":"Number","value":"numberValue"},{"name":"Object","value":"objectValue"},{"name":"String","value":"stringValue"}],"default":"stringValue"},{"displayName":"Value","name":"stringValue","type":"string","default":"","displayOptions":{"show":{"type":["stringValue"]}}},{"displayName":"Value","name":"numberValue","type":"string","default":"","displayOptions":{"show":{"type":["numberValue"]}},"validateType":"number"},{"displayName":"Value","name":"booleanValue","type":"options","default":"true","options":[{"name":"True","value":"true"},{"name":"False","value":"false"}],"displayOptions":{"show":{"type":["booleanValue"]}}},{"displayName":"Value","name":"arrayValue","type":"string","default":"","placeholder":"e.g. 
[ arrayItem1, arrayItem2, arrayItem3 ]","displayOptions":{"show":{"type":["arrayValue"]}},"validateType":"array"},{"displayName":"Value","name":"objectValue","type":"json","default":"={}","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["objectValue"]}},"validateType":"object"}]}]}]},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["conversationalAgent"],"@version":[1]}},"default":"={{ $json.input }}"},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["conversationalAgent"],"@version":[1.1]}},"default":"={{ $json.chat_input }}"},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["conversationalAgent"],"@version":[1.2]}},"default":"={{ $json.chatInput }}"},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"agent":["conversationalAgent"]}},"default":{},"placeholder":"Add Option","options":[{"displayName":"Human Message","name":"humanMessage","type":"string","default":"TOOLS\n------\nAssistant can ask the user to use tools to look up information that may be helpful in answering the users original question. The tools the human can use are:\n\n{tools}\n\n{format_instructions}\n\nUSER'S INPUT\n--------------------\nHere is the user's input (remember to respond with a markdown code snippet of a json blob with a single action, and NOTHING else):\n\n{{input}}","description":"The message that will provide the agent with a list of tools to use","typeOptions":{"rows":6}},{"displayName":"System Message","name":"systemMessage","type":"string","default":"Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. 
As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.","description":"The message that will be sent to the agent before the conversation starts","typeOptions":{"rows":6}},{"displayName":"Max Iterations","name":"maxIterations","type":"number","default":10,"description":"The maximum number of iterations the agent will run before stopping"},{"displayName":"Return Intermediate Steps","name":"returnIntermediateSteps","type":"boolean","default":false,"description":"Whether or not the output should include intermediate steps the agent took"},{"displayName":"Tracing Metadata","name":"tracingMetadata","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add Metadata","description":"Custom metadata added to tracing events","options":[{"displayName":"Metadata","name":"values","values":[{"displayName":"Key","name":"key","type":"string","default":""},{"displayName":"Type","name":"type","type":"options","description":"The field value 
type","options":[{"name":"Array","value":"arrayValue"},{"name":"Boolean","value":"booleanValue"},{"name":"Number","value":"numberValue"},{"name":"Object","value":"objectValue"},{"name":"String","value":"stringValue"}],"default":"stringValue"},{"displayName":"Value","name":"stringValue","type":"string","default":"","displayOptions":{"show":{"type":["stringValue"]}}},{"displayName":"Value","name":"numberValue","type":"string","default":"","displayOptions":{"show":{"type":["numberValue"]}},"validateType":"number"},{"displayName":"Value","name":"booleanValue","type":"options","default":"true","options":[{"name":"True","value":"true"},{"name":"False","value":"false"}],"displayOptions":{"show":{"type":["booleanValue"]}}},{"displayName":"Value","name":"arrayValue","type":"string","default":"","placeholder":"e.g. [ arrayItem1, arrayItem2, arrayItem3 ]","displayOptions":{"show":{"type":["arrayValue"]}},"validateType":"array"},{"displayName":"Value","name":"objectValue","type":"json","default":"={}","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["objectValue"]}},"validateType":"object"}]}]}]},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["openAiFunctionsAgent"],"@version":[1]}},"default":"={{ $json.input }}"},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["openAiFunctionsAgent"],"@version":[1.1]}},"default":"={{ $json.chat_input }}"},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["openAiFunctionsAgent"],"@version":[1.2]}},"default":"={{ $json.chatInput }}"},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"agent":["openAiFunctionsAgent"]}},"default":{},"placeholder":"Add Option","options":[{"displayName":"System Message","name":"systemMessage","type":"string","default":"You are a helpful AI assistant.","description":"The message that will be sent to the agent 
before the conversation starts","typeOptions":{"rows":6}},{"displayName":"Max Iterations","name":"maxIterations","type":"number","default":10,"description":"The maximum number of iterations the agent will run before stopping"},{"displayName":"Return Intermediate Steps","name":"returnIntermediateSteps","type":"boolean","default":false,"description":"Whether or not the output should include intermediate steps the agent took"},{"displayName":"Tracing Metadata","name":"tracingMetadata","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add Metadata","description":"Custom metadata added to tracing events","options":[{"displayName":"Metadata","name":"values","values":[{"displayName":"Key","name":"key","type":"string","default":""},{"displayName":"Type","name":"type","type":"options","description":"The field value type","options":[{"name":"Array","value":"arrayValue"},{"name":"Boolean","value":"booleanValue"},{"name":"Number","value":"numberValue"},{"name":"Object","value":"objectValue"},{"name":"String","value":"stringValue"}],"default":"stringValue"},{"displayName":"Value","name":"stringValue","type":"string","default":"","displayOptions":{"show":{"type":["stringValue"]}}},{"displayName":"Value","name":"numberValue","type":"string","default":"","displayOptions":{"show":{"type":["numberValue"]}},"validateType":"number"},{"displayName":"Value","name":"booleanValue","type":"options","default":"true","options":[{"name":"True","value":"true"},{"name":"False","value":"false"}],"displayOptions":{"show":{"type":["booleanValue"]}}},{"displayName":"Value","name":"arrayValue","type":"string","default":"","placeholder":"e.g. 
[ arrayItem1, arrayItem2, arrayItem3 ]","displayOptions":{"show":{"type":["arrayValue"]}},"validateType":"array"},{"displayName":"Value","name":"objectValue","type":"json","default":"={}","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["objectValue"]}},"validateType":"object"}]}]}]},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["reActAgent"],"@version":[1]}},"default":"={{ $json.input }}"},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["reActAgent"],"@version":[1.1]}},"default":"={{ $json.chat_input }}"},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["reActAgent"],"@version":[1.2]}},"default":"={{ $json.chatInput }}"},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"agent":["reActAgent"]}},"default":{},"placeholder":"Add Option","options":[{"displayName":"Human Message Template","name":"humanMessageTemplate","type":"string","default":"{input}\n\n{agent_scratchpad}","description":"String to use directly as the human message template","typeOptions":{"rows":6}},{"displayName":"Prefix Message","name":"prefix","type":"string","default":"Answer the following questions as best you can. You have access to the following tools:","description":"String to put before the list of tools","typeOptions":{"rows":6}},{"displayName":"Suffix Message for Chat Model","name":"suffixChat","type":"string","default":"Begin! 
Reminder to always use the exact characters `Final Answer` when responding.","description":"String to put after the list of tools that will be used if chat model is used","typeOptions":{"rows":6}},{"displayName":"Suffix Message for Regular Model","name":"suffix","type":"string","default":"Begin!\n\n\tQuestion: {input}\n\tThought:{agent_scratchpad}","description":"String to put after the list of tools that will be used if regular model is used","typeOptions":{"rows":6}},{"displayName":"Max Iterations","name":"maxIterations","type":"number","default":10,"description":"The maximum number of iterations the agent will run before stopping"},{"displayName":"Return Intermediate Steps","name":"returnIntermediateSteps","type":"boolean","default":false,"description":"Whether or not the output should include intermediate steps the agent took"},{"displayName":"Tracing Metadata","name":"tracingMetadata","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add Metadata","description":"Custom metadata added to tracing events","options":[{"displayName":"Metadata","name":"values","values":[{"displayName":"Key","name":"key","type":"string","default":""},{"displayName":"Type","name":"type","type":"options","description":"The field value 
type","options":[{"name":"Array","value":"arrayValue"},{"name":"Boolean","value":"booleanValue"},{"name":"Number","value":"numberValue"},{"name":"Object","value":"objectValue"},{"name":"String","value":"stringValue"}],"default":"stringValue"},{"displayName":"Value","name":"stringValue","type":"string","default":"","displayOptions":{"show":{"type":["stringValue"]}}},{"displayName":"Value","name":"numberValue","type":"string","default":"","displayOptions":{"show":{"type":["numberValue"]}},"validateType":"number"},{"displayName":"Value","name":"booleanValue","type":"options","default":"true","options":[{"name":"True","value":"true"},{"name":"False","value":"false"}],"displayOptions":{"show":{"type":["booleanValue"]}}},{"displayName":"Value","name":"arrayValue","type":"string","default":"","placeholder":"e.g. [ arrayItem1, arrayItem2, arrayItem3 ]","displayOptions":{"show":{"type":["arrayValue"]}},"validateType":"array"},{"displayName":"Value","name":"objectValue","type":"json","default":"={}","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["objectValue"]}},"validateType":"object"}]}]}]},{"displayName":"Data Source","name":"dataSource","type":"options","displayOptions":{"show":{"agent":["sqlAgent"],"@version":[{"_cnd":{"lt":1.4}}]}},"default":"sqlite","description":"SQL database to connect to","options":[{"name":"MySQL","value":"mysql","description":"Connect to a MySQL database"},{"name":"Postgres","value":"postgres","description":"Connect to a Postgres database"},{"name":"SQLite","value":"sqlite","description":"Use SQLite by connecting a database file as binary input"}]},{"displayName":"Data Source","name":"dataSource","type":"options","displayOptions":{"show":{"agent":["sqlAgent"],"@version":[{"_cnd":{"gte":1.4}}]}},"default":"postgres","description":"SQL database to connect to","options":[{"name":"MySQL","value":"mysql","description":"Connect to a MySQL database"},{"name":"Postgres","value":"postgres","description":"Connect to a Postgres 
database"},{"name":"SQLite","value":"sqlite","description":"Use SQLite by connecting a database file as binary input"}]},{"displayName":"Credentials","name":"credentials","type":"credentials","default":""},{"displayName":"Pass the SQLite database into this node as binary data, e.g. by inserting a 'Read/Write Files from Disk' node beforehand","name":"sqLiteFileNotice","type":"notice","default":"","displayOptions":{"show":{"agent":["sqlAgent"],"dataSource":["sqlite"]}}},{"displayName":"Input Binary Field","name":"binaryPropertyName","type":"string","default":"data","required":true,"placeholder":"e.g data","hint":"The name of the input binary field containing the file to be extracted","displayOptions":{"show":{"agent":["sqlAgent"],"dataSource":["sqlite"]}}},{"displayName":"Prompt","name":"input","type":"string","displayOptions":{"show":{"agent":["sqlAgent"],"@version":[{"_cnd":{"lte":1.2}}]}},"default":"","required":true,"typeOptions":{"rows":5}},{"displayName":"Source for Prompt (User Message)","name":"promptType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"auto","description":"Looks for an input field called 'chatInput' that is coming from a directly connected Chat Trigger"},{"name":"Connected Guardrails Node","value":"guardrails","description":"Looks for an input field called 'guardrailsInput' that is coming from a directly connected Guardrails Node"},{"name":"Define below","value":"define","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"auto","displayOptions":{"hide":{"@version":[{"_cnd":{"lte":1.2}}]},"show":{"agent":["sqlAgent"]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","required":true,"default":"={{ $json.guardrailsInput }}","typeOptions":{"rows":2},"disabledOptions":{"show":{"promptType":["guardrails"]}},"displayOptions":{"show":{"promptType":["guardrails"],"@version":[{"_cnd":{"gte":1.7}}],"agent":["sqlAgent"]}}},{"displayName":"Prompt (User 
Message)","name":"text","type":"string","required":true,"default":"={{ $json.chatInput }}","typeOptions":{"rows":2},"disabledOptions":{"show":{"promptType":["auto"]}},"displayOptions":{"show":{"promptType":["auto"],"@version":[{"_cnd":{"gte":1.7}}],"agent":["sqlAgent"]}}},{"displayName":"Prompt (User Message)","name":"text","type":"string","required":true,"default":"","placeholder":"e.g. Hello, how can you help me?","typeOptions":{"rows":2},"builderHint":{"placeholderSupported":false,"message":"Use expressions to include dynamic data from previous nodes (e.g., \"={{ $json.input }}\"). Static text prompts ignore incoming data."},"displayOptions":{"show":{"promptType":["define"],"agent":["sqlAgent"]}}},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"agent":["sqlAgent"]}},"default":{},"placeholder":"Add Option","options":[{"displayName":"Ignored Tables","name":"ignoredTables","type":"string","default":"","description":"Comma-separated list of tables to ignore from the database. If empty, no tables are ignored."},{"displayName":"Include Sample Rows","name":"includedSampleRows","type":"number","description":"Number of sample rows to include in the prompt to the agent. It helps the agent to understand the schema of the database but it also increases the amount of tokens used.","default":3},{"displayName":"Included Tables","name":"includedTables","type":"string","default":"","description":"Comma-separated list of tables to include in the database. 
If empty, all tables are included."},{"displayName":"Prefix Prompt","name":"prefixPrompt","type":"string","default":"You are an agent designed to interact with an SQL database.\nGiven an input question, create a syntactically correct {dialect} query to run, then look at the results of the query and return the answer.\nUnless the user specifies a specific number of examples they wish to obtain, always limit your query to at most {top_k} results using the LIMIT clause.\nYou can order the results by a relevant column to return the most interesting examples in the database.\nNever query for all the columns from a specific table, only ask for a the few relevant columns given the question.\nYou have access to tools for interacting with the database.\nOnly use the below tools. Only use the information returned by the below tools to construct your final answer.\nYou MUST double check your query before executing it. If you get an error while executing a query, rewrite the query and try again.\n\nDO NOT make any DML statements (INSERT, UPDATE, DELETE, DROP etc.) 
to the database.\n\nIf the question does not seem related to the database, just return \"I don't know\" as the answer.","description":"Prefix prompt to use for the agent","typeOptions":{"rows":10}},{"displayName":"Suffix Prompt","name":"suffixPrompt","type":"string","default":"Begin!\nChat History:\n{chatHistory}\n\nQuestion: {input}\nThought: I should look at the tables in the database to see what I can query.\n{agent_scratchpad}","description":"Suffix prompt to use for the agent","typeOptions":{"rows":4}},{"displayName":"Limit","name":"topK","type":"number","default":10,"description":"The maximum number of results to return"},{"displayName":"Tracing Metadata","name":"tracingMetadata","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add Metadata","description":"Custom metadata added to tracing events","options":[{"displayName":"Metadata","name":"values","values":[{"displayName":"Key","name":"key","type":"string","default":""},{"displayName":"Type","name":"type","type":"options","description":"The field value type","options":[{"name":"Array","value":"arrayValue"},{"name":"Boolean","value":"booleanValue"},{"name":"Number","value":"numberValue"},{"name":"Object","value":"objectValue"},{"name":"String","value":"stringValue"}],"default":"stringValue"},{"displayName":"Value","name":"stringValue","type":"string","default":"","displayOptions":{"show":{"type":["stringValue"]}}},{"displayName":"Value","name":"numberValue","type":"string","default":"","displayOptions":{"show":{"type":["numberValue"]}},"validateType":"number"},{"displayName":"Value","name":"booleanValue","type":"options","default":"true","options":[{"name":"True","value":"true"},{"name":"False","value":"false"}],"displayOptions":{"show":{"type":["booleanValue"]}}},{"displayName":"Value","name":"arrayValue","type":"string","default":"","placeholder":"e.g. 
[ arrayItem1, arrayItem2, arrayItem3 ]","displayOptions":{"show":{"type":["arrayValue"]}},"validateType":"array"},{"displayName":"Value","name":"objectValue","type":"json","default":"={}","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["objectValue"]}},"validateType":"object"}]}]}]},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["planAndExecuteAgent"],"@version":[1]}},"default":"={{ $json.input }}"},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["planAndExecuteAgent"],"@version":[1.1]}},"default":"={{ $json.chat_input }}"},{"displayName":"Text","name":"text","type":"string","required":true,"displayOptions":{"show":{"agent":["planAndExecuteAgent"],"@version":[1.2]}},"default":"={{ $json.chatInput }}"},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"agent":["planAndExecuteAgent"]}},"default":{},"placeholder":"Add Option","options":[{"displayName":"Human Message Template","name":"humanMessageTemplate","type":"string","default":"Previous steps: {previous_steps}\n\nCurrent objective: {current_step}\n\n{agent_scratchpad}\n\nYou may extract and combine relevant data from your previous steps when responding to me.","description":"The message that will be sent to the agent during each step execution","typeOptions":{"rows":6}},{"displayName":"Tracing Metadata","name":"tracingMetadata","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add Metadata","description":"Custom metadata added to tracing events","options":[{"displayName":"Metadata","name":"values","values":[{"displayName":"Key","name":"key","type":"string","default":""},{"displayName":"Type","name":"type","type":"options","description":"The field value 
type","options":[{"name":"Array","value":"arrayValue"},{"name":"Boolean","value":"booleanValue"},{"name":"Number","value":"numberValue"},{"name":"Object","value":"objectValue"},{"name":"String","value":"stringValue"}],"default":"stringValue"},{"displayName":"Value","name":"stringValue","type":"string","default":"","displayOptions":{"show":{"type":["stringValue"]}}},{"displayName":"Value","name":"numberValue","type":"string","default":"","displayOptions":{"show":{"type":["numberValue"]}},"validateType":"number"},{"displayName":"Value","name":"booleanValue","type":"options","default":"true","options":[{"name":"True","value":"true"},{"name":"False","value":"false"}],"displayOptions":{"show":{"type":["booleanValue"]}}},{"displayName":"Value","name":"arrayValue","type":"string","default":"","placeholder":"e.g. [ arrayItem1, arrayItem2, arrayItem3 ]","displayOptions":{"show":{"type":["arrayValue"]}},"validateType":"array"},{"displayName":"Value","name":"objectValue","type":"json","default":"={}","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["objectValue"]}},"validateType":"object"}]}]}]}]},
|
|
@@ -29,31 +29,31 @@
|
|
|
29
29
|
{"displayName":"Embeddings Cohere","name":"embeddingsCohere","group":["transform"],"version":1,"description":"Use Cohere Embeddings","defaults":{"name":"Embeddings Cohere"},"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.host }}"},"credentials":[{"name":"cohereApi","required":true}],"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingscohere/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. The default model is using 768-dimensional embeddings.","name":"notice","type":"notice","default":""},{"displayName":"Model","name":"modelName","type":"options","description":"The model which will generate the embeddings. 
<a href=\"https://docs.cohere.com/docs/models\">Learn more</a>.","default":"embed-english-v2.0","options":[{"name":"Embed-English-Light-v2.0 (1024 Dimensions)","value":"embed-english-light-v2.0"},{"name":"Embed-English-Light-v3.0 (384 Dimensions)","value":"embed-english-light-v3.0"},{"name":"Embed-English-v2.0 (4096 Dimensions)","value":"embed-english-v2.0"},{"name":"Embed-English-v3.0 (1024 Dimensions)","value":"embed-english-v3.0"},{"name":"Embed-Multilingual-Light-v3.0 (384 Dimensions)","value":"embed-multilingual-light-v3.0"},{"name":"Embed-Multilingual-v2.0 (768 Dimensions)","value":"embed-multilingual-v2.0"},{"name":"Embed-Multilingual-v3.0 (1024 Dimensions)","value":"embed-multilingual-v3.0"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsCohere/cohere.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsCohere/cohere.dark.svg"}},
|
|
30
30
|
{"displayName":"Embeddings AWS Bedrock","name":"embeddingsAwsBedrock","credentials":[{"name":"aws","required":true}],"group":["transform"],"version":1,"description":"Use Embeddings AWS Bedrock","defaults":{"name":"Embeddings AWS Bedrock"},"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsawsbedrock/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"=https://bedrock.{{$credentials?.region ?? \"eu-central-1\"}}.amazonaws.com"},"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. <a href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/foundation-models.html\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/foundation-models?byInferenceType=ON_DEMAND&byOutputModality=EMBEDDING"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"modelSummaries"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.modelName}}","description":"={{$responseItem.modelArn}}","value":"={{$responseItem.modelId}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":""}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsAwsBedrock/bedrock.svg"},
|
|
31
31
|
{"displayName":"Embeddings Azure OpenAI","name":"embeddingsAzureOpenAi","credentials":[{"name":"azureOpenAiApi","required":true}],"group":["transform"],"version":1,"description":"Use Embeddings Azure OpenAI","defaults":{"name":"Embeddings Azure OpenAI"},"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsazureopenai/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model (Deployment) Name","name":"model","type":"string","description":"The name of the model(deployment) to use","default":""},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Batch Size","name":"batchSize","default":512,"typeOptions":{"maxValue":2048},"description":"Maximum number of documents to send in each request","type":"number"},{"displayName":"Strip New Lines","name":"stripNewLines","default":true,"description":"Whether to strip new lines from the input text","type":"boolean"},{"displayName":"Timeout","name":"timeout","default":-1,"description":"Maximum amount of time a request is allowed to take in seconds. Set to -1 for no timeout.","type":"number"},{"displayName":"Dimensions","name":"dimensions","default":1536,"description":"The number of dimensions the resulting output embeddings should have. 
Only supported in text-embedding-3 and later models.","type":"options","options":[{"name":"256","value":256},{"name":"512","value":512},{"name":"1024","value":1024},{"name":"1536","value":1536},{"name":"3072","value":3072}]}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsAzureOpenAi/azure.svg"},
|
|
32
|
-
{"displayName":"Embeddings Google Gemini","name":"embeddingsGoogleGemini","group":["transform"],"version":1,"description":"Use Google Gemini Embeddings","defaults":{"name":"Embeddings Google Gemini"},"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.host }}"},"credentials":[{"name":"googlePalmApi","required":true}],"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsgooglegemini/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. The default model is using 768-dimensional embeddings.","name":"notice","type":"notice","default":""},{"displayName":"Model","name":"modelName","type":"options","description":"The model which will generate the embeddings. <a href=\"https://developers.generativeai.google/api/rest/generativelanguage/models/list\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/v1beta/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"filter","properties":{"pass":"={{ $responseItem.name.includes('embedding') }}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}","description":"={{$responseItem.description}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"models/
|
|
32
|
+
{"displayName":"Embeddings Google Gemini","name":"embeddingsGoogleGemini","group":["transform"],"version":1,"description":"Use Google Gemini Embeddings","defaults":{"name":"Embeddings Google Gemini"},"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.host }}"},"credentials":[{"name":"googlePalmApi","required":true}],"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsgooglegemini/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. The default model is using 768-dimensional embeddings.","name":"notice","type":"notice","default":""},{"displayName":"Model","name":"modelName","type":"options","description":"The model which will generate the embeddings. 
<a href=\"https://developers.generativeai.google/api/rest/generativelanguage/models/list\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/v1beta/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"filter","properties":{"pass":"={{ $responseItem.name.includes('embedding') }}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}","description":"={{$responseItem.description}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"models/gemini-embedding-001"}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsGoogleGemini/google.svg"},
|
|
33
33
|
{"displayName":"Embeddings Google Vertex","name":"embeddingsGoogleVertex","group":["transform"],"version":1,"description":"Use Google Vertex Embeddings","defaults":{"name":"Embeddings Google Vertex"},"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.host }}"},"credentials":[{"name":"googleApi","required":true}],"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsgooglevertex/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. The default model is using 768-dimensional embeddings. You can find available models <a href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api\">here</a>.","name":"notice","type":"notice","default":""},{"displayName":"Project ID","name":"projectId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"description":"Select or enter your Google Cloud project ID","modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"gcpProjectsList"}},{"displayName":"ID","name":"id","type":"string"}]},{"displayName":"Model Name","name":"modelName","type":"string","description":"The model which will generate the embeddings. 
<a href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api\">Learn more</a>.","default":"text-embedding-005"}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsGoogleVertex/google.svg"},
|
|
34
34
|
{"displayName":"Embeddings Hugging Face Inference","name":"embeddingsHuggingFaceInference","group":["transform"],"version":1,"description":"Use HuggingFace Inference Embeddings","defaults":{"name":"Embeddings HuggingFace Inference"},"credentials":[{"name":"huggingFaceApi","required":true}],"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingshuggingfaceinference/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. 
The default model is using 768-dimensional embeddings.","name":"notice","type":"notice","default":""},{"displayName":"Model Name","name":"modelName","type":"string","default":"sentence-transformers/distilbert-base-nli-mean-tokens","description":"The model name to use from HuggingFace library"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Custom Inference Endpoint","name":"endpointUrl","default":"","description":"Custom endpoint URL","type":"string"},{"displayName":"Provider","name":"provider","type":"options","options":[{"value":"black-forest-labs","name":"black-forest-labs"},{"value":"cerebras","name":"cerebras"},{"value":"cohere","name":"cohere"},{"value":"fal-ai","name":"fal-ai"},{"value":"featherless-ai","name":"featherless-ai"},{"value":"fireworks-ai","name":"fireworks-ai"},{"value":"groq","name":"groq"},{"value":"hf-inference","name":"hf-inference"},{"value":"hyperbolic","name":"hyperbolic"},{"value":"nebius","name":"nebius"},{"value":"novita","name":"novita"},{"value":"nscale","name":"nscale"},{"value":"openai","name":"openai"},{"value":"ovhcloud","name":"ovhcloud"},{"value":"replicate","name":"replicate"},{"value":"sambanova","name":"sambanova"},{"value":"together","name":"together"},{"value":"auto","name":"auto"}],"default":"auto"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsHuggingFaceInference/huggingface.svg"},
|
|
35
35
|
{"displayName":"Embeddings Mistral Cloud","name":"embeddingsMistralCloud","credentials":[{"name":"mistralCloudApi","required":true}],"group":["transform"],"version":1,"description":"Use Embeddings Mistral Cloud","defaults":{"name":"Embeddings Mistral Cloud"},"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsmistralcloud/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"https://api.mistral.ai/v1"},"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","description":"The model which will compute the embeddings. 
<a href=\"https://docs.mistral.ai/platform/endpoints/\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"filter","properties":{"pass":"={{ $responseItem.id.includes('embed') }}"}},{"type":"setKeyValue","properties":{"name":"={{ $responseItem.id }}","value":"={{ $responseItem.id }}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"mistral-embed"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Batch Size","name":"batchSize","default":512,"typeOptions":{"maxValue":2048},"description":"Maximum number of documents to send in each request","type":"number"},{"displayName":"Strip New Lines","name":"stripNewLines","default":true,"description":"Whether to strip new lines from the input text","type":"boolean"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsMistralCloud/mistral.svg"},
|
|
36
36
|
{"displayName":"Embeddings OpenAI","name":"embeddingsOpenAi","credentials":[{"name":"openAiApi","required":true}],"group":["transform"],"version":[1,1.1,1.2],"description":"Use Embeddings OpenAI","defaults":{"name":"Embeddings OpenAI"},"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsopenai/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $parameter.options?.baseURL?.split(\"/\").slice(0,-1).join(\"/\") || $credentials.url?.split(\"/\").slice(0,-1).join(\"/\") || \"https://api.openai.com\" }}"},"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the embeddings. 
<a href=\"https://platform.openai.com/docs/models/overview\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"={{ $parameter.options?.baseURL?.split(\"/\").slice(-1).pop() || $credentials?.url?.split(\"/\").slice(-1).pop() || \"v1\" }}/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"filter","properties":{"pass":"={{\n\t\t\t\t\t\t\t\t\t($parameter.options?.baseURL && !$parameter.options?.baseURL?.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t($credentials?.url && !$credentials.url.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t$responseItem.id.includes('embed')\n\t\t\t\t\t\t\t\t}}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"text-embedding-ada-002","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the embeddings. 
<a href=\"https://platform.openai.com/docs/models/overview\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"={{ $parameter.options?.baseURL?.split(\"/\").slice(-1).pop() || $credentials?.url?.split(\"/\").slice(-1).pop() || \"v1\" }}/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"filter","properties":{"pass":"={{\n\t\t\t\t\t\t\t\t\t($parameter.options?.baseURL && !$parameter.options?.baseURL?.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t($credentials?.url && !$credentials.url.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t$responseItem.id.includes('embed')\n\t\t\t\t\t\t\t\t}}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"text-embedding-3-small","displayOptions":{"hide":{"@version":[1]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Dimensions","name":"dimensions","default":1536,"description":"The number of dimensions the resulting output embeddings should have. 
Only supported in text-embedding-3 and later models.","type":"options","options":[{"name":"256","value":256},{"name":"512","value":512},{"name":"1024","value":1024},{"name":"1536","value":1536},{"name":"3072","value":3072}]},{"displayName":"Base URL","name":"baseURL","default":"https://api.openai.com/v1","description":"Override the default base URL for the API","type":"string","displayOptions":{"hide":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Batch Size","name":"batchSize","default":512,"typeOptions":{"maxValue":2048},"description":"Maximum number of documents to send in each request","type":"number"},{"displayName":"Strip New Lines","name":"stripNewLines","default":true,"description":"Whether to strip new lines from the input text","type":"boolean"},{"displayName":"Timeout","name":"timeout","default":-1,"description":"Maximum amount of time a request is allowed to take in seconds. Set to -1 for no timeout.","type":"number"},{"displayName":"Encoding Format","name":"encodingFormat","type":"options","description":"The format to return the embeddings in","default":"float","options":[{"name":"Float","value":"float"},{"name":"Base64","value":"base64"}]}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsOpenAI/openAiLight.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsOpenAI/openAiLight.dark.svg"}},
|
|
37
37
|
{"displayName":"Embeddings Lemonade","name":"embeddingsLemonade","group":["transform"],"version":1,"description":"Use Lemonade Embeddings","defaults":{"name":"Embeddings Lemonade"},"credentials":[{"name":"lemonadeApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.baseUrl.replace(new RegExp(\"/$\"), \"\") }}"},"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingslemonade/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","default":"","description":"The model which will generate the completion. Models are loaded and managed through the Lemonade server.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"required":true}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsLemonade/lemonade.svg"},
|
|
38
38
|
{"displayName":"Embeddings Ollama","name":"embeddingsOllama","group":["transform"],"version":1,"description":"Use Ollama Embeddings","defaults":{"name":"Embeddings Ollama"},"credentials":[{"name":"ollamaApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.baseUrl.replace(new RegExp(\"/$\"), \"\") }}"},"codex":{"categories":["AI"],"subcategories":{"AI":["Embeddings"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsollama/"}]}},"inputs":[],"outputs":["ai_embedding"],"outputNames":["Embeddings"],"properties":[{"displayName":"This node must be connected to a vector store. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_vectorStore'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","default":"llama3.2","description":"The model which will generate the completion. To download models, visit <a href=\"https://ollama.ai/library\">Ollama Models Library</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/api/tags"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"required":true}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsOllama/ollama.svg"},
|
|
39
|
-
{"displayName":"Alibaba Cloud Chat Model","name":"lmChatAlibabaCloud","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"Alibaba Cloud Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatalibabacloud/"}]},"alias":["qwen","dashscope","alibaba","model studio"]},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"alibabaCloudApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url + \"/compatible-mode/v1\" }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. 
<a href=\"https://www.alibabacloud.com/help/en/model-studio/getting-started/models\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"qwen-plus"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
The limit depends on the selected model.","type":"number"},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatAlibabaCloud/alibaba.svg"},
|
|
40
|
-
{"displayName":"Anthropic Chat Model","name":"lmChatAnthropic","group":["transform"],"version":[1,1.1,1.2,1.3],"defaultVersion":1.
|
|
39
|
+
{"displayName":"Alibaba Cloud Chat Model","name":"lmChatAlibabaCloud","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"Alibaba Cloud Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatalibabacloud/"}]},"alias":["qwen","dashscope","alibaba","model studio"]},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"alibabaCloudApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url + \"/compatible-mode/v1\" }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. 
<a href=\"https://www.alibabacloud.com/help/en/model-studio/getting-started/models\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"qwen-plus","builderHint":{"message":"Default to the latest Qwen flagship (qwen3.6-max-preview or qwen3.6-plus). Use qwen-plus for cost-efficient builds. Avoid qwen-turbo, Qwen 3.5 and earlier, and older dated snapshots."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
The limit depends on the selected model.","type":"number"},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatAlibabaCloud/alibaba.svg"},
|
|
40
|
+
{"displayName":"Anthropic Chat Model","name":"lmChatAnthropic","group":["transform"],"version":[1,1.1,1.2,1.3,1.4],"defaultVersion":1.4,"description":"Language Model Anthropic","defaults":{"name":"Anthropic Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatanthropic/"}]},"alias":["claude","sonnet","opus"]},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"anthropicApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","options":[{"name":"Claude 3.5 Sonnet(20241022)","value":"claude-3-5-sonnet-20241022"},{"name":"Claude 3 Opus(20240229)","value":"claude-3-opus-20240229"},{"name":"Claude 3.5 Sonnet(20240620)","value":"claude-3-5-sonnet-20240620"},{"name":"Claude 3 Sonnet(20240229)","value":"claude-3-sonnet-20240229"},{"name":"Claude 3.5 Haiku(20241022)","value":"claude-3-5-haiku-20241022"},{"name":"Claude 3 Haiku(20240307)","value":"claude-3-haiku-20240307"},{"name":"LEGACY: Claude 2","value":"claude-2"},{"name":"LEGACY: Claude 2.1","value":"claude-2.1"},{"name":"LEGACY: Claude Instant 1.2","value":"claude-instant-1.2"},{"name":"LEGACY: Claude Instant 1","value":"claude-instant-1"}],"description":"The model which will generate the completion. <a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.","default":"claude-2","builderHint":{"message":"Default to claude-sonnet-4-6 (latest Sonnet); use claude-opus-4-7 when the user needs the most capable model. 
Never use Claude Sonnet 4.5, Claude 3.x, Claude 2, or LEGACY options — those are superseded and are not valid choices."},"displayOptions":{"show":{"@version":[1]}}},{"displayName":"Model","name":"model","type":"options","options":[{"name":"Claude 3.5 Sonnet(20241022)","value":"claude-3-5-sonnet-20241022"},{"name":"Claude 3 Opus(20240229)","value":"claude-3-opus-20240229"},{"name":"Claude 3.5 Sonnet(20240620)","value":"claude-3-5-sonnet-20240620"},{"name":"Claude 3 Sonnet(20240229)","value":"claude-3-sonnet-20240229"},{"name":"Claude 3.5 Haiku(20241022)","value":"claude-3-5-haiku-20241022"},{"name":"Claude 3 Haiku(20240307)","value":"claude-3-haiku-20240307"},{"name":"LEGACY: Claude 2","value":"claude-2"},{"name":"LEGACY: Claude 2.1","value":"claude-2.1"},{"name":"LEGACY: Claude Instant 1.2","value":"claude-instant-1.2"},{"name":"LEGACY: Claude Instant 1","value":"claude-instant-1"}],"description":"The model which will generate the completion. <a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.","default":"claude-3-sonnet-20240229","builderHint":{"message":"Default to claude-sonnet-4-6 (latest Sonnet); use claude-opus-4-7 when the user needs the most capable model. Never use Claude Sonnet 4.5, Claude 3.x, Claude 2, or LEGACY options — those are superseded and are not valid choices."},"displayOptions":{"show":{"@version":[1.1]}}},{"displayName":"Model","name":"model","type":"options","options":[{"name":"Claude 3.5 Sonnet(20241022)","value":"claude-3-5-sonnet-20241022"},{"name":"Claude 3 Opus(20240229)","value":"claude-3-opus-20240229"},{"name":"Claude 3.5 Sonnet(20240620)","value":"claude-3-5-sonnet-20240620"},{"name":"Claude 3 Sonnet(20240229)","value":"claude-3-sonnet-20240229"},{"name":"Claude 3.5 Haiku(20241022)","value":"claude-3-5-haiku-20241022"},{"name":"Claude 3 Haiku(20240307)","value":"claude-3-haiku-20240307"}],"description":"The model which will generate the completion. 
<a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.","default":"claude-3-5-sonnet-20240620","builderHint":{"message":"Default to claude-sonnet-4-6 (latest Sonnet); use claude-opus-4-7 when the user needs the most capable model. Never use Claude Sonnet 4.5, Claude 3.x, Claude 2, or LEGACY options — those are superseded and are not valid choices."},"displayOptions":{"show":{"@version":[{"_cnd":{"lte":1.2}}]}}},{"displayName":"Model","name":"model","type":"resourceLocator","default":{"mode":"list","value":"claude-sonnet-4-5-20250929","cachedResultName":"Claude Sonnet 4.5"},"builderHint":{"message":"Default to claude-sonnet-4-6 (latest Sonnet); use claude-opus-4-7 when the user needs the most capable model. Never use Claude Sonnet 4.5, Claude 3.x, Claude 2, or LEGACY options — those are superseded and are not valid choices."},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","placeholder":"Select a model...","typeOptions":{"searchListMethod":"searchModels","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"Claude Sonnet"}],"description":"The model. Choose from the list, or specify an ID. <a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.","displayOptions":{"show":{"@version":[1.3]}}},{"displayName":"Model","name":"model","type":"resourceLocator","default":{"mode":"list","value":"claude-sonnet-4-6","cachedResultName":"Claude Sonnet 4.6"},"builderHint":{"message":"Default to claude-sonnet-4-6 (latest Sonnet); use claude-opus-4-7 when the user needs the most capable model. 
Never use Claude Sonnet 4.5, Claude 3.x, Claude 2, or LEGACY options — those are superseded and are not valid choices."},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","placeholder":"Select a model...","typeOptions":{"searchListMethod":"searchModels","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"Claude Sonnet"}],"description":"The model. Choose from the list, or specify an ID. <a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.4}}]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxTokensToSample","default":4096,"description":"The maximum number of tokens to generate in the completion","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number","displayOptions":{"hide":{"thinking":[true]}}},{"displayName":"Top K","name":"topK","default":-1,"typeOptions":{"maxValue":1,"minValue":-1,"numberPrecision":1},"description":"Used to remove \"long tail\" low probability responses. Defaults to -1, which disables it.","type":"number","displayOptions":{"hide":{"thinking":[true]}}},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.","type":"number","displayOptions":{"hide":{"thinking":[true]}}},{"displayName":"Enable Thinking","name":"thinking","type":"boolean","default":false,"description":"Whether to enable thinking mode for the model"},{"displayName":"Thinking Budget (Tokens)","name":"thinkingBudget","type":"number","default":1024,"description":"The maximum number of tokens to use for thinking","displayOptions":{"show":{"thinking":[true]}}}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMChatAnthropic/anthropic.svg"},
|
|
41
41
|
{"displayName":"Azure OpenAI Chat Model","name":"lmChatAzureOpenAi","group":["transform"],"version":1,"description":"For advanced usage with an AI chain","defaults":{"name":"Azure OpenAI Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatazureopenai/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"azureOpenAiApi","required":true,"displayOptions":{"show":{"authentication":["azureOpenAiApi"]}}},{"name":"azureEntraCognitiveServicesOAuth2Api","required":true,"displayOptions":{"show":{"authentication":["azureEntraCognitiveServicesOAuth2Api"]}}}],"properties":[{"displayName":"Authentication","name":"authentication","type":"options","default":"azureOpenAiApi","options":[{"name":"API Key","value":"azureOpenAiApi"},{"name":"Azure Entra ID (OAuth2)","value":"azureEntraCognitiveServicesOAuth2Api"}]},{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. 
Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model (Deployment) Name","name":"model","type":"string","description":"The name of the model(deployment) to use (e.g., gpt-4, gpt-35-turbo)","required":true,"default":""},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768). 
Use -1 for default.","type":"number","typeOptions":{"maxValue":128000}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout (Ms)","name":"timeout","default":60000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt on failure","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatAzureOpenAi/azure.svg"},
|
|
42
|
-
{"displayName":"AWS Bedrock Chat Model","name":"lmChatAwsBedrock","group":["transform"],"version":[1,1.1],"description":"Language Model AWS Bedrock","defaults":{"name":"AWS Bedrock Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatawsbedrock/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"aws","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"=https://bedrock.{{$credentials?.region ?? \"eu-central-1\"}}.amazonaws.com"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model Source","name":"modelSource","type":"options","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}},"options":[{"name":"On-Demand Models","value":"onDemand","description":"Standard foundation models with on-demand pricing"},{"name":"Inference Profiles","value":"inferenceProfile","description":"Cross-region inference profiles (required for models like Claude Sonnet 4 and others)"}],"default":"onDemand","description":"Choose between on-demand foundation models or inference profiles"},{"displayName":"Model","name":"model","type":"options","allowArbitraryValues":true,"description":"The model which will generate the completion. 
<a href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/foundation-models.html\">Learn more</a>.","displayOptions":{"hide":{"modelSource":["inferenceProfile"]}},"typeOptions":{"loadOptionsDependsOn":["modelSource"],"loadOptions":{"routing":{"request":{"method":"GET","url":"/foundation-models?&byOutputModality=TEXT&byInferenceType=ON_DEMAND"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"modelSummaries"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.modelName}}","description":"={{$responseItem.modelArn}}","value":"={{$responseItem.modelId}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":""},{"displayName":"Model","name":"model","type":"options","allowArbitraryValues":true,"description":"The inference profile which will generate the completion. <a href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-use.html\">Learn more</a>.","displayOptions":{"show":{"modelSource":["inferenceProfile"]}},"typeOptions":{"loadOptionsDependsOn":["modelSource"],"loadOptions":{"routing":{"request":{"method":"GET","url":"/inference-profiles?maxResults=1000"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"inferenceProfileSummaries"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.inferenceProfileName}}","description":"={{$responseItem.description || $responseItem.inferenceProfileArn}}","value":"={{$responseItem.inferenceProfileId}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":""},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxTokensToSample","default":2000,"description":"The maximum number of tokens to generate in the completion","type":"number"},{"displayName":"Sampling 
Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatAwsBedrock/bedrock.svg"},
|
|
43
|
-
{"displayName":"Cohere Chat Model","name":"lmChatCohere","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"Cohere Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatcohere/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"cohereApi","required":true}],"requestDefaults":{"baseURL":"={{$credentials?.url}}","headers":{"accept":"application/json","authorization":"=Bearer {{$credentials?.apiKey}}"}},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. <a href=\"https://docs.cohere.com/docs/models\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/v1/models?page_size=100&endpoint=chat"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}","description":"={{$responseItem.description}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"default":"command-a-03-2025"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatCohere/cohere.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatCohere/cohere.dark.svg"}},
|
|
44
|
-
{"displayName":"DeepSeek Chat Model","name":"lmChatDeepSeek","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"DeepSeek Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatdeepseek/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"deepSeekApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. 
<a href=\"https://api-docs.deepseek.com/quick_start/pricing\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"deepseek-chat"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatDeepSeek/deepseek.svg"},
|
|
45
|
-
{"displayName":"Google Gemini Chat Model","name":"lmChatGoogleGemini","group":["transform"],"version":1,"description":"Chat Model Google Gemini","defaults":{"name":"Google Gemini Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatgooglegemini/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"googlePalmApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.host }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"modelName","type":"options","description":"The model which will generate the completion. 
<a href=\"https://developers.generativeai.google/api/rest/generativelanguage/models/list\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/v1beta/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"filter","properties":{"pass":"={{ !$responseItem.name.includes('embedding') }}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}","description":"={{$responseItem.description}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"models/gemini-2.5-flash"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxOutputTokens","default":2048,"description":"The maximum number of tokens to generate in the completion","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.4,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Top K","name":"topK","default":32,"typeOptions":{"maxValue":40,"minValue":-1,"numberPrecision":1},"description":"Used to remove \"long tail\" low probability responses. Defaults to -1, which disables it.","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Safety Settings","name":"safetySettings","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{"values":{"category":"HARM_CATEGORY_HARASSMENT","threshold":"HARM_BLOCK_THRESHOLD_UNSPECIFIED"}},"placeholder":"Add Option","options":[{"name":"values","displayName":"Values","values":[{"displayName":"Safety Category","name":"category","type":"options","description":"The category of harmful content to block","default":"HARM_CATEGORY_UNSPECIFIED","options":[{"value":"HARM_CATEGORY_HARASSMENT","name":"HARM_CATEGORY_HARASSMENT","description":"Harassment content"},{"value":"HARM_CATEGORY_HATE_SPEECH","name":"HARM_CATEGORY_HATE_SPEECH","description":"Hate speech and content"},{"value":"HARM_CATEGORY_SEXUALLY_EXPLICIT","name":"HARM_CATEGORY_SEXUALLY_EXPLICIT","description":"Sexually explicit content"},{"value":"HARM_CATEGORY_DANGEROUS_CONTENT","name":"HARM_CATEGORY_DANGEROUS_CONTENT","description":"Dangerous content"}]},{"displayName":"Safety Threshold","name":"threshold","type":"options","description":"The threshold of harmful content to block","default":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","options":[{"value":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","name":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","description":"Threshold is unspecified"},{"value":"BLOCK_LOW_AND_ABOVE","name":"BLOCK_LOW_AND_ABOVE","description":"Content with NEGLIGIBLE will be allowed"},{"value":"BLOCK_MEDIUM_AND_ABOVE","name":"BLOCK_MEDIUM_AND_ABOVE","description":"Content with NEGLIGIBLE and LOW will be allowed"},{"value":"BLOCK_ONLY_HIGH","name":"BLOCK_ONLY_HIGH","description":"Content with NEGLIGIBLE, LOW, and MEDIUM will be allowed"},{"value":"BLOCK_NONE","name":"BLOCK_NONE","description":"All content will be allowed"}]}]}]}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatGoogleGemini/google.svg"},
|
|
46
|
-
{"displayName":"Google Vertex Chat Model","name":"lmChatGoogleVertex","group":["transform"],"version":1,"description":"Chat Model Google Vertex","defaults":{"name":"Google Vertex Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatgooglevertex/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"googleApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Project ID","name":"projectId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"description":"Select or enter your Google Cloud project ID","modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"gcpProjectsList"}},{"displayName":"ID","name":"id","type":"string"}]},{"displayName":"Model Name","name":"modelName","type":"string","description":"The model which will generate the completion. 
<a href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models\">Learn more</a>.","default":"gemini-2.5-flash"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxOutputTokens","default":2048,"description":"The maximum number of tokens to generate in the completion","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.4,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Top K","name":"topK","default":32,"typeOptions":{"maxValue":40,"minValue":-1,"numberPrecision":1},"description":"Used to remove \"long tail\" low probability responses. Defaults to -1, which disables it.","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Safety Settings","name":"safetySettings","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{"values":{"category":"HARM_CATEGORY_HARASSMENT","threshold":"HARM_BLOCK_THRESHOLD_UNSPECIFIED"}},"placeholder":"Add Option","options":[{"name":"values","displayName":"Values","values":[{"displayName":"Safety Category","name":"category","type":"options","description":"The category of harmful content to block","default":"HARM_CATEGORY_UNSPECIFIED","options":[{"value":"HARM_CATEGORY_HARASSMENT","name":"HARM_CATEGORY_HARASSMENT","description":"Harassment content"},{"value":"HARM_CATEGORY_HATE_SPEECH","name":"HARM_CATEGORY_HATE_SPEECH","description":"Hate speech and content"},{"value":"HARM_CATEGORY_SEXUALLY_EXPLICIT","name":"HARM_CATEGORY_SEXUALLY_EXPLICIT","description":"Sexually explicit content"},{"value":"HARM_CATEGORY_DANGEROUS_CONTENT","name":"HARM_CATEGORY_DANGEROUS_CONTENT","description":"Dangerous content"}]},{"displayName":"Safety Threshold","name":"threshold","type":"options","description":"The threshold of harmful content to block","default":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","options":[{"value":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","name":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","description":"Threshold is unspecified"},{"value":"BLOCK_LOW_AND_ABOVE","name":"BLOCK_LOW_AND_ABOVE","description":"Content with NEGLIGIBLE will be allowed"},{"value":"BLOCK_MEDIUM_AND_ABOVE","name":"BLOCK_MEDIUM_AND_ABOVE","description":"Content with NEGLIGIBLE and LOW will be allowed"},{"value":"BLOCK_ONLY_HIGH","name":"BLOCK_ONLY_HIGH","description":"Content with NEGLIGIBLE, LOW, and MEDIUM will be allowed"},{"value":"BLOCK_NONE","name":"BLOCK_NONE","description":"All content will be allowed"}]}]}]},{"displayName":"Thinking Budget","name":"thinkingBudget","default":-1,"description":"Controls reasoning tokens for thinking models. Set to 0 to disable automatic thinking. 
Set to -1 for dynamic thinking (default).","type":"number","typeOptions":{"minValue":-1,"numberPrecision":0}}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatGoogleVertex/google.svg"},
|
|
47
|
-
{"displayName":"Groq Chat Model","name":"lmChatGroq","group":["transform"],"version":1,"description":"Language Model Groq","defaults":{"name":"Groq Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatgroq/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"groqApi","required":true}],"requestDefaults":{"baseURL":"https://api.groq.com/openai/v1"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"filter","properties":{"pass":"={{ $responseItem.active === true && $responseItem.object === \"model\" }}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"description":"The model which will generate the completion. 
<a href=\"https://console.groq.com/docs/models\">Learn more</a>.","default":"llama3-8b-8192"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxTokensToSample","default":4096,"description":"The maximum number of tokens to generate in the completion","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatGroq/groq.svg"},
|
|
48
|
-
{"displayName":"Mistral Cloud Chat Model","name":"lmChatMistralCloud","group":["transform"],"version":1,"description":"For advanced usage with an AI chain","defaults":{"name":"Mistral Cloud Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatmistralcloud/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"mistralCloudApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"https://api.mistral.ai/v1"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. <a href=\"https://docs.mistral.ai/platform/endpoints/\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"filter","properties":{"pass":"={{ !$responseItem.id.includes('embed') }}"}},{"type":"setKeyValue","properties":{"name":"={{ $responseItem.id }}","value":"={{ $responseItem.id }}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"mistral-small"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Enable Safe Mode","name":"safeMode","default":false,"type":"boolean","description":"Whether to inject a safety prompt before all conversations"},{"displayName":"Random Seed","name":"randomSeed","type":"number","description":"The seed to use for random sampling. If set, different calls will generate deterministic results."}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatMistralCloud/mistral.svg"},
|
|
49
|
-
{"displayName":"MiniMax Chat Model","name":"lmChatMinimax","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"MiniMax Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatminimax/"}]},"alias":["minimax"]},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"minimaxApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. 
<a href=\"https://platform.minimax.io/docs/api-reference/text-openai-api\">Learn more</a>.","options":[{"name":"MiniMax-M2","value":"MiniMax-M2"},{"name":"MiniMax-M2.1","value":"MiniMax-M2.1"},{"name":"MiniMax-M2.1-Highspeed","value":"MiniMax-M2.1-highspeed"},{"name":"MiniMax-M2.5","value":"MiniMax-M2.5"},{"name":"MiniMax-M2.5-Highspeed","value":"MiniMax-M2.5-highspeed"},{"name":"MiniMax-M2.7","value":"MiniMax-M2.7"},{"name":"MiniMax-M2.7-Highspeed","value":"MiniMax-M2.7-highspeed"}],"default":"MiniMax-M2.7"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Hide Thinking","name":"hideThinking","default":true,"type":"boolean","description":"Whether to strip chain-of-thought reasoning from the response, returning only the final answer"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. The limit depends on the selected model.","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatMinimax/minimax.svg"},
|
|
50
|
-
{"displayName":"Moonshot Kimi Chat Model","name":"lmChatMoonshot","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"Moonshot Kimi Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatmoonshot/"}]},"alias":["kimi","moonshot"]},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"moonshotApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. 
<a href=\"https://platform.kimi.ai/docs/api/chat\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"kimi-k2.5"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. The limit depends on the selected model.","type":"number"},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatMoonshot/moonshot.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatMoonshot/moonshot.dark.svg"}},
|
|
42
|
+
{"displayName":"AWS Bedrock Chat Model","name":"lmChatAwsBedrock","group":["transform"],"version":[1,1.1],"description":"Language Model AWS Bedrock","defaults":{"name":"AWS Bedrock Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatawsbedrock/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"aws","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"=https://bedrock.{{$credentials?.region ?? \"eu-central-1\"}}.amazonaws.com"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model Source","name":"modelSource","type":"options","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}},"options":[{"name":"On-Demand Models","value":"onDemand","description":"Standard foundation models with on-demand pricing"},{"name":"Inference Profiles","value":"inferenceProfile","description":"Cross-region inference profiles (required for models like Claude Sonnet 4 and others)"}],"default":"onDemand","description":"Choose between on-demand foundation models or inference profiles"},{"displayName":"Model","name":"model","type":"options","allowArbitraryValues":true,"description":"The model which will generate the completion. 
<a href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/foundation-models.html\">Learn more</a>.","displayOptions":{"hide":{"modelSource":["inferenceProfile"]}},"typeOptions":{"loadOptionsDependsOn":["modelSource"],"loadOptions":{"routing":{"request":{"method":"GET","url":"/foundation-models?&byOutputModality=TEXT&byInferenceType=ON_DEMAND"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"modelSummaries"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.modelName}}","description":"={{$responseItem.modelArn}}","value":"={{$responseItem.modelId}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"","builderHint":{"message":"Default to the latest Claude Sonnet on Bedrock (anthropic.claude-sonnet-4-6 family). For Claude Sonnet 4+, switch Model Source to Inference Profiles. Avoid claude-sonnet-4-5, claude-3.x, and non-Claude legacy models unless requested."}},{"displayName":"Model","name":"model","type":"options","allowArbitraryValues":true,"description":"The inference profile which will generate the completion. 
<a href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-use.html\">Learn more</a>.","displayOptions":{"show":{"modelSource":["inferenceProfile"]}},"typeOptions":{"loadOptionsDependsOn":["modelSource"],"loadOptions":{"routing":{"request":{"method":"GET","url":"/inference-profiles?maxResults=1000"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"inferenceProfileSummaries"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.inferenceProfileName}}","description":"={{$responseItem.description || $responseItem.inferenceProfileArn}}","value":"={{$responseItem.inferenceProfileId}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"","builderHint":{"message":"Default to the latest Claude Sonnet inference profile (anthropic.claude-sonnet-4-6 family). Avoid claude-sonnet-4-5 and claude-3.x profiles unless specifically requested."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxTokensToSample","default":2000,"description":"The maximum number of tokens to generate in the completion","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatAwsBedrock/bedrock.svg"},
|
|
43
|
+
{"displayName":"Cohere Chat Model","name":"lmChatCohere","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"Cohere Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatcohere/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"cohereApi","required":true}],"requestDefaults":{"baseURL":"={{$credentials?.url}}","headers":{"accept":"application/json","authorization":"=Bearer {{$credentials?.apiKey}}"}},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. <a href=\"https://docs.cohere.com/docs/models\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/v1/models?page_size=100&endpoint=chat"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}","description":"={{$responseItem.description}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"default":"command-a-03-2025","builderHint":{"message":"Default to the latest Cohere Command A model (command-a-03-2025). 
Avoid command-r and command-light legacy variants."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatCohere/cohere.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatCohere/cohere.dark.svg"}},
|
|
44
|
+
{"displayName":"DeepSeek Chat Model","name":"lmChatDeepSeek","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"DeepSeek Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatdeepseek/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"deepSeekApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. 
<a href=\"https://api-docs.deepseek.com/quick_start/pricing\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"deepseek-chat","builderHint":{"message":"Default to the latest DeepSeek (deepseek-chat = V3.2 non-thinking, deepseek-reasoner = V3.2 thinking / R-series reasoning). Avoid older V3 and R1 snapshots."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatDeepSeek/deepseek.svg"},
|
|
45
|
+
{"displayName":"Google Gemini Chat Model","name":"lmChatGoogleGemini","group":["transform"],"version":[1,1.1],"description":"Chat Model Google Gemini","defaults":{"name":"Google Gemini Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatgooglegemini/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"googlePalmApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.host }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"modelName","type":"options","description":"The model which will generate the completion. <a href=\"https://developers.generativeai.google/api/rest/generativelanguage/models/list\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/v1beta/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"filter","properties":{"pass":"={{ !$responseItem.name.includes('embedding') }}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}","description":"={{$responseItem.description}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"models/gemini-2.5-flash","builderHint":{"message":"Default to the latest flagship Gemini (models/gemini-3.1-pro-preview). Use models/gemini-3.1-flash-lite for cost-efficient builds. 
Avoid Gemini 2.x, 1.x, and earlier."},"displayOptions":{"show":{"@version":[{"_cnd":{"eq":1}}]}}},{"displayName":"Model","name":"modelName","type":"options","description":"The model which will generate the completion. <a href=\"https://developers.generativeai.google/api/rest/generativelanguage/models/list\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/v1beta/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"filter","properties":{"pass":"={{ !$responseItem.name.includes('embedding') }}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}","description":"={{$responseItem.description}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"models/gemini-3-flash-preview","builderHint":{"message":"Default to the latest flagship Gemini (models/gemini-3.1-pro-preview). Use models/gemini-3.1-flash-lite for cost-efficient builds. Avoid Gemini 2.x, 1.x, and earlier."},"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxOutputTokens","default":2048,"description":"The maximum number of tokens to generate in the completion","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.4,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Top K","name":"topK","default":32,"typeOptions":{"maxValue":40,"minValue":-1,"numberPrecision":1},"description":"Used to remove \"long tail\" low probability responses. 
Defaults to -1, which disables it.","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Safety Settings","name":"safetySettings","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{"values":{"category":"HARM_CATEGORY_HARASSMENT","threshold":"HARM_BLOCK_THRESHOLD_UNSPECIFIED"}},"placeholder":"Add Option","options":[{"name":"values","displayName":"Values","values":[{"displayName":"Safety Category","name":"category","type":"options","description":"The category of harmful content to block","default":"HARM_CATEGORY_UNSPECIFIED","options":[{"value":"HARM_CATEGORY_HARASSMENT","name":"HARM_CATEGORY_HARASSMENT","description":"Harassment content"},{"value":"HARM_CATEGORY_HATE_SPEECH","name":"HARM_CATEGORY_HATE_SPEECH","description":"Hate speech and content"},{"value":"HARM_CATEGORY_SEXUALLY_EXPLICIT","name":"HARM_CATEGORY_SEXUALLY_EXPLICIT","description":"Sexually explicit content"},{"value":"HARM_CATEGORY_DANGEROUS_CONTENT","name":"HARM_CATEGORY_DANGEROUS_CONTENT","description":"Dangerous content"}]},{"displayName":"Safety Threshold","name":"threshold","type":"options","description":"The threshold of harmful content to block","default":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","options":[{"value":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","name":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","description":"Threshold is unspecified"},{"value":"BLOCK_LOW_AND_ABOVE","name":"BLOCK_LOW_AND_ABOVE","description":"Content with NEGLIGIBLE will be allowed"},{"value":"BLOCK_MEDIUM_AND_ABOVE","name":"BLOCK_MEDIUM_AND_ABOVE","description":"Content with NEGLIGIBLE and LOW will be allowed"},{"value":"BLOCK_ONLY_HIGH","name":"BLOCK_ONLY_HIGH","description":"Content with NEGLIGIBLE, LOW, and MEDIUM will 
be allowed"},{"value":"BLOCK_NONE","name":"BLOCK_NONE","description":"All content will be allowed"}]}]}]}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatGoogleGemini/google.svg"},
|
|
46
|
+
{"displayName":"Google Vertex Chat Model","name":"lmChatGoogleVertex","group":["transform"],"version":1,"description":"Chat Model Google Vertex","defaults":{"name":"Google Vertex Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatgooglevertex/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"googleApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Project ID","name":"projectId","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"description":"Select or enter your Google Cloud project ID","modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"gcpProjectsList"}},{"displayName":"ID","name":"id","type":"string"}]},{"displayName":"Model Name","name":"modelName","type":"string","description":"The model which will generate the completion. <a href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models\">Learn more</a>.","default":"gemini-2.5-flash","builderHint":{"message":"Default to the latest flagship Gemini on Vertex (gemini-3.1-pro). Use gemini-3.1-flash-lite for cost-efficient builds. 
Avoid Gemini 2.x, 1.x, and earlier."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxOutputTokens","default":2048,"description":"The maximum number of tokens to generate in the completion","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.4,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Top K","name":"topK","default":32,"typeOptions":{"maxValue":40,"minValue":-1,"numberPrecision":1},"description":"Used to remove \"long tail\" low probability responses. Defaults to -1, which disables it.","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Safety Settings","name":"safetySettings","type":"fixedCollection","typeOptions":{"multipleValues":true},"default":{"values":{"category":"HARM_CATEGORY_HARASSMENT","threshold":"HARM_BLOCK_THRESHOLD_UNSPECIFIED"}},"placeholder":"Add Option","options":[{"name":"values","displayName":"Values","values":[{"displayName":"Safety Category","name":"category","type":"options","description":"The category of harmful content to block","default":"HARM_CATEGORY_UNSPECIFIED","options":[{"value":"HARM_CATEGORY_HARASSMENT","name":"HARM_CATEGORY_HARASSMENT","description":"Harassment content"},{"value":"HARM_CATEGORY_HATE_SPEECH","name":"HARM_CATEGORY_HATE_SPEECH","description":"Hate speech and content"},{"value":"HARM_CATEGORY_SEXUALLY_EXPLICIT","name":"HARM_CATEGORY_SEXUALLY_EXPLICIT","description":"Sexually explicit content"},{"value":"HARM_CATEGORY_DANGEROUS_CONTENT","name":"HARM_CATEGORY_DANGEROUS_CONTENT","description":"Dangerous content"}]},{"displayName":"Safety Threshold","name":"threshold","type":"options","description":"The threshold of harmful content to block","default":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","options":[{"value":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","name":"HARM_BLOCK_THRESHOLD_UNSPECIFIED","description":"Threshold is unspecified"},{"value":"BLOCK_LOW_AND_ABOVE","name":"BLOCK_LOW_AND_ABOVE","description":"Content with NEGLIGIBLE will be allowed"},{"value":"BLOCK_MEDIUM_AND_ABOVE","name":"BLOCK_MEDIUM_AND_ABOVE","description":"Content with NEGLIGIBLE and LOW will be allowed"},{"value":"BLOCK_ONLY_HIGH","name":"BLOCK_ONLY_HIGH","description":"Content with NEGLIGIBLE, LOW, and MEDIUM will be allowed"},{"value":"BLOCK_NONE","name":"BLOCK_NONE","description":"All content will be allowed"}]}]}]},{"displayName":"Thinking Budget","name":"thinkingBudget","default":-1,"description":"Controls reasoning tokens for thinking models. Set to 0 to disable automatic thinking. 
Set to -1 for dynamic thinking (default).","type":"number","typeOptions":{"minValue":-1,"numberPrecision":0}}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatGoogleVertex/google.svg"},
|
|
47
|
+
{"displayName":"Groq Chat Model","name":"lmChatGroq","group":["transform"],"version":1,"description":"Language Model Groq","defaults":{"name":"Groq Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatgroq/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"groqApi","required":true}],"requestDefaults":{"baseURL":"https://api.groq.com/openai/v1"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"filter","properties":{"pass":"={{ $responseItem.active === true && $responseItem.object === \"model\" }}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"description":"The model which will generate the completion. <a href=\"https://console.groq.com/docs/models\">Learn more</a>.","default":"llama3-8b-8192","builderHint":{"message":"Default to a flagship model on Groq (openai/gpt-oss-120b, llama-3.3-70b-versatile, or moonshotai/kimi-k2-instruct-0905). 
Avoid the legacy llama3-8b-8192 default and older llama3/llama-2 variants."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxTokensToSample","default":4096,"description":"The maximum number of tokens to generate in the completion","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatGroq/groq.svg"},
|
|
48
|
+
{"displayName":"Mistral Cloud Chat Model","name":"lmChatMistralCloud","group":["transform"],"version":1,"description":"For advanced usage with an AI chain","defaults":{"name":"Mistral Cloud Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatmistralcloud/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"mistralCloudApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"https://api.mistral.ai/v1"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. <a href=\"https://docs.mistral.ai/platform/endpoints/\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"filter","properties":{"pass":"={{ !$responseItem.id.includes('embed') }}"}},{"type":"setKeyValue","properties":{"name":"={{ $responseItem.id }}","value":"={{ $responseItem.id }}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"mistral-small","builderHint":{"message":"Default to the latest flagship Mistral (mistral-large-2512, aka Mistral Large 3). Use mistral-small for cost-efficient builds. 
Avoid older dated snapshots and Medium/Small 2.x."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Enable Safe Mode","name":"safeMode","default":false,"type":"boolean","description":"Whether to inject a safety prompt before all conversations"},{"displayName":"Random Seed","name":"randomSeed","type":"number","description":"The seed to use for random sampling. If set, different calls will generate deterministic results."}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatMistralCloud/mistral.svg"},
|
|
49
|
+
{"displayName":"MiniMax Chat Model","name":"lmChatMinimax","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"MiniMax Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatminimax/"}]},"alias":["minimax"]},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"minimaxApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. <a href=\"https://platform.minimax.io/docs/api-reference/text-openai-api\">Learn more</a>.","options":[{"name":"MiniMax-M2","value":"MiniMax-M2"},{"name":"MiniMax-M2.1","value":"MiniMax-M2.1"},{"name":"MiniMax-M2.1-Highspeed","value":"MiniMax-M2.1-highspeed"},{"name":"MiniMax-M2.5","value":"MiniMax-M2.5"},{"name":"MiniMax-M2.5-Highspeed","value":"MiniMax-M2.5-highspeed"},{"name":"MiniMax-M2.7","value":"MiniMax-M2.7"},{"name":"MiniMax-M2.7-Highspeed","value":"MiniMax-M2.7-highspeed"}],"default":"MiniMax-M2.7","builderHint":{"message":"Default to the latest MiniMax-M2.x flagship (MiniMax-M2.7). 
Avoid MiniMax-M2 and earlier."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Hide Thinking","name":"hideThinking","default":true,"type":"boolean","description":"Whether to strip chain-of-thought reasoning from the response, returning only the final answer"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. The limit depends on the selected model.","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatMinimax/minimax.svg"},
|
|
50
|
+
{"displayName":"Moonshot Kimi Chat Model","name":"lmChatMoonshot","group":["transform"],"version":[1,1.1],"description":"For advanced usage with an AI chain","defaults":{"name":"Moonshot Kimi Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatmoonshot/"}]},"alias":["kimi","moonshot"]},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"moonshotApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. <a href=\"https://platform.kimi.ai/docs/api/chat\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"kimi-k2.5","builderHint":{"message":"Default to the latest Kimi model (kimi-k2.6). 
Avoid kimi-k2.5, kimi-k2, kimi-k1, and earlier."},"displayOptions":{"show":{"@version":[1]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. <a href=\"https://platform.kimi.ai/docs/api/chat\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"kimi-k2.6","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
The limit depends on the selected model.","type":"number"},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatMoonshot/moonshot.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatMoonshot/moonshot.dark.svg"}},
|
|
51
51
|
{"displayName":"Lemonade Chat Model","name":"lmChatLemonade","group":["transform"],"version":1,"description":"Language Model Lemonade Chat","defaults":{"name":"Lemonade Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatlemonade/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"lemonadeApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.baseUrl.replace(new RegExp(\"/$\"), \"\") }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","default":"","description":"The model which will generate the completion. Models are loaded and managed through the Lemonade server.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"required":true},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls the randomness of the generated text. 
Lower values make the output more focused and deterministic, while higher values make it more diverse and random.","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Chooses from the smallest possible set of tokens whose cumulative probability exceeds the probability top_p. Helps generate more human-like text by reducing repetitions.","type":"number"},{"displayName":"Frequency Penalty","name":"frequencyPenalty","type":"number","default":0,"typeOptions":{"minValue":-2,"maxValue":2,"numberPrecision":1},"description":"Adjusts the penalty for tokens that have already appeared in the generated text. Positive values discourage repetition, negative values encourage it."},{"displayName":"Presence Penalty","name":"presencePenalty","type":"number","default":0,"typeOptions":{"minValue":-2,"maxValue":2,"numberPrecision":1},"description":"Adjusts the penalty for tokens based on their presence in the generated text so far. Positive values penalize tokens that have already appeared, encouraging diversity."},{"displayName":"Max Tokens to Generate","name":"maxTokens","type":"number","default":-1,"description":"The maximum number of tokens to generate. Set to -1 for no limit. Be cautious when setting this to a large value, as it can lead to very long outputs."},{"displayName":"Stop Sequences","name":"stop","type":"string","default":"","description":"Comma-separated list of sequences where the model will stop generating text"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMChatLemonade/lemonade.svg"},
|
|
52
52
|
{"displayName":"Ollama Chat Model","name":"lmChatOllama","group":["transform"],"version":1,"description":"Language Model Ollama","defaults":{"name":"Ollama Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatollama/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"ollamaApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.baseUrl.replace(new RegExp(\"/$\"), \"\") }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","default":"llama3.2","description":"The model which will generate the completion. To download models, visit <a href=\"https://ollama.ai/library\">Ollama Models Library</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/api/tags"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"required":true},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls the randomness of the generated text. 
Lower values make the output more focused and deterministic, while higher values make it more diverse and random.","type":"number"},{"displayName":"Top K","name":"topK","default":-1,"typeOptions":{"maxValue":100,"minValue":-1,"numberPrecision":1},"description":"Limits the number of highest probability vocabulary tokens to consider at each step. A higher value increases diversity but may reduce coherence. Set to -1 to disable.","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Chooses from the smallest possible set of tokens whose cumulative probability exceeds the probability top_p. Helps generate more human-like text by reducing repetitions.","type":"number"},{"displayName":"Frequency Penalty","name":"frequencyPenalty","type":"number","default":0,"typeOptions":{"minValue":0},"description":"Adjusts the penalty for tokens that have already appeared in the generated text. Higher values discourage repetition."},{"displayName":"Keep Alive","name":"keepAlive","type":"string","default":"5m","description":"Specifies the duration to keep the loaded model in memory after use. Useful for frequently used models. Format: 1h30m (1 hour 30 minutes)."},{"displayName":"Low VRAM Mode","name":"lowVram","type":"boolean","default":false,"description":"Whether to Activate low VRAM mode, which reduces memory usage at the cost of slower generation speed. Useful for GPUs with limited memory."},{"displayName":"Main GPU ID","name":"mainGpu","type":"number","default":0,"description":"Specifies the ID of the GPU to use for the main computation. Only change this if you have multiple GPUs."},{"displayName":"Context Batch Size","name":"numBatch","type":"number","default":512,"description":"Sets the batch size for prompt processing. 
Larger batch sizes may improve generation speed but increase memory usage."},{"displayName":"Context Length","name":"numCtx","type":"number","default":2048,"description":"The maximum number of tokens to use as context for generating the next token. Smaller values reduce memory usage, while larger values provide more context to the model."},{"displayName":"Number of GPUs","name":"numGpu","type":"number","default":-1,"description":"Specifies the number of GPUs to use for parallel processing. Set to -1 for auto-detection."},{"displayName":"Max Tokens to Generate","name":"numPredict","type":"number","default":-1,"description":"The maximum number of tokens to generate. Set to -1 for no limit. Be cautious when setting this to a large value, as it can lead to very long outputs."},{"displayName":"Number of CPU Threads","name":"numThread","type":"number","default":0,"description":"Specifies the number of CPU threads to use for processing. Set to 0 for auto-detection."},{"displayName":"Penalize Newlines","name":"penalizeNewline","type":"boolean","default":true,"description":"Whether the model will be less likely to generate newline characters, encouraging longer continuous sequences of text"},{"displayName":"Presence Penalty","name":"presencePenalty","type":"number","default":0,"description":"Adjusts the penalty for tokens based on their presence in the generated text so far. Positive values penalize tokens that have already appeared, encouraging diversity."},{"displayName":"Repetition Penalty","name":"repeatPenalty","type":"number","default":1,"description":"Adjusts the penalty factor for repeated tokens. Higher values more strongly discourage repetition. Set to 1.0 to disable repetition penalty."},{"displayName":"Use Memory Locking","name":"useMLock","type":"boolean","default":false,"description":"Whether to lock the model in memory to prevent swapping. 
This can improve performance but requires sufficient available memory."},{"displayName":"Use Memory Mapping","name":"useMMap","type":"boolean","default":true,"description":"Whether to use memory mapping for loading the model. This can reduce memory usage but may impact performance. Recommended to keep enabled."},{"displayName":"Load Vocabulary Only","name":"vocabOnly","type":"boolean","default":false,"description":"Whether to only load the model vocabulary without the weights. Useful for quickly testing tokenization."},{"displayName":"Output Format","name":"format","type":"options","options":[{"name":"Default","value":"default"},{"name":"JSON","value":"json"}],"default":"default","description":"Specifies the format of the API response"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMChatOllama/ollama.svg"},
|
|
53
|
-
{"displayName":"OpenRouter Chat Model","name":"lmChatOpenRouter","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"OpenRouter Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenrouter/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"openRouterApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. 
<a href=\"https://openrouter.ai/docs/models\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"openai/gpt-4.1-mini"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatOpenRouter/openrouter.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatOpenRouter/openrouter.dark.svg"}},
|
|
54
|
-
{"displayName":"Vercel AI Gateway Chat Model","name":"lmChatVercelAiGateway","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain via Vercel AI Gateway","defaults":{"name":"Vercel AI Gateway Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatvercel/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"vercelAiGatewayApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. 
Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"openai/gpt-4o"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatVercelAiGateway/vercel.dark.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatVercelAiGateway/vercel.svg"}},
|
|
55
|
-
{"displayName":"xAI Grok Chat Model","name":"lmChatXAiGrok","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"xAI Grok Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatxaigrok/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"xAiApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. 
<a href=\"https://docs.x.ai/docs/models\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"grok-2-vision-1212"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatXAiGrok/logo.dark.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatXAiGrok/logo.svg"}},
|
|
56
|
-
{"displayName":"OpenAI Chat Model","name":"lmChatOpenAi","group":["transform"],"version":[1,1.1,1.2,1.3],"description":"For advanced usage with an AI chain","defaults":{"name":"OpenAI Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenai/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"openAiApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $parameter.options?.baseURL?.split(\"/\").slice(0,-1).join(\"/\") || $credentials?.url?.split(\"/\").slice(0,-1).join(\"/\") || \"https://api.openai.com\" }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.textFormat.textOptions.type":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. 
<a href=\"https://beta.openai.com/docs/models/overview\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"={{ $parameter.options?.baseURL?.split(\"/\").slice(-1).pop() || $credentials?.url?.split(\"/\").slice(-1).pop() || \"v1\" }}/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"filter","properties":{"pass":"={{\n\t\t\t\t\t\t\t\t\t\t\t\t($parameter.options?.baseURL && !$parameter.options?.baseURL?.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t\t\t\t($credentials?.url && !$credentials.url.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('ft:') ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('o1') ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('o3') ||\n\t\t\t\t\t\t\t\t\t\t\t\t($responseItem.id.startsWith('gpt-') && !$responseItem.id.includes('instruct'))\n\t\t\t\t\t\t\t\t\t\t\t}}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"gpt-5-mini","builderHint":{"message":"Always default to latest mini model gpt-5-mini"},"displayOptions":{"hide":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Model","name":"model","type":"resourceLocator","default":{"mode":"list","value":"gpt-5-mini"},"builderHint":{"message":"Always default to latest mini model gpt-5-mini"},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","placeholder":"Select a model...","typeOptions":{"searchListMethod":"searchModels","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"gpt-5-mini"}],"description":"The model. 
Choose from the list, or specify an ID.","displayOptions":{"hide":{"@version":[{"_cnd":{"lte":1.1}}]}}},{"displayName":"When using non-OpenAI models via \"Base URL\" override, not all models might be chat-compatible or support other features, like tools calling or JSON response format","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.baseURL":[{"_cnd":{"exists":true}}]}}},{"displayName":"Use Responses API","name":"responsesApiEnabled","type":"boolean","default":true,"description":"Whether to use the Responses API to generate the response. <a href=\"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenai/#use-responses-api\">Learn more</a>.","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}]}}},{"displayName":"Built-in Tools","name":"builtInTools","placeholder":"Add Built-in Tool","type":"collection","default":{},"options":[{"displayName":"Web Search","name":"webSearch","type":"collection","default":{"searchContextSize":"medium"},"options":[{"displayName":"Search Context Size","name":"searchContextSize","type":"options","default":"medium","description":"High level guidance for the amount of context window space to use for the search","options":[{"name":"Low","value":"low"},{"name":"Medium","value":"medium"},{"name":"High","value":"high"}]},{"displayName":"Web Search Allowed Domains","name":"allowedDomains","type":"string","default":"","description":"Comma-separated list of domains to search. Only domains in this list will be searched.","placeholder":"e.g. google.com, wikipedia.org"},{"displayName":"Country","name":"country","type":"string","default":"","placeholder":"e.g. US, GB"},{"displayName":"City","name":"city","type":"string","default":"","placeholder":"e.g. New York, London"},{"displayName":"Region","name":"region","type":"string","default":"","placeholder":"e.g. 
New York, London"}]},{"displayName":"File Search","name":"fileSearch","type":"collection","default":{"vectorStoreIds":"[]"},"options":[{"displayName":"Vector Store IDs","name":"vectorStoreIds","description":"The vector store IDs to use for the file search. Vector stores are managed via OpenAI Dashboard. <a href=\"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenai/#built-in-tools\">Learn more</a>.","type":"json","default":"[]","required":true},{"displayName":"Filters","name":"filters","type":"json","default":"{}"},{"displayName":"Max Results","name":"maxResults","type":"number","default":1,"typeOptions":{"minValue":1,"maxValue":50}}]},{"displayName":"Code Interpreter","name":"codeInterpreter","type":"boolean","default":true,"description":"Whether to allow the model to execute code in a sandboxed environment"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Base URL","name":"baseURL","default":"https://api.openai.com/v1","description":"Override the default base URL for the API","type":"string","displayOptions":{"hide":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":1.3}}]}}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[false]}}},{"displayName":"Response Format","name":"textFormat","type":"fixedCollection","default":{"textOptions":[{"type":"text"}]},"options":[{"displayName":"Text","name":"textOptions","values":[{"displayName":"Type","name":"type","type":"options","default":"","options":[{"name":"Text","value":"text"},{"name":"JSON Schema (recommended)","value":"json_schema"},{"name":"JSON Object","value":"json_object"}]},{"displayName":"Verbosity","name":"verbosity","type":"options","default":"medium","options":[{"name":"Low","value":"low"},{"name":"Medium","value":"medium"},{"name":"High","value":"high"}]},{"displayName":"Name","name":"name","type":"string","default":"my_schema","description":"The name of the response format. 
Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"All properties in the schema must be set to \"required\", when using \"strict\" mode.","name":"requiredNotice","type":"notice","default":"","displayOptions":{"show":{"strict":[true]}}},{"displayName":"Schema","name":"schema","type":"json","default":"{\n \"type\": \"object\",\n \"properties\": {\n \"message\": {\n \"type\": \"string\"\n }\n },\n \"additionalProperties\": false,\n \"required\": [\"message\"]\n}","description":"The schema of the response format","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"Description","name":"description","type":"string","default":"","description":"The description of the response format","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"Strict","name":"strict","type":"boolean","default":false,"description":"Whether to require that the AI will always generate responses that match the provided JSON Schema","displayOptions":{"show":{"type":["json_schema"]}}}]}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Reasoning Effort","name":"reasoningEffort","default":"medium","description":"Controls the amount of reasoning tokens to use. 
A value of \"low\" will favor speed and economical token usage, \"high\" will favor more complete reasoning at the cost of more tokens generated and slower responses.","type":"options","options":[{"name":"Low","value":"low","description":"Favors speed and economical token usage"},{"name":"Medium","value":"medium","description":"Balance between speed and reasoning accuracy"},{"name":"High","value":"high","description":"Favors more complete reasoning at the cost of more tokens generated and slower responses"}],"displayOptions":{"show":{"/model":[{"_cnd":{"regex":"(^o1([-\\d]+)?$)|(^o[3-9].*)|(^gpt-5.*)"}}]}}},{"displayName":"Timeout","name":"timeout","default":60000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Conversation ID","name":"conversationId","default":"","description":"The conversation that this response belongs to. 
Input items and output items from this response are automatically added to this conversation after this response completes.","type":"string","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Prompt Cache Key","name":"promptCacheKey","type":"string","default":"","description":"Used by OpenAI to cache responses for similar requests to optimize your cache hit rates","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Safety Identifier","name":"safetyIdentifier","type":"string","default":"","description":"A stable identifier used to help detect users of your application that may be violating OpenAI's usage policies. The IDs should be a string that uniquely identifies each user.","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Service Tier","name":"serviceTier","type":"options","default":"auto","description":"The service tier to use for the request","options":[{"name":"Auto","value":"auto"},{"name":"Flex","value":"flex"},{"name":"Default","value":"default"},{"name":"Priority","value":"priority"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Metadata","name":"metadata","type":"json","description":"Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. 
Values are strings with a maximum length of 512 characters.","default":"{}","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Top Logprobs","name":"topLogprobs","type":"number","default":0,"description":"An integer between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability","typeOptions":{"minValue":0,"maxValue":20},"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Prompt","name":"promptConfig","type":"fixedCollection","description":"Configure the reusable prompt template configured via OpenAI Dashboard. <a href=\"https://platform.openai.com/docs/guides/prompt-engineering#reusable-prompts\">Learn more</a>.","default":{"promptOptions":[{"promptId":""}]},"options":[{"displayName":"Prompt","name":"promptOptions","values":[{"displayName":"Prompt ID","name":"promptId","type":"string","default":"","description":"The unique identifier of the prompt template to use"},{"displayName":"Version","name":"version","type":"string","default":"","description":"Optional version of the prompt template"},{"displayName":"Variables","name":"variables","type":"json","default":"{}","description":"Variables to be substituted into the prompt template"}]}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMChatOpenAi/openAiLight.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMChatOpenAi/openAiLight.dark.svg"}},
|
|
53
|
+
{"displayName":"OpenRouter Chat Model","name":"lmChatOpenRouter","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"OpenRouter Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenrouter/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"openRouterApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. <a href=\"https://openrouter.ai/docs/models\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"openai/gpt-4.1-mini","builderHint":{"message":"Default to a recent, widely available model from the /models list (the node default openai/gpt-4.1-mini is a safe choice). 
Avoid deprecated or discontinued model IDs."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatOpenRouter/openrouter.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatOpenRouter/openrouter.dark.svg"}},
|
|
54
|
+
{"displayName":"Vercel AI Gateway Chat Model","name":"lmChatVercelAiGateway","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain via Vercel AI Gateway","defaults":{"name":"Vercel AI Gateway Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatvercel/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"vercelAiGatewayApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"openai/gpt-4o","builderHint":{"message":"Prefer a recent model from the gateway's /models list; the openai/gpt-4o default may be outdated. 
Avoid deprecated or discontinued model IDs."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatVercelAiGateway/vercel.dark.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatVercelAiGateway/vercel.svg"}},
|
|
55
|
+
{"displayName":"xAI Grok Chat Model","name":"lmChatXAiGrok","group":["transform"],"version":[1],"description":"For advanced usage with an AI chain","defaults":{"name":"xAI Grok Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatxaigrok/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"xAiApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials?.url }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. <a href=\"https://docs.x.ai/docs/models\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"grok-2-vision-1212","builderHint":{"message":"Default to the latest generally available Grok model from the /models list. 
Avoid deprecated or discontinued model IDs."}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}]},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":360000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatXAiGrok/logo.dark.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatXAiGrok/logo.svg"}},
|
|
56
|
+
{"displayName":"OpenAI Chat Model","name":"lmChatOpenAi","group":["transform"],"version":[1,1.1,1.2,1.3],"description":"For advanced usage with an AI chain","defaults":{"name":"OpenAI Chat Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Chat Models (Recommended)"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenai/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"openAiApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $parameter.options?.baseURL?.split(\"/\").slice(0,-1).join(\"/\") || $credentials?.url?.split(\"/\").slice(0,-1).join(\"/\") || \"https://api.openai.com\" }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.responseFormat":["json_object"]}}},{"displayName":"If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.textFormat.textOptions.type":["json_object"]}}},{"displayName":"Model","name":"model","type":"options","description":"The model which will generate the completion. 
<a href=\"https://beta.openai.com/docs/models/overview\">Learn more</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"={{ $parameter.options?.baseURL?.split(\"/\").slice(-1).pop() || $credentials?.url?.split(\"/\").slice(-1).pop() || \"v1\" }}/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"filter","properties":{"pass":"={{\n\t\t\t\t\t\t\t\t\t\t\t\t($parameter.options?.baseURL && !$parameter.options?.baseURL?.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t\t\t\t($credentials?.url && !$credentials.url.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('ft:') ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('o1') ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('o3') ||\n\t\t\t\t\t\t\t\t\t\t\t\t($responseItem.id.startsWith('gpt-') && !$responseItem.id.includes('instruct'))\n\t\t\t\t\t\t\t\t\t\t\t}}"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"default":"gpt-5-mini","builderHint":{"message":"Always default to latest mini model gpt-5-mini"},"displayOptions":{"hide":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Model","name":"model","type":"resourceLocator","default":{"mode":"list","value":"gpt-5-mini"},"builderHint":{"message":"Always default to latest mini model gpt-5-mini"},
"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","placeholder":"Select a model...","typeOptions":{"searchListMethod":"searchModels","searchable":true}},{"displayName":"ID","name":"id","type":"string","placeholder":"gpt-5-mini"}],"description":"The model. Choose from the list, or specify an ID.","displayOptions":{"hide":{"@version":[{"_cnd":{"lte":1.1}}]}}},{"displayName":"When using non-OpenAI models via \"Base URL\" override, not all models might be chat-compatible or support other features, like tools calling or JSON response format","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.baseURL":[{"_cnd":{"exists":true}}]}}},{"displayName":"Use Responses API","name":"responsesApiEnabled","type":"boolean","default":true,"description":"Whether to use the Responses API to generate the response. <a href=\"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenai/#use-responses-api\">Learn more</a>.","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}]}}},{"displayName":"Built-in Tools","name":"builtInTools","placeholder":"Add Built-in Tool","type":"collection","default":{},"options":[{"displayName":"Web Search","name":"webSearch","type":"collection","default":{"searchContextSize":"medium"},"options":[{"displayName":"Search Context Size","name":"searchContextSize","type":"options","default":"medium","description":"High level guidance for the amount of context window space to use for the search","options":[{"name":"Low","value":"low"},{"name":"Medium","value":"medium"},{"name":"High","value":"high"}]},{"displayName":"Web Search Allowed Domains","name":"allowedDomains","type":"string","default":"","description":"Comma-separated list of domains to search. Only domains in this list will be searched.","placeholder":"e.g. 
google.com, wikipedia.org"},{"displayName":"Country","name":"country","type":"string","default":"","placeholder":"e.g. US, GB"},{"displayName":"City","name":"city","type":"string","default":"","placeholder":"e.g. New York, London"},{"displayName":"Region","name":"region","type":"string","default":"","placeholder":"e.g. New York, London"}]},{"displayName":"File Search","name":"fileSearch","type":"collection","default":{"vectorStoreIds":"[]"},"options":[{"displayName":"Vector Store IDs","name":"vectorStoreIds","description":"The vector store IDs to use for the file search. Vector stores are managed via OpenAI Dashboard. <a href=\"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenai/#built-in-tools\">Learn more</a>.","type":"json","default":"[]","required":true},{"displayName":"Filters","name":"filters","type":"json","default":"{}"},{"displayName":"Max Results","name":"maxResults","type":"number","default":1,"typeOptions":{"minValue":1,"maxValue":50}}]},{"displayName":"Code Interpreter","name":"codeInterpreter","type":"boolean","default":true,"description":"Whether to allow the model to execute code in a sandboxed environment"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Base URL","name":"baseURL","default":"https://api.openai.com/v1","description":"Override the default base URL for the API","type":"string","displayOptions":{"hide":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line 
verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":1.3}}]}}},{"displayName":"Response Format","name":"responseFormat","default":"text","type":"options","options":[{"name":"Text","value":"text","description":"Regular text response"},{"name":"JSON","value":"json_object","description":"Enables JSON mode, which should guarantee the message the model generates is valid JSON"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[false]}}},{"displayName":"Response Format","name":"textFormat","type":"fixedCollection","default":{"textOptions":[{"type":"text"}]},"options":[{"displayName":"Text","name":"textOptions","values":[{"displayName":"Type","name":"type","type":"options","default":"","options":[{"name":"Text","value":"text"},{"name":"JSON Schema (recommended)","value":"json_schema"},{"name":"JSON Object","value":"json_object"}]},{"displayName":"Verbosity","name":"verbosity","type":"options","default":"medium","options":[{"name":"Low","value":"low"},{"name":"Medium","value":"medium"},{"name":"High","value":"high"}]},{"displayName":"Name","name":"name","type":"string","default":"my_schema","description":"The name of the response format. 
Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"All properties in the schema must be set to \"required\", when using \"strict\" mode.","name":"requiredNotice","type":"notice","default":"","displayOptions":{"show":{"strict":[true]}}},{"displayName":"Schema","name":"schema","type":"json","default":"{\n \"type\": \"object\",\n \"properties\": {\n \"message\": {\n \"type\": \"string\"\n }\n },\n \"additionalProperties\": false,\n \"required\": [\"message\"]\n}","description":"The schema of the response format","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"Description","name":"description","type":"string","default":"","description":"The description of the response format","displayOptions":{"show":{"type":["json_schema"]}}},{"displayName":"Strict","name":"strict","type":"boolean","default":false,"description":"Whether to require that the AI will always generate responses that match the provided JSON Schema","displayOptions":{"show":{"type":["json_schema"]}}}]}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Reasoning Effort","name":"reasoningEffort","default":"medium","description":"Controls the amount of reasoning tokens to use. 
A value of \"low\" will favor speed and economical token usage, \"high\" will favor more complete reasoning at the cost of more tokens generated and slower responses.","type":"options","options":[{"name":"Low","value":"low","description":"Favors speed and economical token usage"},{"name":"Medium","value":"medium","description":"Balance between speed and reasoning accuracy"},{"name":"High","value":"high","description":"Favors more complete reasoning at the cost of more tokens generated and slower responses"}],"displayOptions":{"show":{"/model":[{"_cnd":{"regex":"(^o1([-\\d]+)?$)|(^o[3-9].*)|(^gpt-5.*)"}}]}}},{"displayName":"Timeout","name":"timeout","default":60000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"},{"displayName":"Conversation ID","name":"conversationId","default":"","description":"The conversation that this response belongs to. 
Input items and output items from this response are automatically added to this conversation after this response completes.","type":"string","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Prompt Cache Key","name":"promptCacheKey","type":"string","default":"","description":"Used by OpenAI to cache responses for similar requests to optimize your cache hit rates","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Safety Identifier","name":"safetyIdentifier","type":"string","default":"","description":"A stable identifier used to help detect users of your application that may be violating OpenAI's usage policies. The IDs should be a string that uniquely identifies each user.","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Service Tier","name":"serviceTier","type":"options","default":"auto","description":"The service tier to use for the request","options":[{"name":"Auto","value":"auto"},{"name":"Flex","value":"flex"},{"name":"Default","value":"default"},{"name":"Priority","value":"priority"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Metadata","name":"metadata","type":"json","description":"Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. 
Values are strings with a maximum length of 512 characters.","default":"{}","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Top Logprobs","name":"topLogprobs","type":"number","default":0,"description":"An integer between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability","typeOptions":{"minValue":0,"maxValue":20},"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}},{"displayName":"Prompt","name":"promptConfig","type":"fixedCollection","description":"Configure the reusable prompt template configured via OpenAI Dashboard. <a href=\"https://platform.openai.com/docs/guides/prompt-engineering#reusable-prompts\">Learn more</a>.","default":{"promptOptions":[{"promptId":""}]},"options":[{"displayName":"Prompt","name":"promptOptions","values":[{"displayName":"Prompt ID","name":"promptId","type":"string","default":"","description":"The unique identifier of the prompt template to use"},{"displayName":"Version","name":"version","type":"string","default":"","description":"Optional version of the prompt template"},{"displayName":"Variables","name":"variables","type":"json","default":"{}","description":"Variables to be substituted into the prompt template"}]}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.3}}],"/responsesApiEnabled":[true]}}}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMChatOpenAi/openAiLight.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMChatOpenAi/openAiLight.dark.svg"}},
|
|
57
57
|
{"displayName":"OpenAI Model","name":"lmOpenAi","hidden":true,"group":["transform"],"version":1,"description":"For advanced usage with an AI chain","defaults":{"name":"OpenAI Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Text Completion Models"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmopenai/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"openAiApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $parameter.options?.baseURL?.split(\"/\").slice(0,-1).join(\"/\") || \"https://api.openai.com\" }}"},"properties":[{"displayName":"This node is using OpenAI completions which are now deprecated. Please use the OpenAI Chat Model node instead.","name":"deprecated","type":"notice","default":""},{"displayName":"Model","name":"model","type":"resourceLocator","default":{"mode":"list","value":"gpt-3.5-turbo-instruct"},"required":true,"description":"The model which will generate the completion. 
<a href=\"https://beta.openai.com/docs/models/overview\">Learn more</a>.","modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"openAiModelSearch"}},{"displayName":"ID","name":"id","type":"string"}],"routing":{"send":{"type":"body","property":"model","value":"={{$parameter.model.value}}"}}},{"displayName":"When using non OpenAI models via Base URL override, not all models might be chat-compatible or support other features, like tools calling or JSON response format.","name":"notice","type":"notice","default":"","displayOptions":{"show":{"/options.baseURL":[{"_cnd":{"exists":true}}]}}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Base URL","name":"baseURL","default":"https://api.openai.com/v1","description":"Override the default base URL for the API","type":"string"},{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":-1,"description":"The maximum number of tokens to generate in the completion. 
Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Timeout","name":"timeout","default":60000,"description":"Maximum amount of time a request is allowed to take in milliseconds","type":"number"},{"displayName":"Max Retries","name":"maxRetries","default":2,"description":"Maximum number of retries to attempt","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMOpenAi/openAiLight.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMOpenAi/openAiLight.dark.svg"}},
|
|
58
58
|
{"displayName":"Cohere Model","name":"lmCohere","group":["transform"],"version":1,"description":"Language Model Cohere","defaults":{"name":"Cohere Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Text Completion Models"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmcohere/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"cohereApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":250,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Model","name":"model","type":"string","description":"The name of the model to use","default":""},{"displayName":"Sampling Temperature","name":"temperature","default":0,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMCohere/cohere.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMCohere/cohere.dark.svg"}},
|
|
59
59
|
{"displayName":"Lemonade Model","name":"lmLemonade","group":["transform"],"version":1,"description":"Language Model Lemonade","defaults":{"name":"Lemonade Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Text Completion Models"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmlemonade/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"lemonadeApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.baseUrl.replace(new RegExp(\"/$\"), \"\") }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","default":"","description":"The model which will generate the completion. Models are loaded and managed through the Lemonade server.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/models"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"data"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.id}}","value":"={{$responseItem.id}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"required":true},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":2,"minValue":0,"numberPrecision":1},"description":"Controls the randomness of the generated text. 
Lower values make the output more focused and deterministic, while higher values make it more diverse and random.","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Chooses from the smallest possible set of tokens whose cumulative probability exceeds the probability top_p. Helps generate more human-like text by reducing repetitions.","type":"number"},{"displayName":"Frequency Penalty","name":"frequencyPenalty","type":"number","default":0,"typeOptions":{"minValue":-2,"maxValue":2,"numberPrecision":1},"description":"Adjusts the penalty for tokens that have already appeared in the generated text. Positive values discourage repetition, negative values encourage it."},{"displayName":"Presence Penalty","name":"presencePenalty","type":"number","default":0,"typeOptions":{"minValue":-2,"maxValue":2,"numberPrecision":1},"description":"Adjusts the penalty for tokens based on their presence in the generated text so far. Positive values penalize tokens that have already appeared, encouraging diversity."},{"displayName":"Max Tokens to Generate","name":"maxTokens","type":"number","default":-1,"description":"The maximum number of tokens to generate. Set to -1 for no limit. Be cautious when setting this to a large value, as it can lead to very long outputs."},{"displayName":"Stop Sequences","name":"stop","type":"string","default":"","description":"Comma-separated list of sequences where the model will stop generating text"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMLemonade/lemonade.svg"},
|
|
@@ -86,7 +86,7 @@
|
|
|
86
86
|
{"displayName":"Code Tool","name":"toolCode","icon":"fa:code","iconColor":"black","group":["transform"],"version":[1,1.1,1.2,1.3],"description":"Write a tool in JS or Python","defaults":{"name":"Code Tool"},"codex":{"categories":["AI"],"subcategories":{"AI":["Tools"],"Tools":["Recommended Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolcode/"}]}},"inputs":[],"outputs":["ai_tool"],"outputNames":["Tool"],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"See an example of a conversational agent with custom tool written in JavaScript <a href=\"/templates/1963\" target=\"_blank\">here</a>.","name":"noticeTemplateExample","type":"notice","default":""},{"displayName":"Name","name":"name","type":"string","default":"","placeholder":"My_Tool","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Name","name":"name","type":"string","default":"","placeholder":"e.g. My_Tool","validateType":"string-alphanumeric","description":"The name of the function to be called, could contain letters, numbers, and underscores only","displayOptions":{"show":{"@version":[1.1]}}},{"displayName":"Description","name":"description","type":"string","default":"","placeholder":"Call this tool to get a random color. 
The input should be a string with comma separted names of colors to exclude.","typeOptions":{"rows":3}},{"displayName":"Language","name":"language","type":"options","noDataExpression":true,"options":[{"name":"JavaScript","value":"javaScript"},{"name":"Python (Beta)","value":"python"}],"default":"javaScript"},{"displayName":"JavaScript","name":"jsCode","type":"string","displayOptions":{"show":{"language":["javaScript"]}},"typeOptions":{"editor":"jsEditor"},"default":"// Example: convert the incoming query to uppercase and return it\nreturn query.toUpperCase()","hint":"You can access the input the tool receives via the input property \"query\". The returned value should be a single string.","description":"E.g. Converts any text to uppercase","noDataExpression":true},{"displayName":"Python","name":"pythonCode","type":"string","displayOptions":{"show":{"language":["python"]}},"typeOptions":{"editor":"codeNodeEditor","editorLanguage":"python"},"default":"# Example: convert the incoming query to uppercase and return it\nreturn _query.upper()","hint":"You can access the input the tool receives via the input property \"_query\". The returned value should be a single string.","description":"E.g. Converts any text to uppercase","noDataExpression":true},{"displayName":"Specify Input Schema","name":"specifyInputSchema","type":"boolean","description":"Whether to specify the schema for the function. 
This would require the LLM to provide the input in the correct format and would validate it against the schema.","noDataExpression":true,"default":false},{"displayName":"Schema Type","name":"schemaType","type":"options","noDataExpression":true,"options":[{"name":"Generate From JSON Example","value":"fromJson","description":"Generate a schema from an example JSON object"},{"name":"Define using JSON Schema","value":"manual","description":"Define the JSON schema manually"}],"default":"fromJson","description":"How to specify the schema for the function","displayOptions":{"show":{"specifyInputSchema":[true]}}},{"displayName":"JSON Example","name":"jsonSchemaExample","type":"json","default":"{\n\t\"some_input\": \"some_value\"\n}","noDataExpression":true,"typeOptions":{"rows":10},"displayOptions":{"show":{"specifyInputSchema":[true],"schemaType":["fromJson"]}},"description":"Example JSON object to use to generate the schema"},{"displayName":"All properties will be required. To make them optional, use the 'JSON Schema' schema type instead","name":"notice","type":"notice","default":"","displayOptions":{"show":{"specifyInputSchema":[true],"@version":[{"_cnd":{"gte":1.3}}],"schemaType":["fromJson"]}}},{"displayName":"Input Schema","name":"inputSchema","type":"json","default":"{\n\"type\": \"object\",\n\"properties\": {\n\t\"some_input\": {\n\t\t\"type\": \"string\",\n\t\t\"description\": \"Some input to the function\"\n\t\t}\n\t}\n}","noDataExpression":false,"typeOptions":{"rows":10},"displayOptions":{"show":{"specifyInputSchema":[true],"schemaType":["manual"]}},"description":"Schema to use for the function","hint":"Use <a target=\"_blank\" href=\"https://json-schema.org/\">JSON Schema</a> format (<a target=\"_blank\" href=\"https://json-schema.org/learn/miscellaneous-examples.html\">examples</a>). $refs syntax is currently not supported."}]},
|
|
87
87
|
{"displayName":"HTTP Request Tool","name":"toolHttpRequest","group":["output"],"version":[1,1.1],"description":"Makes an HTTP request and returns the response data","subtitle":"={{ $parameter.toolDescription }}","defaults":{"name":"HTTP Request"},"credentials":[],"codex":{"categories":["AI"],"subcategories":{"AI":["Tools"],"Tools":["Recommended Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolhttprequest/"}]}},"hidden":true,"inputs":[],"outputs":["ai_tool"],"outputNames":["Tool"],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Description","name":"toolDescription","type":"string","description":"Explain to LLM what this tool does, better description would allow LLM to produce expected result","placeholder":"e.g. Get the current weather in the requested city","default":"","typeOptions":{"rows":3}},{"displayName":"Method","name":"method","type":"options","options":[{"name":"DELETE","value":"DELETE"},{"name":"GET","value":"GET"},{"name":"PATCH","value":"PATCH"},{"name":"POST","value":"POST"},{"name":"PUT","value":"PUT"}],"default":"GET"},{"displayName":"Tip: You can use a {placeholder} for any part of the request to be filled by the model. Provide more context about them in the placeholders section","name":"placeholderNotice","type":"notice","default":""},{"displayName":"URL","name":"url","type":"string","default":"","required":true,"placeholder":"e.g. 
http://www.example.com/{path}"},{"displayName":"Authentication","name":"authentication","description":"Select the type of authentication to use if needed, authentication would be done by n8n and your credentials will not be shared with the LLM","noDataExpression":true,"type":"options","options":[{"name":"None","value":"none"},{"name":"Predefined Credential Type","value":"predefinedCredentialType","description":"We've already implemented auth for many services so that you don't have to set it up manually"},{"name":"Generic Credential Type","value":"genericCredentialType","description":"Fully customizable. Choose between basic, header, OAuth2, etc."}],"default":"none"},{"displayName":"Credential Type","name":"nodeCredentialType","type":"credentialsSelect","noDataExpression":true,"required":true,"default":"","credentialTypes":["extends:oAuth2Api","extends:oAuth1Api","has:authenticate"],"displayOptions":{"show":{"authentication":["predefinedCredentialType"]}}},{"displayName":"Make sure you have specified the scope(s) for the Service Account in the credential","name":"googleApiWarning","type":"notice","default":"","displayOptions":{"show":{"nodeCredentialType":["googleApi"]}}},{"displayName":"Generic Auth Type","name":"genericAuthType","type":"credentialsSelect","required":true,"default":"","credentialTypes":["has:genericAuth"],"displayOptions":{"show":{"authentication":["genericCredentialType"]}}},{"displayName":"Send Query Parameters","name":"sendQuery","type":"boolean","default":false,"noDataExpression":true,"description":"Whether the request has query params or not"},{"displayName":"Specify Query Parameters","name":"specifyQuery","type":"options","options":[{"name":"Using Fields Below","value":"keypair"},{"name":"Using JSON Below","value":"json"},{"name":"Let Model Specify Entire Body","value":"model"}],"default":"keypair","displayOptions":{"show":{"sendQuery":[true]}}},{"displayName":"Query 
Parameters","name":"parametersQuery","type":"fixedCollection","typeOptions":{"multipleValues":true},"placeholder":"Add Parameter","default":{"values":[{"name":""}]},"options":[{"name":"values","displayName":"Values","values":[{"displayName":"Name","name":"name","type":"string","default":""},{"displayName":"Value Provided","name":"valueProvider","type":"options","options":[{"name":"By Model (and is required)","value":"modelRequired"},{"name":"By Model (but is optional)","value":"modelOptional"},{"name":"Using Field Below","value":"fieldValue"}],"default":"modelRequired"},{"displayName":"Value","name":"value","type":"string","default":"","hint":"Use a {placeholder} for any data to be filled in by the model","displayOptions":{"show":{"valueProvider":["fieldValue"]}}}]}],"displayOptions":{"show":{"sendQuery":[true],"specifyQuery":["keypair"]}}},{"displayName":"JSON","name":"jsonQuery","type":"string","typeOptions":{"rows":5},"hint":"Use a {placeholder} for any data to be filled in by the model","default":"","displayOptions":{"show":{"sendQuery":[true],"specifyQuery":["json"]}}},{"displayName":"Send Headers","name":"sendHeaders","type":"boolean","default":false,"noDataExpression":true,"description":"Whether the request has headers or not"},{"displayName":"Specify Headers","name":"specifyHeaders","type":"options","options":[{"name":"Using Fields Below","value":"keypair"},{"name":"Using JSON Below","value":"json"},{"name":"Let Model Specify Entire Body","value":"model"}],"default":"keypair","displayOptions":{"show":{"sendHeaders":[true]}}},{"displayName":"Header Parameters","name":"parametersHeaders","type":"fixedCollection","typeOptions":{"multipleValues":true},"placeholder":"Add Parameter","default":{"values":[{"name":""}]},"options":[{"name":"values","displayName":"Values","values":[{"displayName":"Name","name":"name","type":"string","default":""},{"displayName":"Value Provided","name":"valueProvider","type":"options","options":[{"name":"By Model (and is 
required)","value":"modelRequired"},{"name":"By Model (but is optional)","value":"modelOptional"},{"name":"Using Field Below","value":"fieldValue"}],"default":"modelRequired"},{"displayName":"Value","name":"value","type":"string","default":"","hint":"Use a {placeholder} for any data to be filled in by the model","displayOptions":{"show":{"valueProvider":["fieldValue"]}}}]}],"displayOptions":{"show":{"sendHeaders":[true],"specifyHeaders":["keypair"]}}},{"displayName":"JSON","name":"jsonHeaders","type":"string","typeOptions":{"rows":5},"hint":"Use a {placeholder} for any data to be filled in by the model","default":"","displayOptions":{"show":{"sendHeaders":[true],"specifyHeaders":["json"]}}},{"displayName":"Send Body","name":"sendBody","type":"boolean","default":false,"noDataExpression":true,"description":"Whether the request has body or not"},{"displayName":"Specify Body","name":"specifyBody","type":"options","options":[{"name":"Using Fields Below","value":"keypair"},{"name":"Using JSON Below","value":"json"},{"name":"Let Model Specify Entire Body","value":"model"}],"default":"keypair","displayOptions":{"show":{"sendBody":[true]}}},{"displayName":"Body Parameters","name":"parametersBody","type":"fixedCollection","typeOptions":{"multipleValues":true},"placeholder":"Add Parameter","default":{"values":[{"name":""}]},"options":[{"name":"values","displayName":"Values","values":[{"displayName":"Name","name":"name","type":"string","default":""},{"displayName":"Value Provided","name":"valueProvider","type":"options","options":[{"name":"By Model (and is required)","value":"modelRequired"},{"name":"By Model (but is optional)","value":"modelOptional"},{"name":"Using Field Below","value":"fieldValue"}],"default":"modelRequired"},{"displayName":"Value","name":"value","type":"string","default":"","hint":"Use a {placeholder} for any data to be filled in by the 
model","displayOptions":{"show":{"valueProvider":["fieldValue"]}}}]}],"displayOptions":{"show":{"sendBody":[true],"specifyBody":["keypair"]}}},{"displayName":"JSON","name":"jsonBody","type":"string","typeOptions":{"rows":5},"hint":"Use a {placeholder} for any data to be filled in by the model","default":"","displayOptions":{"show":{"sendBody":[true],"specifyBody":["json"]}}},{"displayName":"Placeholder Definitions","name":"placeholderDefinitions","type":"fixedCollection","typeOptions":{"multipleValues":true},"placeholder":"Add Definition","default":[],"options":[{"name":"values","displayName":"Values","values":[{"displayName":"Placeholder Name","name":"name","type":"string","default":""},{"displayName":"Description","name":"description","type":"string","default":""},{"displayName":"Type","name":"type","type":"options","options":[{"name":"Not Specified (Default)","value":"not specified"},{"name":"String","value":"string"},{"name":"Number","value":"number"},{"name":"Boolean","value":"boolean"},{"name":"JSON","value":"json"}],"default":"not specified"}]}]},{"displayName":"Optimize Response","name":"optimizeResponse","type":"boolean","default":false,"noDataExpression":true,"description":"Whether the optimize the tool response to reduce amount of data passed to the LLM that could lead to better result and reduce cost"},{"displayName":"Expected Response Type","name":"responseType","type":"options","displayOptions":{"show":{"optimizeResponse":[true]}},"options":[{"name":"JSON","value":"json"},{"name":"HTML","value":"html"},{"name":"Text","value":"text"}],"default":"json"},{"displayName":"Field Containing Data","name":"dataField","type":"string","default":"","placeholder":"e.g. 
records","description":"Specify the name of the field in the response containing the data","hint":"leave blank to use whole response","requiresDataPath":"single","displayOptions":{"show":{"optimizeResponse":[true],"responseType":["json"]}}},{"displayName":"Include Fields","name":"fieldsToInclude","type":"options","description":"What fields response object should include","default":"all","displayOptions":{"show":{"optimizeResponse":[true],"responseType":["json"]}},"options":[{"name":"All","value":"all","description":"Include all fields"},{"name":"Selected","value":"selected","description":"Include only fields specified below"},{"name":"Except","value":"except","description":"Exclude fields specified below"}]},{"displayName":"Fields","name":"fields","type":"string","default":"","placeholder":"e.g. field1,field2","description":"Comma-separated list of the field names. Supports dot notation. You can drag the selected fields from the input panel.","requiresDataPath":"multiple","displayOptions":{"show":{"optimizeResponse":[true],"responseType":["json"]},"hide":{"fieldsToInclude":["all"]}}},{"displayName":"Selector (CSS)","name":"cssSelector","type":"string","description":"Select specific element(e.g. body) or multiple elements(e.g. div) of chosen type in the response HTML.","placeholder":"e.g. body","default":"body","displayOptions":{"show":{"optimizeResponse":[true],"responseType":["html"]}}},{"displayName":"Return Only Content","name":"onlyContent","type":"boolean","default":false,"description":"Whether to return only content of html elements, stripping html tags and attributes","hint":"Uses less tokens and may be easier for model to understand","displayOptions":{"show":{"optimizeResponse":[true],"responseType":["html"]}}},{"displayName":"Elements To Omit","name":"elementsToOmit","type":"string","displayOptions":{"show":{"optimizeResponse":[true],"responseType":["html"],"onlyContent":[true]}},"default":"","placeholder":"e.g. 
img, .className, #ItemId","description":"Comma-separated list of selectors that would be excluded when extracting content"},{"displayName":"Truncate Response","name":"truncateResponse","type":"boolean","default":false,"hint":"Helps save tokens","displayOptions":{"show":{"optimizeResponse":[true],"responseType":["text","html"]}}},{"displayName":"Max Response Characters","name":"maxLength","type":"number","default":1000,"typeOptions":{"minValue":1},"displayOptions":{"show":{"optimizeResponse":[true],"responseType":["text","html"],"truncateResponse":[true]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/tools/ToolHttpRequest/httprequest.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/tools/ToolHttpRequest/httprequest.dark.svg"}},
|
|
88
88
|
{"displayName":"SearXNG","name":"toolSearXng","group":["transform"],"version":1,"description":"Search in SearXNG","defaults":{"name":"SearXNG"},"codex":{"categories":["AI"],"subcategories":{"AI":["Tools"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolsearxng"}]}},"inputs":[],"outputs":["ai_tool"],"outputNames":["Tool"],"credentials":[{"name":"searXngApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Number of Results","name":"numResults","type":"number","default":10},{"displayName":"Search Page Number","name":"pageNumber","type":"number","default":1},{"displayName":"Language","name":"language","type":"string","default":"en","description":"Defines the language to use. It's a two-letter language code. (e.g., `en` for English, `es` for Spanish, or `fr` for French). Head to <a href=\"https://docs.searxng.org/user/search-syntax.html#select-language\">SearXNG search syntax page</a> for more info."},{"displayName":"Safe Search","name":"safesearch","type":"options","options":[{"name":"None","value":0},{"name":"Moderate","value":1},{"name":"Strict","value":2}],"default":0,"description":"Filter search results of engines which support safe search"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/tools/ToolSearXng/searXng.svg"},
|
|
89
|
-
{"displayName":"SerpApi (Google Search)","name":"toolSerpApi","group":["transform"],"version":1,"description":"Search in Google using SerpAPI","defaults":{"name":"SerpAPI"},"codex":{"categories":["AI"],"subcategories":{"AI":["Tools"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolserpapi/"}]}},"inputs":[],"outputs":["ai_tool"],"outputNames":["Tool"],"credentials":[{"name":"serpApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Country","name":"gl","type":"string","default":"us","description":"Defines the country to use for search. Head to <a href=\"https://serpapi.com/google-countries\">Google countries page</a> for a full list of supported countries."},{"displayName":"Device","name":"device","type":"options","options":[{"name":"Desktop","value":"desktop"},{"name":"Mobile","value":"mobile"},{"name":"Tablet","value":"tablet"}],"default":"desktop","description":"Device to use to get the results"},{"displayName":"Explicit Array","name":"no_cache","type":"boolean","default":false,"description":"Whether to force SerpApi to fetch the Google results even if a cached version is already present. Cache expires after 1h. Cached searches are free, and are not counted towards your searches per month."},{"displayName":"Google Domain","name":"google_domain","type":"string","default":"google.com","description":"Defines the domain to use for search. 
Head to <a href=\"https://serpapi.com/google-domains\">Google domains page</a> for a full list of supported domains."},{"displayName":"Language","name":"hl","type":"string","default":"en","description":"Defines the language to use. It's a two-letter language code. (e.g., `en` for English, `es` for Spanish, or `fr` for French). Head to <a href=\"https://serpapi.com/google-languages\">Google languages page</a> for a full list of supported languages."}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/tools/ToolSerpApi/serpApi.svg"},
|
|
89
|
+
{"displayName":"SerpApi (Google Search)","name":"toolSerpApi","group":["transform"],"version":1,"hidden":true,"description":"Search in Google using SerpAPI","defaults":{"name":"SerpAPI"},"codex":{"categories":["AI"],"subcategories":{"AI":["Tools"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolserpapi/"}]}},"inputs":[],"outputs":["ai_tool"],"outputNames":["Tool"],"credentials":[{"name":"serpApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"This node is deprecated and will not be updated in the future. Please use the official verified community node instead.","name":"oldVersionNotice","type":"notice","default":""},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Country","name":"gl","type":"string","default":"us","description":"Defines the country to use for search. Head to <a href=\"https://serpapi.com/google-countries\">Google countries page</a> for a full list of supported countries."},{"displayName":"Device","name":"device","type":"options","options":[{"name":"Desktop","value":"desktop"},{"name":"Mobile","value":"mobile"},{"name":"Tablet","value":"tablet"}],"default":"desktop","description":"Device to use to get the results"},{"displayName":"Explicit Array","name":"no_cache","type":"boolean","default":false,"description":"Whether to force SerpApi to fetch the Google results even if a cached version is already present. Cache expires after 1h. 
Cached searches are free, and are not counted towards your searches per month."},{"displayName":"Google Domain","name":"google_domain","type":"string","default":"google.com","description":"Defines the domain to use for search. Head to <a href=\"https://serpapi.com/google-domains\">Google domains page</a> for a full list of supported domains."},{"displayName":"Language","name":"hl","type":"string","default":"en","description":"Defines the language to use. It's a two-letter language code. (e.g., `en` for English, `es` for Spanish, or `fr` for French). Head to <a href=\"https://serpapi.com/google-languages\">Google languages page</a> for a full list of supported languages."}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/tools/ToolSerpApi/serpApi.svg"},
|
|
90
90
|
{"displayName":"Think Tool","name":"toolThink","icon":"node:think-tool","iconColor":"black","group":["transform"],"version":[1,1.1],"description":"Invite the AI agent to do some thinking","defaults":{"name":"Think"},"codex":{"categories":["AI"],"subcategories":{"AI":["Tools"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolthink/"}]}},"inputs":[],"outputs":["ai_tool"],"outputNames":["Tool"],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Think Tool Description","name":"description","type":"string","default":"Use the tool to think about something. It will not obtain new information or change the database, but just append the thought to the log. Use it when complex reasoning or some cache memory is needed.","placeholder":"[Describe your thinking tool here, explaining how it will help the AI think]","description":"The thinking tool's description","typeOptions":{"rows":3},"required":true}]},
|
|
91
91
|
{"displayName":"Vector Store Question Answer Tool","name":"toolVectorStore","icon":"node:vector-store-question-answer-tool","iconColor":"black","group":["transform"],"version":[1,1.1],"description":"Answer questions with a vector store","defaults":{"name":"Answer questions with a vector store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Tools"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolvectorstore/"}]}},"inputs":[{"displayName":"Vector Store","maxConnections":1,"type":"ai_vectorStore","required":true},{"displayName":"Model","maxConnections":1,"type":"ai_languageModel","required":true}],"outputs":["ai_tool"],"outputNames":["Tool"],"builderHint":{"inputs":{"ai_vectorStore":{"required":true},"ai_languageModel":{"required":true}}},"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Data Name","name":"name","type":"string","default":"","placeholder":"e.g. users_info","validateType":"string-alphanumeric","description":"Name of the data in vector store. This will be used to fill this tool description: Useful for when you need to answer questions about [name]. Whenever you need information about [data description], you should ALWAYS use this. Input should be a fully formed question.","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Description of Data","name":"description","type":"string","default":"","placeholder":"[Describe your data here, e.g. a user's name, email, etc.]","description":"Describe the data in vector store. This will be used to fill this tool description: Useful for when you need to answer questions about [name]. 
Whenever you need information about [data description], you should ALWAYS use this. Input should be a fully formed question.","typeOptions":{"rows":3}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"The maximum number of results to return"}]},
|
|
92
92
|
{"displayName":"Wikipedia","name":"toolWikipedia","group":["transform"],"version":1,"description":"Search in Wikipedia","defaults":{"name":"Wikipedia"},"codex":{"categories":["AI"],"subcategories":{"AI":["Tools"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolwikipedia/"}]}},"inputs":[],"outputs":["ai_tool"],"outputNames":["Tool"],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/tools/ToolWikipedia/wikipedia.svg"},
|
|
@@ -94,7 +94,7 @@
|
|
|
94
94
|
{"displayName":"Call n8n Workflow Tool","name":"toolWorkflow","icon":"node:call-n8n-sub-workflow-tool","iconColor":"black","group":["transform"],"description":"Uses another n8n workflow as a tool. Allows packaging any n8n node(s) as a tool.","codex":{"categories":["AI"],"subcategories":{"AI":["Tools"],"Tools":["Recommended Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolworkflow/"}]}},"defaultVersion":2.2,"defaults":{"name":"Call n8n Workflow Tool"},"version":[2,2.1,2.2],"inputs":[],"outputs":["ai_tool"],"outputNames":["Tool"],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"See an example of a workflow to suggest meeting slots using AI <a href=\"/templates/1953\" target=\"_blank\">here</a>.","name":"noticeTemplateExample","type":"notice","default":""},{"displayName":"Name","name":"name","type":"string","default":"","placeholder":"e.g. My_Color_Tool","validateType":"string-alphanumeric","description":"The name of the function to be called, could contain letters, numbers, and underscores only","displayOptions":{"show":{"@version":[{"_cnd":{"lte":2.1}}]}}},{"displayName":"Description","name":"description","type":"string","default":"","placeholder":"Call this tool to get a random color. The input should be a string with comma separated names of colors to exclude.","typeOptions":{"rows":3}},{"displayName":"This tool will call the workflow you define below, and look in the last node for the response. 
The workflow needs to start with an Execute Workflow trigger","name":"executeNotice","type":"notice","default":""},{"displayName":"Source","name":"source","type":"options","options":[{"name":"Database","value":"database","description":"Load the workflow from the database by ID"},{"name":"Define Below","value":"parameter","description":"Pass the JSON code of a workflow"}],"default":"database","description":"Where to get the workflow to execute from"},{"displayName":"Workflow","name":"workflowId","type":"workflowSelector","displayOptions":{"show":{"source":["database"]}},"default":"","required":true},{"displayName":"Workflow Inputs","name":"workflowInputs","type":"resourceMapper","noDataExpression":true,"default":{"mappingMode":"defineBelow","value":null},"required":true,"typeOptions":{"loadOptionsDependsOn":["workflowId.value"],"resourceMapper":{"localResourceMapperMethod":"loadSubWorkflowInputs","valuesLabel":"Workflow Inputs","mode":"map","fieldWords":{"singular":"workflow input","plural":"workflow inputs"},"addAllFields":true,"multiKeyMatch":false,"supportAutoMap":false}},"displayOptions":{"show":{"source":["database"]},"hide":{"workflowId":[""]}}},{"displayName":"Workflow JSON","name":"workflowJson","type":"json","typeOptions":{"rows":10},"displayOptions":{"show":{"source":["parameter"]}},"default":"\n\n\n\n\n\n\n\n\n","required":true,"description":"The workflow JSON code to execute"}]},
|
|
95
95
|
{"displayName":"Call n8n Workflow Tool","name":"toolWorkflow","icon":"node:call-n8n-sub-workflow-tool","iconColor":"black","group":["transform"],"description":"Uses another n8n workflow as a tool. Allows packaging any n8n node(s) as a tool.","codex":{"categories":["AI"],"subcategories":{"AI":["Tools"],"Tools":["Recommended Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolworkflow/"}]}},"defaultVersion":2.2,"version":[1,1.1,1.2,1.3],"defaults":{"name":"Call n8n Workflow Tool"},"inputs":[],"outputs":["ai_tool"],"outputNames":["Tool"],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"See an example of a workflow to suggest meeting slots using AI <a href=\"/templates/1953\" target=\"_blank\">here</a>.","name":"noticeTemplateExample","type":"notice","default":""},{"displayName":"Name","name":"name","type":"string","default":"","placeholder":"My_Color_Tool","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Name","name":"name","type":"string","default":"","placeholder":"e.g. My_Color_Tool","validateType":"string-alphanumeric","description":"The name of the function to be called, could contain letters, numbers, and underscores only","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Description","name":"description","type":"string","default":"","placeholder":"Call this tool to get a random color. The input should be a string with comma separted names of colors to exclude.","typeOptions":{"rows":3}},{"displayName":"This tool will call the workflow you define below, and look in the last node for the response. 
The workflow needs to start with an Execute Workflow trigger","name":"executeNotice","type":"notice","default":""},{"displayName":"Source","name":"source","type":"options","options":[{"name":"Database","value":"database","description":"Load the workflow from the database by ID"},{"name":"Define Below","value":"parameter","description":"Pass the JSON code of a workflow"}],"default":"database","description":"Where to get the workflow to execute from"},{"displayName":"Workflow ID","name":"workflowId","type":"string","displayOptions":{"show":{"source":["database"],"@version":[{"_cnd":{"lte":1.1}}]}},"default":"","required":true,"description":"The workflow to execute","hint":"Can be found in the URL of the workflow"},{"displayName":"Workflow","name":"workflowId","type":"workflowSelector","displayOptions":{"show":{"source":["database"],"@version":[{"_cnd":{"gte":1.2}}]}},"default":"","required":true},{"displayName":"Workflow JSON","name":"workflowJson","type":"json","typeOptions":{"rows":10},"displayOptions":{"show":{"source":["parameter"]}},"default":"\n\n\n\n\n\n\n\n\n","required":true,"description":"The workflow JSON code to execute"},{"displayName":"Field to Return","name":"responsePropertyName","type":"string","default":"response","required":true,"hint":"The field in the last-executed node of the workflow that contains the response","description":"Where to find the data that this tool should return. 
n8n will look in the output of the last-executed node of the workflow for a field with this name, and return its value.","displayOptions":{"show":{"@version":[{"_cnd":{"lt":1.3}}]}}},{"displayName":"Extra Workflow Inputs","name":"fields","placeholder":"Add Value","type":"fixedCollection","description":"These will be output by the 'execute workflow' trigger of the workflow being called","typeOptions":{"multipleValues":true,"sortable":true},"default":{},"options":[{"name":"values","displayName":"Values","values":[{"displayName":"Name","name":"name","type":"string","default":"","placeholder":"e.g. fieldName","description":"Name of the field to set the value of. Supports dot-notation. Example: data.person[0].name.","requiresDataPath":"single"},{"displayName":"Type","name":"type","type":"options","description":"The field value type","options":[{"name":"String","value":"stringValue"},{"name":"Number","value":"numberValue"},{"name":"Boolean","value":"booleanValue"},{"name":"Array","value":"arrayValue"},{"name":"Object","value":"objectValue"}],"default":"stringValue"},{"displayName":"Value","name":"stringValue","type":"string","default":"","displayOptions":{"show":{"type":["stringValue"]}},"validateType":"string","ignoreValidationDuringExecution":true},{"displayName":"Value","name":"numberValue","type":"string","default":"","displayOptions":{"show":{"type":["numberValue"]}},"validateType":"number","ignoreValidationDuringExecution":true},{"displayName":"Value","name":"booleanValue","type":"options","default":"true","options":[{"name":"True","value":"true"},{"name":"False","value":"false"}],"displayOptions":{"show":{"type":["booleanValue"]}},"validateType":"boolean","ignoreValidationDuringExecution":true},{"displayName":"Value","name":"arrayValue","type":"string","default":"","placeholder":"e.g. 
[ arrayItem1, arrayItem2, arrayItem3 ]","displayOptions":{"show":{"type":["arrayValue"]}},"validateType":"array","ignoreValidationDuringExecution":true},{"displayName":"Value","name":"objectValue","type":"json","default":"={}","typeOptions":{"rows":2},"displayOptions":{"show":{"type":["objectValue"]}},"validateType":"object","ignoreValidationDuringExecution":true}]}]},{"displayName":"Specify Input Schema","name":"specifyInputSchema","type":"boolean","description":"Whether to specify the schema for the function. This would require the LLM to provide the input in the correct format and would validate it against the schema.","noDataExpression":true,"default":false},{"displayName":"Schema Type","name":"schemaType","type":"options","noDataExpression":true,"options":[{"name":"Generate From JSON Example","value":"fromJson","description":"Generate a schema from an example JSON object"},{"name":"Define using JSON Schema","value":"manual","description":"Define the JSON schema manually"}],"default":"fromJson","description":"How to specify the schema for the function","displayOptions":{"show":{"specifyInputSchema":[true]}}},{"displayName":"JSON Example","name":"jsonSchemaExample","type":"json","default":"{\n\t\"some_input\": \"some_value\"\n}","noDataExpression":true,"typeOptions":{"rows":10},"displayOptions":{"show":{"schemaType":["fromJson"]}},"description":"Example JSON object to use to generate the schema"},{"displayName":"Input Schema","name":"inputSchema","type":"json","default":"{\n\"type\": \"object\",\n\"properties\": {\n\t\"some_input\": {\n\t\t\"type\": \"string\",\n\t\t\"description\": \"Some input to the function\"\n\t\t}\n\t}\n}","noDataExpression":false,"typeOptions":{"rows":10},"displayOptions":{"show":{"schemaType":["manual"]}},"description":"Schema to use for the function","hint":"Use <a target=\"_blank\" href=\"https://json-schema.org/\">JSON Schema</a> format (<a target=\"_blank\" 
href=\"https://json-schema.org/learn/miscellaneous-examples.html\">examples</a>). $refs syntax is currently not supported."}]},
|
|
96
96
|
{"displayName":"Manual Chat Trigger","name":"manualChatTrigger","icon":"fa:comments","group":["trigger"],"version":[1,1.1],"description":"Runs the flow on new manual chat message","eventTriggerDescription":"","maxNodes":1,"hidden":true,"defaults":{"name":"When chat message received","color":"#909298"},"codex":{"categories":["Core Nodes"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-langchain.chattrigger/"}]},"subcategories":{"Core Nodes":["Other Trigger Nodes"]}},"inputs":[],"outputs":["main"],"properties":[{"displayName":"This node is where a manual chat workflow execution starts. To make one, go back to the canvas and click ‘Chat’","name":"notice","type":"notice","default":""},{"displayName":"Chat and execute workflow","name":"openChat","type":"button","typeOptions":{"buttonConfig":{"action":"openChat"}},"default":""}]},
|
|
97
|
-
{"displayName":"Chat Trigger","name":"chatTrigger","icon":"node:chat-trigger","iconColor":"black","group":["trigger"],"version":[1,1.1,1.2,1.3,1.4],"defaultVersion":1.4,"description":"Runs the workflow when an n8n generated webchat is submitted","defaults":{"name":"When chat message received"},"codex":{"categories":["Core Nodes"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-langchain.chattrigger/"}]}},"maxNodes":1,"inputs":"={{ (() => {\n\t\t\tif (!['hostedChat', 'webhook'].includes($parameter.mode)) {\n\t\t\t\treturn [];\n\t\t\t}\n\t\t\tif ($parameter.options?.loadPreviousSession !== 'memory') {\n\t\t\t\treturn [];\n\t\t\t}\n\n\t\t\treturn [\n\t\t\t\t{\n\t\t\t\t\tdisplayName: 'Memory',\n\t\t\t\t\tmaxConnections: 1,\n\t\t\t\t\ttype: 'ai_memory',\n\t\t\t\t\trequired: true,\n\t\t\t\t}\n\t\t\t];\n\t\t })() }}","outputs":["main"],"builderHint":{"inputs":{"ai_memory":{"required":true,"displayOptions":{"show":{"mode":["hostedChat","webhook"],"options.loadPreviousSession":["memory"]}}}}},"credentials":[{"name":"httpBasicAuth","required":true,"displayOptions":{"show":{"authentication":["basicAuth"]}}}],"webhooks":[{"name":"setup","httpMethod":"GET","responseMode":"onReceived","path":"chat","ndvHideUrl":true},{"name":"default","httpMethod":"POST","responseMode":"={{$parameter.options?.[\"responseMode\"] ?? ($parameter.availableInChat ? 
\"streaming\" : \"lastNode\") }}","path":"chat","ndvHideMethod":true,"ndvHideUrl":"={{ !$parameter.public }}"}],"eventTriggerDescription":"Waiting for you to submit the chat","activationMessage":"You can now make calls to your production chat URL.","triggerPanel":false,"properties":[{"displayName":"Make Chat Publicly Available","name":"public","type":"boolean","default":false,"description":"Whether the chat should be publicly available or only accessible through the manual chat interface"},{"displayName":"Mode","name":"mode","type":"options","options":[{"name":"Hosted Chat","value":"hostedChat","description":"Chat on a page served by n8n"},{"name":"Embedded Chat","value":"webhook","description":"Chat through a widget embedded in another page, or by calling a webhook"}],"default":"hostedChat","displayOptions":{"show":{"public":[true]}}},{"displayName":"Chat will be live at the URL above once this workflow is published. Live executions will show up in the ‘executions’ tab","name":"hostedChatNotice","type":"notice","displayOptions":{"show":{"mode":["hostedChat"],"public":[true]}},"default":""},{"displayName":"Follow the instructions <a href=\"https://www.npmjs.com/package/@n8n/chat\" target=\"_blank\">here</a> to embed chat in a webpage (or just call the webhook URL at the top of this section). 
Chat will be live once you publish this workflow","name":"embeddedChatNotice","type":"notice","displayOptions":{"show":{"mode":["webhook"],"public":[true]}},"default":""},{"displayName":"Authentication","name":"authentication","type":"options","displayOptions":{"show":{"public":[true]}},"options":[{"name":"Basic Auth","value":"basicAuth","description":"Simple username and password (the same one for all users)"},{"name":"n8n User Auth","value":"n8nUserAuth","description":"Require user to be logged in with their n8n account"},{"name":"None","value":"none"}],"default":"none","description":"The way to authenticate"},{"displayName":"Initial Message(s)","name":"initialMessages","type":"string","displayOptions":{"show":{"mode":["hostedChat"],"public":[true]}},"typeOptions":{"rows":3},"default":"Hi there! 👋\nMy name is Nathan. How can I assist you today?","description":"Default messages shown at the start of the chat, one per line"},{"displayName":"Make Available in n8n Chat Hub","name":"availableInChat","type":"boolean","default":false,"noDataExpression":true,"description":"Whether to make the agent available in n8n Chat Hub for n8n instance users to chat with"},{"displayName":"Your Chat Trigger node is out of date. To update, delete this node and insert a new Chat Trigger node.","name":"availableInChatNotice","type":"notice","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"lt":1.2}}]}},"default":""},{"displayName":"Your n8n users will be able to use this agent in <a href=\"/home/chat/\" target=\"_blank\">Chat</a> once this workflow is published. 
Make sure to share this workflow with at least Project Chat User access to all users who should use it.","name":"availableInChatNotice","type":"notice","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"gte":1.2}}]}},"default":""},{"displayName":"Agent Icon","name":"agentIcon","type":"icon","default":{"type":"icon","value":"bot"},"noDataExpression":true,"description":"The icon of the agent on n8n Chat","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Agent Name","name":"agentName","type":"string","default":"","noDataExpression":true,"description":"The name of the agent on n8n Chat. Name of the workflow is used if left empty.","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Agent Description","name":"agentDescription","type":"string","typeOptions":{"rows":2},"default":"","noDataExpression":true,"description":"The description of the agent on n8n Chat","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Suggestions","name":"suggestedPrompts","type":"fixedCollection","typeOptions":{"multipleValues":true,"fixedCollection":{"layout":"inline"}},"default":{},"noDataExpression":true,"placeholder":"Add Prompt","description":"Suggested prompts shown to users in n8n Chat Hub to start a conversation with the agent","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"gte":1.2}}]}},"options":[{"name":"prompts","displayName":"Prompts","values":[{"displayName":"Icon","name":"icon","type":"icon","noDataExpression":true,"default":{"type":"icon","value":"comment"}},{"displayName":"Prompt Text","name":"text","type":"string","default":"","noDataExpression":true,"required":true}]}]},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"public":[false],"@version":[1,1.1]}},"placeholder":"Add Field","default":{},"options":[{"displayName":"Allow File 
Uploads","name":"allowFileUploads","type":"boolean","default":false,"description":"Whether to allow file uploads in the chat"},{"displayName":"Allowed File Mime Types","name":"allowedFilesMimeTypes","type":"string","default":"*","placeholder":"e.g. image/*, text/*, application/pdf","description":"Allowed file types for upload. Comma-separated list of <a href=\"https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types\" target=\"_blank\">MIME types</a>."}]},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"mode":["hostedChat","webhook"],"public":[true],"@version":[1,1.1]}},"placeholder":"Add Field","default":{},"options":[{"displayName":"Allowed Origins (CORS)","name":"allowedOrigins","type":"string","default":"*","description":"Comma-separated list of URLs allowed for cross-origin non-preflight requests. Use * (default) to allow all origins.","displayOptions":{"show":{"/mode":["hostedChat","webhook"]}}},{"displayName":"Allow File Uploads","name":"allowFileUploads","type":"boolean","default":false,"description":"Whether to allow file uploads in the chat","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Allowed File Mime Types","name":"allowedFilesMimeTypes","type":"string","default":"*","placeholder":"e.g. image/*, text/*, application/pdf","description":"Allowed file types for upload. Comma-separated list of <a href=\"https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types\" target=\"_blank\">MIME types</a>.","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Input Placeholder","name":"inputPlaceholder","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Type your question..","placeholder":"e.g. 
Type your message here","description":"Shown as placeholder text in the chat input field"},{"displayName":"Load Previous Session","name":"loadPreviousSession","type":"options","options":[{"name":"Off","value":"notSupported","description":"Loading messages of previous session is turned off"},{"name":"From Memory","value":"memory","description":"Load session messages from memory"},{"name":"Manually","value":"manually","description":"Manually return messages of session"}],"default":"notSupported","description":"If loading messages of a previous session should be enabled","builderHint":{"message":"Set to 'memory' to persist conversation history across sessions"}},{"displayName":"Require Button Click to Start Chat","name":"showWelcomeScreen","type":"boolean","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":false,"description":"Whether to show the welcome screen at the start of the chat"},{"displayName":"Start Conversation Button Text","name":"getStarted","type":"string","displayOptions":{"show":{"showWelcomeScreen":[true],"/mode":["hostedChat"]}},"default":"New Conversation","placeholder":"e.g. New Conversation","description":"Shown as part of the welcome screen, in the middle of the chat window"},{"displayName":"Subtitle","name":"subtitle","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Start a chat. We're here to help you 24/7.","placeholder":"e.g. We're here for you","description":"Shown at the top of the chat, under the title"},{"displayName":"Title","name":"title","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Hi there! 👋","placeholder":"e.g. 
Welcome","description":"Shown at the top of the chat"},{"displayName":"Custom Chat Styling","name":"customCss","type":"string","typeOptions":{"rows":10,"editor":"cssEditor"},"displayOptions":{"show":{"/mode":["hostedChat"]}},"default":":root {\n /* Colors */\n --chat--color--primary: #e74266;\n --chat--color--primary-shade-50: #db4061;\n --chat--color--primary--shade-100: #cf3c5c;\n --chat--color--secondary: #20b69e;\n --chat--color-secondary-shade-50: #1ca08a;\n --chat--color-white: #fff;\n --chat--color-light: #f2f4f8;\n --chat--color-light-shade-50: #e6e9f1;\n --chat--color-light-shade-100: #c2c5cc;\n --chat--color-medium: #d2d4d9;\n --chat--color-dark: #101330;\n --chat--color-disabled: #d2d4d9;\n --chat--color-typing: #404040;\n\n /* Base Layout */\n --chat--spacing: 1rem;\n --chat--border-radius: 0.25rem;\n --chat--transition-duration: 0.15s;\n --chat--font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen-Sans, Ubuntu, Cantarell, 'Helvetica Neue', sans-serif;\n\n /* Window Dimensions */\n --chat--window--width: 400px;\n --chat--window--height: 600px;\n --chat--window--bottom: var(--chat--spacing);\n --chat--window--right: var(--chat--spacing);\n --chat--window--z-index: 9999;\n --chat--window--border: 1px solid var(--chat--color-light-shade-50);\n --chat--window--border-radius: var(--chat--border-radius);\n --chat--window--margin-bottom: var(--chat--spacing);\n\n /* Header Styles */\n --chat--header-height: auto;\n --chat--header--padding: var(--chat--spacing);\n --chat--header--background: var(--chat--color-dark);\n --chat--header--color: var(--chat--color-light);\n --chat--header--border-top: none;\n --chat--header--border-bottom: none;\n --chat--header--border-left: none;\n --chat--header--border-right: none;\n --chat--heading--font-size: 2em;\n --chat--subtitle--font-size: inherit;\n --chat--subtitle--line-height: 1.8;\n\n /* Message Styles */\n --chat--message--font-size: 1rem;\n --chat--message--padding: var(--chat--spacing);\n 
--chat--message--border-radius: var(--chat--border-radius);\n --chat--message-line-height: 1.5;\n --chat--message--margin-bottom: calc(var(--chat--spacing) * 1);\n --chat--message--bot--background: var(--chat--color-white);\n --chat--message--bot--color: var(--chat--color-dark);\n --chat--message--bot--border: none;\n --chat--message--user--background: var(--chat--color--secondary);\n --chat--message--user--color: var(--chat--color-white);\n --chat--message--user--border: none;\n --chat--message--pre--background: rgba(0, 0, 0, 0.05);\n --chat--messages-list--padding: var(--chat--spacing);\n\n /* Toggle Button */\n --chat--toggle--size: 64px;\n --chat--toggle--width: var(--chat--toggle--size);\n --chat--toggle--height: var(--chat--toggle--size);\n --chat--toggle--border-radius: 50%;\n --chat--toggle--background: var(--chat--color--primary);\n --chat--toggle--hover--background: var(--chat--color--primary-shade-50);\n --chat--toggle--active--background: var(--chat--color--primary--shade-100);\n --chat--toggle--color: var(--chat--color-white);\n\n /* Input Area */\n --chat--textarea--height: 50px;\n --chat--textarea--max-height: 30rem;\n --chat--input--font-size: inherit;\n --chat--input--border: 0;\n --chat--input--border-radius: 0;\n --chat--input--padding: 0.8rem;\n --chat--input--background: var(--chat--color-white);\n --chat--input--text-color: initial;\n --chat--input--line-height: 1.5;\n --chat--input--placeholder--font-size: var(--chat--input--font-size);\n --chat--input--border-active: 0;\n --chat--input--left--panel--width: 2rem;\n\n /* Button Styles */\n --chat--button--padding: calc(var(--chat--spacing) * 5 / 8) var(--chat--spacing);\n --chat--button--border-radius: var(--chat--border-radius);\n --chat--button--font-size: 1rem;\n --chat--button--line-height: 1;\n --chat--button--color--primary: var(--chat--color-light);\n --chat--button--background--primary: var(--chat--color--secondary);\n --chat--button--border--primary: none;\n 
--chat--button--color--primary--hover: var(--chat--color-light);\n --chat--button--background--primary--hover: var(--chat--color-secondary-shade-50);\n --chat--button--border--primary--hover: none;\n --chat--button--color--primary--disabled: var(--chat--color-light);\n --chat--button--background--primary--disabled: #81bbb1;\n --chat--button--border--primary--disabled: none;\n --chat--button--color--secondary: var(--chat--color-light);\n --chat--button--background--secondary: hsl(0, 0%, 58%);\n --chat--button--border--secondary: none;\n --chat--button--color--secondary--hover: var(--chat--color-light);\n --chat--button--background--secondary--hover: hsl(0, 0%, 51%);\n --chat--button--border--secondary--hover: none;\n --chat--button--color--secondary--disabled: var(--chat--color-light);\n --chat--button--background--secondary--disabled: hsl(0, 0%, 78%);\n --chat--button--border--secondary--disabled: none;\n --chat--close--button--color-hover: var(--chat--color--primary);\n\n /* Send and File Buttons */\n --chat--input--send--button--background: var(--chat--color-white);\n --chat--input--send--button--color: var(--chat--color--secondary);\n --chat--input--send--button--background-hover: var(--chat--color--primary-shade-50);\n --chat--input--send--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--input--file--button--background: var(--chat--color-white);\n --chat--input--file--button--color: var(--chat--color--secondary);\n --chat--input--file--button--background-hover: var(--chat--input--file--button--background);\n --chat--input--file--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--files-spacing: 0.25rem;\n\n /* Body and Footer */\n --chat--body--background: var(--chat--color-light);\n --chat--footer--background: var(--chat--color-light);\n --chat--footer--color: var(--chat--color-dark);\n}\n\n\n/* You can override any class styles, too. Right-click inspect in Chat UI to find class to override. 
*/\n.chat-message {\n\tmax-width: 50%;\n}","description":"Override default styling of the public chat interface with CSS"},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Using 'Respond to Webhook' Node","value":"responseNode","description":"Response defined in that node"}],"default":"lastNode","description":"When and how to respond to the webhook"},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. Defaults to true."}]},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"mode":["hostedChat","webhook"],"public":[true],"@version":[1.2]}},"placeholder":"Add Field","default":{},"options":[{"displayName":"Allowed Origins (CORS)","name":"allowedOrigins","type":"string","default":"*","description":"Comma-separated list of URLs allowed for cross-origin non-preflight requests. Use * (default) to allow all origins.","displayOptions":{"show":{"/mode":["hostedChat","webhook"]}}},{"displayName":"Allow File Uploads","name":"allowFileUploads","type":"boolean","default":false,"description":"Whether to allow file uploads in the chat","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Allowed File Mime Types","name":"allowedFilesMimeTypes","type":"string","default":"*","placeholder":"e.g. image/*, text/*, application/pdf","description":"Allowed file types for upload. 
Comma-separated list of <a href=\"https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types\" target=\"_blank\">MIME types</a>.","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Input Placeholder","name":"inputPlaceholder","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Type your question..","placeholder":"e.g. Type your message here","description":"Shown as placeholder text in the chat input field"},{"displayName":"Load Previous Session","name":"loadPreviousSession","type":"options","options":[{"name":"Off","value":"notSupported","description":"Loading messages of previous session is turned off"},{"name":"From Memory","value":"memory","description":"Load session messages from memory"},{"name":"Manually","value":"manually","description":"Manually return messages of session"}],"default":"notSupported","description":"If loading messages of a previous session should be enabled","builderHint":{"message":"Set to 'memory' to persist conversation history across sessions"}},{"displayName":"Require Button Click to Start Chat","name":"showWelcomeScreen","type":"boolean","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":false,"description":"Whether to show the welcome screen at the start of the chat"},{"displayName":"Start Conversation Button Text","name":"getStarted","type":"string","displayOptions":{"show":{"showWelcomeScreen":[true],"/mode":["hostedChat"]}},"default":"New Conversation","placeholder":"e.g. New Conversation","description":"Shown as part of the welcome screen, in the middle of the chat window"},{"displayName":"Subtitle","name":"subtitle","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Start a chat. We're here to help you 24/7.","placeholder":"e.g. 
We're here for you","description":"Shown at the top of the chat, under the title"},{"displayName":"Title","name":"title","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Hi there! 👋","placeholder":"e.g. Welcome","description":"Shown at the top of the chat"},{"displayName":"Custom Chat Styling","name":"customCss","type":"string","typeOptions":{"rows":10,"editor":"cssEditor"},"displayOptions":{"show":{"/mode":["hostedChat"]}},"default":":root {\n /* Colors */\n --chat--color--primary: #e74266;\n --chat--color--primary-shade-50: #db4061;\n --chat--color--primary--shade-100: #cf3c5c;\n --chat--color--secondary: #20b69e;\n --chat--color-secondary-shade-50: #1ca08a;\n --chat--color-white: #fff;\n --chat--color-light: #f2f4f8;\n --chat--color-light-shade-50: #e6e9f1;\n --chat--color-light-shade-100: #c2c5cc;\n --chat--color-medium: #d2d4d9;\n --chat--color-dark: #101330;\n --chat--color-disabled: #d2d4d9;\n --chat--color-typing: #404040;\n\n /* Base Layout */\n --chat--spacing: 1rem;\n --chat--border-radius: 0.25rem;\n --chat--transition-duration: 0.15s;\n --chat--font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen-Sans, Ubuntu, Cantarell, 'Helvetica Neue', sans-serif;\n\n /* Window Dimensions */\n --chat--window--width: 400px;\n --chat--window--height: 600px;\n --chat--window--bottom: var(--chat--spacing);\n --chat--window--right: var(--chat--spacing);\n --chat--window--z-index: 9999;\n --chat--window--border: 1px solid var(--chat--color-light-shade-50);\n --chat--window--border-radius: var(--chat--border-radius);\n --chat--window--margin-bottom: var(--chat--spacing);\n\n /* Header Styles */\n --chat--header-height: auto;\n --chat--header--padding: var(--chat--spacing);\n --chat--header--background: var(--chat--color-dark);\n --chat--header--color: var(--chat--color-light);\n --chat--header--border-top: none;\n --chat--header--border-bottom: none;\n --chat--header--border-left: none;\n --chat--header--border-right: 
none;\n --chat--heading--font-size: 2em;\n --chat--subtitle--font-size: inherit;\n --chat--subtitle--line-height: 1.8;\n\n /* Message Styles */\n --chat--message--font-size: 1rem;\n --chat--message--padding: var(--chat--spacing);\n --chat--message--border-radius: var(--chat--border-radius);\n --chat--message-line-height: 1.5;\n --chat--message--margin-bottom: calc(var(--chat--spacing) * 1);\n --chat--message--bot--background: var(--chat--color-white);\n --chat--message--bot--color: var(--chat--color-dark);\n --chat--message--bot--border: none;\n --chat--message--user--background: var(--chat--color--secondary);\n --chat--message--user--color: var(--chat--color-white);\n --chat--message--user--border: none;\n --chat--message--pre--background: rgba(0, 0, 0, 0.05);\n --chat--messages-list--padding: var(--chat--spacing);\n\n /* Toggle Button */\n --chat--toggle--size: 64px;\n --chat--toggle--width: var(--chat--toggle--size);\n --chat--toggle--height: var(--chat--toggle--size);\n --chat--toggle--border-radius: 50%;\n --chat--toggle--background: var(--chat--color--primary);\n --chat--toggle--hover--background: var(--chat--color--primary-shade-50);\n --chat--toggle--active--background: var(--chat--color--primary--shade-100);\n --chat--toggle--color: var(--chat--color-white);\n\n /* Input Area */\n --chat--textarea--height: 50px;\n --chat--textarea--max-height: 30rem;\n --chat--input--font-size: inherit;\n --chat--input--border: 0;\n --chat--input--border-radius: 0;\n --chat--input--padding: 0.8rem;\n --chat--input--background: var(--chat--color-white);\n --chat--input--text-color: initial;\n --chat--input--line-height: 1.5;\n --chat--input--placeholder--font-size: var(--chat--input--font-size);\n --chat--input--border-active: 0;\n --chat--input--left--panel--width: 2rem;\n\n /* Button Styles */\n --chat--button--padding: calc(var(--chat--spacing) * 5 / 8) var(--chat--spacing);\n --chat--button--border-radius: var(--chat--border-radius);\n --chat--button--font-size: 1rem;\n 
--chat--button--line-height: 1;\n --chat--button--color--primary: var(--chat--color-light);\n --chat--button--background--primary: var(--chat--color--secondary);\n --chat--button--border--primary: none;\n --chat--button--color--primary--hover: var(--chat--color-light);\n --chat--button--background--primary--hover: var(--chat--color-secondary-shade-50);\n --chat--button--border--primary--hover: none;\n --chat--button--color--primary--disabled: var(--chat--color-light);\n --chat--button--background--primary--disabled: #81bbb1;\n --chat--button--border--primary--disabled: none;\n --chat--button--color--secondary: var(--chat--color-light);\n --chat--button--background--secondary: hsl(0, 0%, 58%);\n --chat--button--border--secondary: none;\n --chat--button--color--secondary--hover: var(--chat--color-light);\n --chat--button--background--secondary--hover: hsl(0, 0%, 51%);\n --chat--button--border--secondary--hover: none;\n --chat--button--color--secondary--disabled: var(--chat--color-light);\n --chat--button--background--secondary--disabled: hsl(0, 0%, 78%);\n --chat--button--border--secondary--disabled: none;\n --chat--close--button--color-hover: var(--chat--color--primary);\n\n /* Send and File Buttons */\n --chat--input--send--button--background: var(--chat--color-white);\n --chat--input--send--button--color: var(--chat--color--secondary);\n --chat--input--send--button--background-hover: var(--chat--color--primary-shade-50);\n --chat--input--send--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--input--file--button--background: var(--chat--color-white);\n --chat--input--file--button--color: var(--chat--color--secondary);\n --chat--input--file--button--background-hover: var(--chat--input--file--button--background);\n --chat--input--file--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--files-spacing: 0.25rem;\n\n /* Body and Footer */\n --chat--body--background: var(--chat--color-light);\n --chat--footer--background: 
var(--chat--color-light);\n --chat--footer--color: var(--chat--color-dark);\n}\n\n\n/* You can override any class styles, too. Right-click inspect in Chat UI to find class to override. */\n.chat-message {\n\tmax-width: 50%;\n}","description":"Override default styling of the public chat interface with CSS"},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Using 'Respond to Webhook' Node","value":"responseNode","description":"Response defined in that node"},{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"}],"default":"lastNode","description":"When and how to respond to the webhook","displayOptions":{"show":{"/availableInChat":[false]}}},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"},{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"}],"default":"streaming","description":"When and how to respond to the webhook","displayOptions":{"show":{"/availableInChat":[true]}}},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. 
Defaults to true."}]},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"public":[false],"@version":[{"_cnd":{"gte":1.3}}]}},"placeholder":"Add Field","default":{},"options":[{"displayName":"Allow File Uploads","name":"allowFileUploads","type":"boolean","default":false,"description":"Whether to allow file uploads in the chat"},{"displayName":"Allowed File Mime Types","name":"allowedFilesMimeTypes","type":"string","default":"*","placeholder":"e.g. image/*, text/*, application/pdf","description":"Allowed file types for upload. Comma-separated list of <a href=\"https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types\" target=\"_blank\">MIME types</a>."},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Using Response Nodes","value":"responseNodes","description":"Send responses to the chat by using one or more Chat nodes"},{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"}],"default":"lastNode","description":"When and how to respond to the chat","displayOptions":{"show":{"/availableInChat":[false]}}},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. 
Agents)"},{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Using Response Nodes","value":"responseNodes","description":"Send responses to the chat by using one or more Chat nodes"}],"default":"streaming","description":"When and how to respond to the chat","displayOptions":{"show":{"/availableInChat":[true]}}},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. Defaults to true."}]},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"mode":["hostedChat","webhook"],"public":[true],"@version":[{"_cnd":{"gte":1.3}}]}},"placeholder":"Add Field","default":{},"options":[{"displayName":"Allowed Origins (CORS)","name":"allowedOrigins","type":"string","default":"*","description":"Comma-separated list of URLs allowed for cross-origin non-preflight requests. Use * (default) to allow all origins.","displayOptions":{"show":{"/mode":["hostedChat","webhook"]}}},{"displayName":"Allow File Uploads","name":"allowFileUploads","type":"boolean","default":false,"description":"Whether to allow file uploads in the chat","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Allowed File Mime Types","name":"allowedFilesMimeTypes","type":"string","default":"*","placeholder":"e.g. image/*, text/*, application/pdf","description":"Allowed file types for upload. 
Comma-separated list of <a href=\"https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types\" target=\"_blank\">MIME types</a>.","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Input Placeholder","name":"inputPlaceholder","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Type your question..","placeholder":"e.g. Type your message here","description":"Shown as placeholder text in the chat input field"},{"displayName":"Load Previous Session","name":"loadPreviousSession","type":"options","options":[{"name":"Off","value":"notSupported","description":"Loading messages of previous session is turned off"},{"name":"From Memory","value":"memory","description":"Load session messages from memory"},{"name":"Manually","value":"manually","description":"Manually return messages of session"}],"default":"notSupported","description":"If loading messages of a previous session should be enabled","builderHint":{"message":"Set to 'memory' to persist conversation history across sessions"}},{"displayName":"Require Button Click to Start Chat","name":"showWelcomeScreen","type":"boolean","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":false,"description":"Whether to show the welcome screen at the start of the chat"},{"displayName":"Start Conversation Button Text","name":"getStarted","type":"string","displayOptions":{"show":{"showWelcomeScreen":[true],"/mode":["hostedChat"]}},"default":"New Conversation","placeholder":"e.g. New Conversation","description":"Shown as part of the welcome screen, in the middle of the chat window"},{"displayName":"Subtitle","name":"subtitle","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Start a chat. We're here to help you 24/7.","placeholder":"e.g. 
We're here for you","description":"Shown at the top of the chat, under the title"},{"displayName":"Title","name":"title","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Hi there! 👋","placeholder":"e.g. Welcome","description":"Shown at the top of the chat"},{"displayName":"Custom Chat Styling","name":"customCss","type":"string","typeOptions":{"rows":10,"editor":"cssEditor"},"displayOptions":{"show":{"/mode":["hostedChat"]}},"default":":root {\n /* Colors */\n --chat--color--primary: #e74266;\n --chat--color--primary-shade-50: #db4061;\n --chat--color--primary--shade-100: #cf3c5c;\n --chat--color--secondary: #20b69e;\n --chat--color-secondary-shade-50: #1ca08a;\n --chat--color-white: #fff;\n --chat--color-light: #f2f4f8;\n --chat--color-light-shade-50: #e6e9f1;\n --chat--color-light-shade-100: #c2c5cc;\n --chat--color-medium: #d2d4d9;\n --chat--color-dark: #101330;\n --chat--color-disabled: #d2d4d9;\n --chat--color-typing: #404040;\n\n /* Base Layout */\n --chat--spacing: 1rem;\n --chat--border-radius: 0.25rem;\n --chat--transition-duration: 0.15s;\n --chat--font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen-Sans, Ubuntu, Cantarell, 'Helvetica Neue', sans-serif;\n\n /* Window Dimensions */\n --chat--window--width: 400px;\n --chat--window--height: 600px;\n --chat--window--bottom: var(--chat--spacing);\n --chat--window--right: var(--chat--spacing);\n --chat--window--z-index: 9999;\n --chat--window--border: 1px solid var(--chat--color-light-shade-50);\n --chat--window--border-radius: var(--chat--border-radius);\n --chat--window--margin-bottom: var(--chat--spacing);\n\n /* Header Styles */\n --chat--header-height: auto;\n --chat--header--padding: var(--chat--spacing);\n --chat--header--background: var(--chat--color-dark);\n --chat--header--color: var(--chat--color-light);\n --chat--header--border-top: none;\n --chat--header--border-bottom: none;\n --chat--header--border-left: none;\n --chat--header--border-right: 
none;\n --chat--heading--font-size: 2em;\n --chat--subtitle--font-size: inherit;\n --chat--subtitle--line-height: 1.8;\n\n /* Message Styles */\n --chat--message--font-size: 1rem;\n --chat--message--padding: var(--chat--spacing);\n --chat--message--border-radius: var(--chat--border-radius);\n --chat--message-line-height: 1.5;\n --chat--message--margin-bottom: calc(var(--chat--spacing) * 1);\n --chat--message--bot--background: var(--chat--color-white);\n --chat--message--bot--color: var(--chat--color-dark);\n --chat--message--bot--border: none;\n --chat--message--user--background: var(--chat--color--secondary);\n --chat--message--user--color: var(--chat--color-white);\n --chat--message--user--border: none;\n --chat--message--pre--background: rgba(0, 0, 0, 0.05);\n --chat--messages-list--padding: var(--chat--spacing);\n\n /* Toggle Button */\n --chat--toggle--size: 64px;\n --chat--toggle--width: var(--chat--toggle--size);\n --chat--toggle--height: var(--chat--toggle--size);\n --chat--toggle--border-radius: 50%;\n --chat--toggle--background: var(--chat--color--primary);\n --chat--toggle--hover--background: var(--chat--color--primary-shade-50);\n --chat--toggle--active--background: var(--chat--color--primary--shade-100);\n --chat--toggle--color: var(--chat--color-white);\n\n /* Input Area */\n --chat--textarea--height: 50px;\n --chat--textarea--max-height: 30rem;\n --chat--input--font-size: inherit;\n --chat--input--border: 0;\n --chat--input--border-radius: 0;\n --chat--input--padding: 0.8rem;\n --chat--input--background: var(--chat--color-white);\n --chat--input--text-color: initial;\n --chat--input--line-height: 1.5;\n --chat--input--placeholder--font-size: var(--chat--input--font-size);\n --chat--input--border-active: 0;\n --chat--input--left--panel--width: 2rem;\n\n /* Button Styles */\n --chat--button--padding: calc(var(--chat--spacing) * 5 / 8) var(--chat--spacing);\n --chat--button--border-radius: var(--chat--border-radius);\n --chat--button--font-size: 1rem;\n 
--chat--button--line-height: 1;\n --chat--button--color--primary: var(--chat--color-light);\n --chat--button--background--primary: var(--chat--color--secondary);\n --chat--button--border--primary: none;\n --chat--button--color--primary--hover: var(--chat--color-light);\n --chat--button--background--primary--hover: var(--chat--color-secondary-shade-50);\n --chat--button--border--primary--hover: none;\n --chat--button--color--primary--disabled: var(--chat--color-light);\n --chat--button--background--primary--disabled: #81bbb1;\n --chat--button--border--primary--disabled: none;\n --chat--button--color--secondary: var(--chat--color-light);\n --chat--button--background--secondary: hsl(0, 0%, 58%);\n --chat--button--border--secondary: none;\n --chat--button--color--secondary--hover: var(--chat--color-light);\n --chat--button--background--secondary--hover: hsl(0, 0%, 51%);\n --chat--button--border--secondary--hover: none;\n --chat--button--color--secondary--disabled: var(--chat--color-light);\n --chat--button--background--secondary--disabled: hsl(0, 0%, 78%);\n --chat--button--border--secondary--disabled: none;\n --chat--close--button--color-hover: var(--chat--color--primary);\n\n /* Send and File Buttons */\n --chat--input--send--button--background: var(--chat--color-white);\n --chat--input--send--button--color: var(--chat--color--secondary);\n --chat--input--send--button--background-hover: var(--chat--color--primary-shade-50);\n --chat--input--send--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--input--file--button--background: var(--chat--color-white);\n --chat--input--file--button--color: var(--chat--color--secondary);\n --chat--input--file--button--background-hover: var(--chat--input--file--button--background);\n --chat--input--file--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--files-spacing: 0.25rem;\n\n /* Body and Footer */\n --chat--body--background: var(--chat--color-light);\n --chat--footer--background: 
var(--chat--color-light);\n --chat--footer--color: var(--chat--color-dark);\n}\n\n\n/* You can override any class styles, too. Right-click inspect in Chat UI to find class to override. */\n.chat-message {\n\tmax-width: 50%;\n}","description":"Override default styling of the public chat interface with CSS"},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"},{"name":"Using 'Respond to Webhook' Node","value":"responseNode","description":"Response defined in that node"}],"default":"lastNode","description":"When and how to respond to the chat","displayOptions":{"show":{"/mode":["webhook"],"/availableInChat":[false]}}},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"},{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"}],"default":"streaming","description":"When and how to respond to the chat","displayOptions":{"show":{"/mode":["webhook"],"/availableInChat":[true]}}},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. 
Agents)"},{"name":"Using Response Nodes","value":"responseNodes","description":"Send responses to the chat by using one or more Chat nodes"}],"default":"lastNode","description":"When and how to respond to the chat","displayOptions":{"show":{"/mode":["hostedChat"],"/availableInChat":[false]}}},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"},{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Using Response Nodes","value":"responseNodes","description":"Send responses to the chat by using one or more Chat nodes"}],"default":"streaming","description":"When and how to respond to the chat","displayOptions":{"show":{"/mode":["hostedChat"],"/availableInChat":[true]}}},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. Defaults to true."}]}]},
|
|
97
|
+
{"displayName":"Chat Trigger","name":"chatTrigger","icon":"node:chat-trigger","iconColor":"black","group":["trigger"],"version":[1,1.1,1.2,1.3,1.4],"defaultVersion":1.4,"description":"Runs the workflow when an n8n generated webchat is submitted","defaults":{"name":"When chat message received"},"codex":{"categories":["Core Nodes"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-langchain.chattrigger/"}]}},"maxNodes":1,"inputs":"={{ (() => {\n\t\t\tif (!['hostedChat', 'webhook'].includes($parameter.mode)) {\n\t\t\t\treturn [];\n\t\t\t}\n\t\t\tif ($parameter.options?.loadPreviousSession !== 'memory') {\n\t\t\t\treturn [];\n\t\t\t}\n\n\t\t\treturn [\n\t\t\t\t{\n\t\t\t\t\tdisplayName: 'Memory',\n\t\t\t\t\tmaxConnections: 1,\n\t\t\t\t\ttype: 'ai_memory',\n\t\t\t\t\trequired: true,\n\t\t\t\t}\n\t\t\t];\n\t\t })() }}","outputs":["main"],"builderHint":{"inputs":{"ai_memory":{"required":true,"displayOptions":{"show":{"mode":["hostedChat","webhook"],"options.loadPreviousSession":["memory"]}}}}},"credentials":[{"name":"httpBasicAuth","required":true,"displayOptions":{"show":{"authentication":["basicAuth"]}}}],"webhooks":[{"name":"setup","httpMethod":"GET","responseMode":"onReceived","path":"chat","ndvHideUrl":true},{"name":"default","httpMethod":"POST","responseMode":"={{$parameter.options?.[\"responseMode\"] ?? ($parameter.availableInChat ? 
\"streaming\" : \"lastNode\") }}","path":"chat","ndvHideMethod":true,"ndvHideUrl":"={{ !$parameter.public }}"}],"eventTriggerDescription":"Waiting for you to submit the chat","activationMessage":"You can now make calls to your production chat URL.","triggerPanel":false,"properties":[{"displayName":"Make Chat Publicly Available","name":"public","type":"boolean","default":false,"description":"Whether the chat should be publicly available or only accessible through the manual chat interface"},{"displayName":"Mode","name":"mode","type":"options","options":[{"name":"Hosted Chat","value":"hostedChat","description":"Chat on a page served by n8n"},{"name":"Embedded Chat","value":"webhook","description":"Chat through a widget embedded in another page, or by calling a webhook"}],"default":"hostedChat","displayOptions":{"show":{"public":[true]}}},{"displayName":"Chat will be live at the URL above once this workflow is published. Live executions will show up in the ‘executions’ tab","name":"hostedChatNotice","type":"notice","displayOptions":{"show":{"mode":["hostedChat"],"public":[true]}},"default":""},{"displayName":"Follow the instructions <a href=\"https://www.npmjs.com/package/@n8n/chat\" target=\"_blank\">here</a> to embed chat in a webpage (or just call the webhook URL at the top of this section). 
Chat will be live once you publish this workflow","name":"embeddedChatNotice","type":"notice","displayOptions":{"show":{"mode":["webhook"],"public":[true]}},"default":""},{"displayName":"Authentication","name":"authentication","type":"options","displayOptions":{"show":{"public":[true]}},"options":[{"name":"Basic Auth","value":"basicAuth","description":"Simple username and password (the same one for all users)"},{"name":"n8n User Auth","value":"n8nUserAuth","description":"Require user to be logged in with their n8n account"},{"name":"None","value":"none"}],"default":"none","description":"The way to authenticate"},{"displayName":"Initial Message(s)","name":"initialMessages","type":"string","displayOptions":{"show":{"mode":["hostedChat"],"public":[true]}},"typeOptions":{"rows":3},"default":"Hi there! 👋\nMy name is Nathan. How can I assist you today?","description":"Default messages shown at the start of the chat, one per line"},{"displayName":"Make Available in n8n Chat Hub","name":"availableInChat","type":"boolean","default":false,"noDataExpression":true,"description":"Whether to make the agent available in n8n Chat Hub for n8n instance users to chat with"},{"displayName":"Your Chat Trigger node is out of date. To update, delete this node and insert a new Chat Trigger node.","name":"availableInChatNotice","type":"notice","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"lt":1.2}}]}},"default":""},{"displayName":"Your n8n users will be able to use this agent in <a href=\"/home/chat/\" target=\"_blank\">Chat</a> once this workflow is published. 
Make sure to share this workflow with at least Project Chat User access to all users who should use it.","name":"availableInChatNotice","type":"notice","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"gte":1.2}}]}},"default":""},{"displayName":"Agent Icon","name":"agentIcon","type":"icon","default":{"type":"icon","value":"bot"},"noDataExpression":true,"description":"The icon of the agent on n8n Chat","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Agent Name","name":"agentName","type":"string","default":"","noDataExpression":true,"description":"The name of the agent on n8n Chat. Name of the workflow is used if left empty.","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Agent Description","name":"agentDescription","type":"string","typeOptions":{"rows":2},"default":"","noDataExpression":true,"description":"The description of the agent on n8n Chat","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Suggestions","name":"suggestedPrompts","type":"fixedCollection","typeOptions":{"multipleValues":true,"fixedCollection":{"layout":"inline"}},"default":{},"noDataExpression":true,"placeholder":"Add Prompt","description":"Suggested prompts shown to users in n8n Chat Hub to start a conversation with the agent","displayOptions":{"show":{"availableInChat":[true],"@version":[{"_cnd":{"gte":1.2}}]}},"options":[{"name":"prompts","displayName":"Prompts","values":[{"displayName":"Icon","name":"icon","type":"icon","noDataExpression":true,"default":{"type":"icon","value":"comment"}},{"displayName":"Prompt Text","name":"text","type":"string","default":"","noDataExpression":true,"required":true}]}]},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"public":[false],"@version":[1,1.1]}},"placeholder":"Add Field","default":{},"options":[{"displayName":"Allow File 
Uploads","name":"allowFileUploads","type":"boolean","default":false,"description":"Whether to allow file uploads in the chat"},{"displayName":"Allowed File Mime Types","name":"allowedFilesMimeTypes","type":"string","default":"*","placeholder":"e.g. image/*, text/*, application/pdf","description":"Allowed file types for upload. Comma-separated list of <a href=\"https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types\" target=\"_blank\">MIME types</a>."}]},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"mode":["hostedChat","webhook"],"public":[true],"@version":[1,1.1]}},"placeholder":"Add Field","default":{},"options":[{"displayName":"Allowed Origins (CORS)","name":"allowedOrigins","type":"string","default":"*","description":"Comma-separated list of URLs allowed for cross-origin non-preflight requests. Use * (default) to allow all origins.","displayOptions":{"show":{"/mode":["hostedChat","webhook"]}}},{"displayName":"Allow File Uploads","name":"allowFileUploads","type":"boolean","default":false,"description":"Whether to allow file uploads in the chat","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Allowed File Mime Types","name":"allowedFilesMimeTypes","type":"string","default":"*","placeholder":"e.g. image/*, text/*, application/pdf","description":"Allowed file types for upload. Comma-separated list of <a href=\"https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types\" target=\"_blank\">MIME types</a>.","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Input Placeholder","name":"inputPlaceholder","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Type your question..","placeholder":"e.g. 
Type your message here","description":"Shown as placeholder text in the chat input field"},{"displayName":"Load Previous Session","name":"loadPreviousSession","type":"options","options":[{"name":"Off","value":"notSupported","description":"Loading messages of previous session is turned off"},{"name":"From Memory","value":"memory","description":"Load session messages from memory"},{"name":"Manually","value":"manually","description":"Manually return messages of session"}],"default":"notSupported","description":"If loading messages of a previous session should be enabled","builderHint":{"message":"This ONLY rehydrates the chat widget UI when the user reopens it — it does NOT give the Agent memory. The Agent gets memory from its own memory subnode regardless of this setting. Only set to 'memory' if the user wants the widget to restore visible history on reload; if so, you MUST also attach a memory subnode to this trigger (use the same memory node as the Agent so widget history matches what the Agent remembers). Otherwise leave as 'notSupported'."}},{"displayName":"Require Button Click to Start Chat","name":"showWelcomeScreen","type":"boolean","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":false,"description":"Whether to show the welcome screen at the start of the chat"},{"displayName":"Start Conversation Button Text","name":"getStarted","type":"string","displayOptions":{"show":{"showWelcomeScreen":[true],"/mode":["hostedChat"]}},"default":"New Conversation","placeholder":"e.g. New Conversation","description":"Shown as part of the welcome screen, in the middle of the chat window"},{"displayName":"Subtitle","name":"subtitle","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Start a chat. We're here to help you 24/7.","placeholder":"e.g. 
We're here for you","description":"Shown at the top of the chat, under the title"},{"displayName":"Title","name":"title","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Hi there! 👋","placeholder":"e.g. Welcome","description":"Shown at the top of the chat"},{"displayName":"Custom Chat Styling","name":"customCss","type":"string","typeOptions":{"rows":10,"editor":"cssEditor"},"displayOptions":{"show":{"/mode":["hostedChat"]}},"default":":root {\n /* Colors */\n --chat--color--primary: #e74266;\n --chat--color--primary-shade-50: #db4061;\n --chat--color--primary--shade-100: #cf3c5c;\n --chat--color--secondary: #20b69e;\n --chat--color-secondary-shade-50: #1ca08a;\n --chat--color-white: #fff;\n --chat--color-light: #f2f4f8;\n --chat--color-light-shade-50: #e6e9f1;\n --chat--color-light-shade-100: #c2c5cc;\n --chat--color-medium: #d2d4d9;\n --chat--color-dark: #101330;\n --chat--color-disabled: #d2d4d9;\n --chat--color-typing: #404040;\n\n /* Base Layout */\n --chat--spacing: 1rem;\n --chat--border-radius: 0.25rem;\n --chat--transition-duration: 0.15s;\n --chat--font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen-Sans, Ubuntu, Cantarell, 'Helvetica Neue', sans-serif;\n\n /* Window Dimensions */\n --chat--window--width: 400px;\n --chat--window--height: 600px;\n --chat--window--bottom: var(--chat--spacing);\n --chat--window--right: var(--chat--spacing);\n --chat--window--z-index: 9999;\n --chat--window--border: 1px solid var(--chat--color-light-shade-50);\n --chat--window--border-radius: var(--chat--border-radius);\n --chat--window--margin-bottom: var(--chat--spacing);\n\n /* Header Styles */\n --chat--header-height: auto;\n --chat--header--padding: var(--chat--spacing);\n --chat--header--background: var(--chat--color-dark);\n --chat--header--color: var(--chat--color-light);\n --chat--header--border-top: none;\n --chat--header--border-bottom: none;\n --chat--header--border-left: none;\n --chat--header--border-right: 
none;\n --chat--heading--font-size: 2em;\n --chat--subtitle--font-size: inherit;\n --chat--subtitle--line-height: 1.8;\n\n /* Message Styles */\n --chat--message--font-size: 1rem;\n --chat--message--padding: var(--chat--spacing);\n --chat--message--border-radius: var(--chat--border-radius);\n --chat--message-line-height: 1.5;\n --chat--message--margin-bottom: calc(var(--chat--spacing) * 1);\n --chat--message--bot--background: var(--chat--color-white);\n --chat--message--bot--color: var(--chat--color-dark);\n --chat--message--bot--border: none;\n --chat--message--user--background: var(--chat--color--secondary);\n --chat--message--user--color: var(--chat--color-white);\n --chat--message--user--border: none;\n --chat--message--pre--background: rgba(0, 0, 0, 0.05);\n --chat--messages-list--padding: var(--chat--spacing);\n\n /* Toggle Button */\n --chat--toggle--size: 64px;\n --chat--toggle--width: var(--chat--toggle--size);\n --chat--toggle--height: var(--chat--toggle--size);\n --chat--toggle--border-radius: 50%;\n --chat--toggle--background: var(--chat--color--primary);\n --chat--toggle--hover--background: var(--chat--color--primary-shade-50);\n --chat--toggle--active--background: var(--chat--color--primary--shade-100);\n --chat--toggle--color: var(--chat--color-white);\n\n /* Input Area */\n --chat--textarea--height: 50px;\n --chat--textarea--max-height: 30rem;\n --chat--input--font-size: inherit;\n --chat--input--border: 0;\n --chat--input--border-radius: 0;\n --chat--input--padding: 0.8rem;\n --chat--input--background: var(--chat--color-white);\n --chat--input--text-color: initial;\n --chat--input--line-height: 1.5;\n --chat--input--placeholder--font-size: var(--chat--input--font-size);\n --chat--input--border-active: 0;\n --chat--input--left--panel--width: 2rem;\n\n /* Button Styles */\n --chat--button--padding: calc(var(--chat--spacing) * 5 / 8) var(--chat--spacing);\n --chat--button--border-radius: var(--chat--border-radius);\n --chat--button--font-size: 1rem;\n 
--chat--button--line-height: 1;\n --chat--button--color--primary: var(--chat--color-light);\n --chat--button--background--primary: var(--chat--color--secondary);\n --chat--button--border--primary: none;\n --chat--button--color--primary--hover: var(--chat--color-light);\n --chat--button--background--primary--hover: var(--chat--color-secondary-shade-50);\n --chat--button--border--primary--hover: none;\n --chat--button--color--primary--disabled: var(--chat--color-light);\n --chat--button--background--primary--disabled: #81bbb1;\n --chat--button--border--primary--disabled: none;\n --chat--button--color--secondary: var(--chat--color-light);\n --chat--button--background--secondary: hsl(0, 0%, 58%);\n --chat--button--border--secondary: none;\n --chat--button--color--secondary--hover: var(--chat--color-light);\n --chat--button--background--secondary--hover: hsl(0, 0%, 51%);\n --chat--button--border--secondary--hover: none;\n --chat--button--color--secondary--disabled: var(--chat--color-light);\n --chat--button--background--secondary--disabled: hsl(0, 0%, 78%);\n --chat--button--border--secondary--disabled: none;\n --chat--close--button--color-hover: var(--chat--color--primary);\n\n /* Send and File Buttons */\n --chat--input--send--button--background: var(--chat--color-white);\n --chat--input--send--button--color: var(--chat--color--secondary);\n --chat--input--send--button--background-hover: var(--chat--color--primary-shade-50);\n --chat--input--send--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--input--file--button--background: var(--chat--color-white);\n --chat--input--file--button--color: var(--chat--color--secondary);\n --chat--input--file--button--background-hover: var(--chat--input--file--button--background);\n --chat--input--file--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--files-spacing: 0.25rem;\n\n /* Body and Footer */\n --chat--body--background: var(--chat--color-light);\n --chat--footer--background: 
var(--chat--color-light);\n --chat--footer--color: var(--chat--color-dark);\n}\n\n\n/* You can override any class styles, too. Right-click inspect in Chat UI to find class to override. */\n.chat-message {\n\tmax-width: 50%;\n}","description":"Override default styling of the public chat interface with CSS"},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Using 'Respond to Webhook' Node","value":"responseNode","description":"Response defined in that node"}],"default":"lastNode","description":"When and how to respond to the webhook"},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. Defaults to true."}]},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"mode":["hostedChat","webhook"],"public":[true],"@version":[1.2]}},"placeholder":"Add Field","default":{},"options":[{"displayName":"Allowed Origins (CORS)","name":"allowedOrigins","type":"string","default":"*","description":"Comma-separated list of URLs allowed for cross-origin non-preflight requests. 
Use * (default) to allow all origins.","displayOptions":{"show":{"/mode":["hostedChat","webhook"]}}},{"displayName":"Allow File Uploads","name":"allowFileUploads","type":"boolean","default":false,"description":"Whether to allow file uploads in the chat","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Allowed File Mime Types","name":"allowedFilesMimeTypes","type":"string","default":"*","placeholder":"e.g. image/*, text/*, application/pdf","description":"Allowed file types for upload. Comma-separated list of <a href=\"https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types\" target=\"_blank\">MIME types</a>.","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Input Placeholder","name":"inputPlaceholder","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Type your question..","placeholder":"e.g. Type your message here","description":"Shown as placeholder text in the chat input field"},{"displayName":"Load Previous Session","name":"loadPreviousSession","type":"options","options":[{"name":"Off","value":"notSupported","description":"Loading messages of previous session is turned off"},{"name":"From Memory","value":"memory","description":"Load session messages from memory"},{"name":"Manually","value":"manually","description":"Manually return messages of session"}],"default":"notSupported","description":"If loading messages of a previous session should be enabled","builderHint":{"message":"This ONLY rehydrates the chat widget UI when the user reopens it — it does NOT give the Agent memory. The Agent gets memory from its own memory subnode regardless of this setting. Only set to 'memory' if the user wants the widget to restore visible history on reload; if so, you MUST also attach a memory subnode to this trigger (use the same memory node as the Agent so widget history matches what the Agent remembers). 
Otherwise leave as 'notSupported'."}},{"displayName":"Require Button Click to Start Chat","name":"showWelcomeScreen","type":"boolean","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":false,"description":"Whether to show the welcome screen at the start of the chat"},{"displayName":"Start Conversation Button Text","name":"getStarted","type":"string","displayOptions":{"show":{"showWelcomeScreen":[true],"/mode":["hostedChat"]}},"default":"New Conversation","placeholder":"e.g. New Conversation","description":"Shown as part of the welcome screen, in the middle of the chat window"},{"displayName":"Subtitle","name":"subtitle","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Start a chat. We're here to help you 24/7.","placeholder":"e.g. We're here for you","description":"Shown at the top of the chat, under the title"},{"displayName":"Title","name":"title","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Hi there! 👋","placeholder":"e.g. 
Welcome","description":"Shown at the top of the chat"},{"displayName":"Custom Chat Styling","name":"customCss","type":"string","typeOptions":{"rows":10,"editor":"cssEditor"},"displayOptions":{"show":{"/mode":["hostedChat"]}},"default":":root {\n /* Colors */\n --chat--color--primary: #e74266;\n --chat--color--primary-shade-50: #db4061;\n --chat--color--primary--shade-100: #cf3c5c;\n --chat--color--secondary: #20b69e;\n --chat--color-secondary-shade-50: #1ca08a;\n --chat--color-white: #fff;\n --chat--color-light: #f2f4f8;\n --chat--color-light-shade-50: #e6e9f1;\n --chat--color-light-shade-100: #c2c5cc;\n --chat--color-medium: #d2d4d9;\n --chat--color-dark: #101330;\n --chat--color-disabled: #d2d4d9;\n --chat--color-typing: #404040;\n\n /* Base Layout */\n --chat--spacing: 1rem;\n --chat--border-radius: 0.25rem;\n --chat--transition-duration: 0.15s;\n --chat--font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen-Sans, Ubuntu, Cantarell, 'Helvetica Neue', sans-serif;\n\n /* Window Dimensions */\n --chat--window--width: 400px;\n --chat--window--height: 600px;\n --chat--window--bottom: var(--chat--spacing);\n --chat--window--right: var(--chat--spacing);\n --chat--window--z-index: 9999;\n --chat--window--border: 1px solid var(--chat--color-light-shade-50);\n --chat--window--border-radius: var(--chat--border-radius);\n --chat--window--margin-bottom: var(--chat--spacing);\n\n /* Header Styles */\n --chat--header-height: auto;\n --chat--header--padding: var(--chat--spacing);\n --chat--header--background: var(--chat--color-dark);\n --chat--header--color: var(--chat--color-light);\n --chat--header--border-top: none;\n --chat--header--border-bottom: none;\n --chat--header--border-left: none;\n --chat--header--border-right: none;\n --chat--heading--font-size: 2em;\n --chat--subtitle--font-size: inherit;\n --chat--subtitle--line-height: 1.8;\n\n /* Message Styles */\n --chat--message--font-size: 1rem;\n --chat--message--padding: var(--chat--spacing);\n 
--chat--message--border-radius: var(--chat--border-radius);\n --chat--message-line-height: 1.5;\n --chat--message--margin-bottom: calc(var(--chat--spacing) * 1);\n --chat--message--bot--background: var(--chat--color-white);\n --chat--message--bot--color: var(--chat--color-dark);\n --chat--message--bot--border: none;\n --chat--message--user--background: var(--chat--color--secondary);\n --chat--message--user--color: var(--chat--color-white);\n --chat--message--user--border: none;\n --chat--message--pre--background: rgba(0, 0, 0, 0.05);\n --chat--messages-list--padding: var(--chat--spacing);\n\n /* Toggle Button */\n --chat--toggle--size: 64px;\n --chat--toggle--width: var(--chat--toggle--size);\n --chat--toggle--height: var(--chat--toggle--size);\n --chat--toggle--border-radius: 50%;\n --chat--toggle--background: var(--chat--color--primary);\n --chat--toggle--hover--background: var(--chat--color--primary-shade-50);\n --chat--toggle--active--background: var(--chat--color--primary--shade-100);\n --chat--toggle--color: var(--chat--color-white);\n\n /* Input Area */\n --chat--textarea--height: 50px;\n --chat--textarea--max-height: 30rem;\n --chat--input--font-size: inherit;\n --chat--input--border: 0;\n --chat--input--border-radius: 0;\n --chat--input--padding: 0.8rem;\n --chat--input--background: var(--chat--color-white);\n --chat--input--text-color: initial;\n --chat--input--line-height: 1.5;\n --chat--input--placeholder--font-size: var(--chat--input--font-size);\n --chat--input--border-active: 0;\n --chat--input--left--panel--width: 2rem;\n\n /* Button Styles */\n --chat--button--padding: calc(var(--chat--spacing) * 5 / 8) var(--chat--spacing);\n --chat--button--border-radius: var(--chat--border-radius);\n --chat--button--font-size: 1rem;\n --chat--button--line-height: 1;\n --chat--button--color--primary: var(--chat--color-light);\n --chat--button--background--primary: var(--chat--color--secondary);\n --chat--button--border--primary: none;\n 
--chat--button--color--primary--hover: var(--chat--color-light);\n --chat--button--background--primary--hover: var(--chat--color-secondary-shade-50);\n --chat--button--border--primary--hover: none;\n --chat--button--color--primary--disabled: var(--chat--color-light);\n --chat--button--background--primary--disabled: #81bbb1;\n --chat--button--border--primary--disabled: none;\n --chat--button--color--secondary: var(--chat--color-light);\n --chat--button--background--secondary: hsl(0, 0%, 58%);\n --chat--button--border--secondary: none;\n --chat--button--color--secondary--hover: var(--chat--color-light);\n --chat--button--background--secondary--hover: hsl(0, 0%, 51%);\n --chat--button--border--secondary--hover: none;\n --chat--button--color--secondary--disabled: var(--chat--color-light);\n --chat--button--background--secondary--disabled: hsl(0, 0%, 78%);\n --chat--button--border--secondary--disabled: none;\n --chat--close--button--color-hover: var(--chat--color--primary);\n\n /* Send and File Buttons */\n --chat--input--send--button--background: var(--chat--color-white);\n --chat--input--send--button--color: var(--chat--color--secondary);\n --chat--input--send--button--background-hover: var(--chat--color--primary-shade-50);\n --chat--input--send--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--input--file--button--background: var(--chat--color-white);\n --chat--input--file--button--color: var(--chat--color--secondary);\n --chat--input--file--button--background-hover: var(--chat--input--file--button--background);\n --chat--input--file--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--files-spacing: 0.25rem;\n\n /* Body and Footer */\n --chat--body--background: var(--chat--color-light);\n --chat--footer--background: var(--chat--color-light);\n --chat--footer--color: var(--chat--color-dark);\n}\n\n\n/* You can override any class styles, too. Right-click inspect in Chat UI to find class to override. 
*/\n.chat-message {\n\tmax-width: 50%;\n}","description":"Override default styling of the public chat interface with CSS"},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Using 'Respond to Webhook' Node","value":"responseNode","description":"Response defined in that node"},{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"}],"default":"lastNode","description":"When and how to respond to the webhook","displayOptions":{"show":{"/availableInChat":[false]}}},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"},{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"}],"default":"streaming","description":"When and how to respond to the webhook","displayOptions":{"show":{"/availableInChat":[true]}}},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. 
Defaults to true."}]},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"public":[false],"@version":[{"_cnd":{"gte":1.3}}]}},"placeholder":"Add Field","default":{},"options":[{"displayName":"Allow File Uploads","name":"allowFileUploads","type":"boolean","default":false,"description":"Whether to allow file uploads in the chat"},{"displayName":"Allowed File Mime Types","name":"allowedFilesMimeTypes","type":"string","default":"*","placeholder":"e.g. image/*, text/*, application/pdf","description":"Allowed file types for upload. Comma-separated list of <a href=\"https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types\" target=\"_blank\">MIME types</a>."},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Using Response Nodes","value":"responseNodes","description":"Send responses to the chat by using one or more Chat nodes"},{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"}],"default":"lastNode","description":"When and how to respond to the chat","displayOptions":{"show":{"/availableInChat":[false]}}},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. 
Agents)"},{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Using Response Nodes","value":"responseNodes","description":"Send responses to the chat by using one or more Chat nodes"}],"default":"streaming","description":"When and how to respond to the chat","displayOptions":{"show":{"/availableInChat":[true]}}},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. Defaults to true."}]},{"displayName":"Options","name":"options","type":"collection","displayOptions":{"show":{"mode":["hostedChat","webhook"],"public":[true],"@version":[{"_cnd":{"gte":1.3}}]}},"placeholder":"Add Field","default":{},"options":[{"displayName":"Allowed Origins (CORS)","name":"allowedOrigins","type":"string","default":"*","description":"Comma-separated list of URLs allowed for cross-origin non-preflight requests. Use * (default) to allow all origins.","displayOptions":{"show":{"/mode":["hostedChat","webhook"]}}},{"displayName":"Allow File Uploads","name":"allowFileUploads","type":"boolean","default":false,"description":"Whether to allow file uploads in the chat","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Allowed File Mime Types","name":"allowedFilesMimeTypes","type":"string","default":"*","placeholder":"e.g. image/*, text/*, application/pdf","description":"Allowed file types for upload. 
Comma-separated list of <a href=\"https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types\" target=\"_blank\">MIME types</a>.","displayOptions":{"show":{"/mode":["hostedChat"]}}},{"displayName":"Input Placeholder","name":"inputPlaceholder","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Type your question..","placeholder":"e.g. Type your message here","description":"Shown as placeholder text in the chat input field"},{"displayName":"Load Previous Session","name":"loadPreviousSession","type":"options","options":[{"name":"Off","value":"notSupported","description":"Loading messages of previous session is turned off"},{"name":"From Memory","value":"memory","description":"Load session messages from memory"},{"name":"Manually","value":"manually","description":"Manually return messages of session"}],"default":"notSupported","description":"If loading messages of a previous session should be enabled","builderHint":{"message":"This ONLY rehydrates the chat widget UI when the user reopens it — it does NOT give the Agent memory. The Agent gets memory from its own memory subnode regardless of this setting. Only set to 'memory' if the user wants the widget to restore visible history on reload; if so, you MUST also attach a memory subnode to this trigger (use the same memory node as the Agent so widget history matches what the Agent remembers). Otherwise leave as 'notSupported'."}},{"displayName":"Require Button Click to Start Chat","name":"showWelcomeScreen","type":"boolean","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":false,"description":"Whether to show the welcome screen at the start of the chat"},{"displayName":"Start Conversation Button Text","name":"getStarted","type":"string","displayOptions":{"show":{"showWelcomeScreen":[true],"/mode":["hostedChat"]}},"default":"New Conversation","placeholder":"e.g. 
New Conversation","description":"Shown as part of the welcome screen, in the middle of the chat window"},{"displayName":"Subtitle","name":"subtitle","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Start a chat. We're here to help you 24/7.","placeholder":"e.g. We're here for you","description":"Shown at the top of the chat, under the title"},{"displayName":"Title","name":"title","type":"string","displayOptions":{"show":{"/mode":["hostedChat"]}},"default":"Hi there! 👋","placeholder":"e.g. Welcome","description":"Shown at the top of the chat"},{"displayName":"Custom Chat Styling","name":"customCss","type":"string","typeOptions":{"rows":10,"editor":"cssEditor"},"displayOptions":{"show":{"/mode":["hostedChat"]}},"default":":root {\n /* Colors */\n --chat--color--primary: #e74266;\n --chat--color--primary-shade-50: #db4061;\n --chat--color--primary--shade-100: #cf3c5c;\n --chat--color--secondary: #20b69e;\n --chat--color-secondary-shade-50: #1ca08a;\n --chat--color-white: #fff;\n --chat--color-light: #f2f4f8;\n --chat--color-light-shade-50: #e6e9f1;\n --chat--color-light-shade-100: #c2c5cc;\n --chat--color-medium: #d2d4d9;\n --chat--color-dark: #101330;\n --chat--color-disabled: #d2d4d9;\n --chat--color-typing: #404040;\n\n /* Base Layout */\n --chat--spacing: 1rem;\n --chat--border-radius: 0.25rem;\n --chat--transition-duration: 0.15s;\n --chat--font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen-Sans, Ubuntu, Cantarell, 'Helvetica Neue', sans-serif;\n\n /* Window Dimensions */\n --chat--window--width: 400px;\n --chat--window--height: 600px;\n --chat--window--bottom: var(--chat--spacing);\n --chat--window--right: var(--chat--spacing);\n --chat--window--z-index: 9999;\n --chat--window--border: 1px solid var(--chat--color-light-shade-50);\n --chat--window--border-radius: var(--chat--border-radius);\n --chat--window--margin-bottom: var(--chat--spacing);\n\n /* Header Styles */\n --chat--header-height: auto;\n 
--chat--header--padding: var(--chat--spacing);\n --chat--header--background: var(--chat--color-dark);\n --chat--header--color: var(--chat--color-light);\n --chat--header--border-top: none;\n --chat--header--border-bottom: none;\n --chat--header--border-left: none;\n --chat--header--border-right: none;\n --chat--heading--font-size: 2em;\n --chat--subtitle--font-size: inherit;\n --chat--subtitle--line-height: 1.8;\n\n /* Message Styles */\n --chat--message--font-size: 1rem;\n --chat--message--padding: var(--chat--spacing);\n --chat--message--border-radius: var(--chat--border-radius);\n --chat--message-line-height: 1.5;\n --chat--message--margin-bottom: calc(var(--chat--spacing) * 1);\n --chat--message--bot--background: var(--chat--color-white);\n --chat--message--bot--color: var(--chat--color-dark);\n --chat--message--bot--border: none;\n --chat--message--user--background: var(--chat--color--secondary);\n --chat--message--user--color: var(--chat--color-white);\n --chat--message--user--border: none;\n --chat--message--pre--background: rgba(0, 0, 0, 0.05);\n --chat--messages-list--padding: var(--chat--spacing);\n\n /* Toggle Button */\n --chat--toggle--size: 64px;\n --chat--toggle--width: var(--chat--toggle--size);\n --chat--toggle--height: var(--chat--toggle--size);\n --chat--toggle--border-radius: 50%;\n --chat--toggle--background: var(--chat--color--primary);\n --chat--toggle--hover--background: var(--chat--color--primary-shade-50);\n --chat--toggle--active--background: var(--chat--color--primary--shade-100);\n --chat--toggle--color: var(--chat--color-white);\n\n /* Input Area */\n --chat--textarea--height: 50px;\n --chat--textarea--max-height: 30rem;\n --chat--input--font-size: inherit;\n --chat--input--border: 0;\n --chat--input--border-radius: 0;\n --chat--input--padding: 0.8rem;\n --chat--input--background: var(--chat--color-white);\n --chat--input--text-color: initial;\n --chat--input--line-height: 1.5;\n --chat--input--placeholder--font-size: 
var(--chat--input--font-size);\n --chat--input--border-active: 0;\n --chat--input--left--panel--width: 2rem;\n\n /* Button Styles */\n --chat--button--padding: calc(var(--chat--spacing) * 5 / 8) var(--chat--spacing);\n --chat--button--border-radius: var(--chat--border-radius);\n --chat--button--font-size: 1rem;\n --chat--button--line-height: 1;\n --chat--button--color--primary: var(--chat--color-light);\n --chat--button--background--primary: var(--chat--color--secondary);\n --chat--button--border--primary: none;\n --chat--button--color--primary--hover: var(--chat--color-light);\n --chat--button--background--primary--hover: var(--chat--color-secondary-shade-50);\n --chat--button--border--primary--hover: none;\n --chat--button--color--primary--disabled: var(--chat--color-light);\n --chat--button--background--primary--disabled: #81bbb1;\n --chat--button--border--primary--disabled: none;\n --chat--button--color--secondary: var(--chat--color-light);\n --chat--button--background--secondary: hsl(0, 0%, 58%);\n --chat--button--border--secondary: none;\n --chat--button--color--secondary--hover: var(--chat--color-light);\n --chat--button--background--secondary--hover: hsl(0, 0%, 51%);\n --chat--button--border--secondary--hover: none;\n --chat--button--color--secondary--disabled: var(--chat--color-light);\n --chat--button--background--secondary--disabled: hsl(0, 0%, 78%);\n --chat--button--border--secondary--disabled: none;\n --chat--close--button--color-hover: var(--chat--color--primary);\n\n /* Send and File Buttons */\n --chat--input--send--button--background: var(--chat--color-white);\n --chat--input--send--button--color: var(--chat--color--secondary);\n --chat--input--send--button--background-hover: var(--chat--color--primary-shade-50);\n --chat--input--send--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--input--file--button--background: var(--chat--color-white);\n --chat--input--file--button--color: var(--chat--color--secondary);\n 
--chat--input--file--button--background-hover: var(--chat--input--file--button--background);\n --chat--input--file--button--color-hover: var(--chat--color-secondary-shade-50);\n --chat--files-spacing: 0.25rem;\n\n /* Body and Footer */\n --chat--body--background: var(--chat--color-light);\n --chat--footer--background: var(--chat--color-light);\n --chat--footer--color: var(--chat--color-dark);\n}\n\n\n/* You can override any class styles, too. Right-click inspect in Chat UI to find class to override. */\n.chat-message {\n\tmax-width: 50%;\n}","description":"Override default styling of the public chat interface with CSS"},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"},{"name":"Using 'Respond to Webhook' Node","value":"responseNode","description":"Response defined in that node"}],"default":"lastNode","description":"When and how to respond to the chat","displayOptions":{"show":{"/mode":["webhook"],"/availableInChat":[false]}}},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"},{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"}],"default":"streaming","description":"When and how to respond to the chat","displayOptions":{"show":{"/mode":["webhook"],"/availableInChat":[true]}}},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. 
Agents)"},{"name":"Using Response Nodes","value":"responseNodes","description":"Send responses to the chat by using one or more Chat nodes"}],"default":"lastNode","description":"When and how to respond to the chat","displayOptions":{"show":{"/mode":["hostedChat"],"/availableInChat":[false]}}},{"displayName":"Response Mode","name":"responseMode","type":"options","options":[{"name":"Streaming","value":"streaming","description":"Streaming response from specified nodes (e.g. Agents)"},{"name":"When Last Node Finishes","value":"lastNode","description":"Returns data of the last-executed node"},{"name":"Using Response Nodes","value":"responseNodes","description":"Send responses to the chat by using one or more Chat nodes"}],"default":"streaming","description":"When and how to respond to the chat","displayOptions":{"show":{"/mode":["hostedChat"],"/availableInChat":[true]}}},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. Defaults to true."}]}]},
|
|
98
98
|
{"usableAsTool":true,"displayName":"Chat","name":"chat","icon":"node:chat-trigger","iconColor":"black","group":["input"],"version":[1,1.1,1.2,1.3],"defaultVersion":1.3,"description":"Send a message into the chat","defaults":{"name":"Chat"},"builderHint":{"relatedNodes":[{"nodeType":"@n8n/n8n-nodes-langchain.chatTrigger","relationHint":"Required trigger for this node to work - must set responseMode to \"responseNodes\""}]},"codex":{"categories":["Core Nodes","HITL"],"subcategories":{"HITL":["Human in the Loop"]},"alias":["human","wait","hitl","respond","approve","confirm","send","message"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-langchain.respondtochat/"}]}},"inputs":"={{ ((parameters) => {\n const inputs = [\n {\n type: 'main',\n },\n ];\n if (parameters.options?.memoryConnection) {\n return [\n ...inputs,\n {\n type: 'ai_memory',\n displayName: 'Memory',\n maxConnections: 1,\n },\n ];\n }\n return inputs;\n})($parameter) }}","outputs":["main"],"waitingNodeTooltip":"={{ ((parameters) => {\n if (parameters?.operation === 'sendAndWait') {\n return \"Execution will continue after the user's response\";\n }\n return '';\n})($parameter) }}","webhooks":[{"name":"default","httpMethod":"GET","responseMode":"onReceived","responseData":"","path":"={{ $nodeId }}","restartWebhook":true,"isFullPath":true},{"name":"default","httpMethod":"POST","responseMode":"onReceived","responseData":"","path":"={{ $nodeId }}","restartWebhook":true,"isFullPath":true}],"properties":[{"displayName":"Verify you're using a chat trigger with the 'Response Mode' option set to 'Using Response Nodes'","name":"generalNotice","type":"notice","default":""},{"displayName":"Operation","name":"operation","type":"options","default":"send","noDataExpression":true,"options":[{"name":"Send Message","value":"send","action":"Send a message"},{"name":"Send and Wait for Response","value":"sendAndWait","action":"Send message and wait for 
response"}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Message","name":"message","type":"string","default":"","required":true,"typeOptions":{"rows":4}},{"displayName":"Wait for User Reply","name":"waitUserReply","type":"boolean","default":true,"noDataExpression":true,"displayOptions":{"show":{"@version":[{"_cnd":{"lt":1.1}}]}}},{"displayName":"Response Type","name":"responseType","type":"options","default":"freeTextChat","options":[{"name":"Approval","value":"approval","description":"User can approve/disapprove from within the message"},{"name":"Free Text","value":"freeTextChat","description":"User can submit a response in the chat"}],"displayOptions":{"show":{"operation":["sendAndWait"]}}},{"displayName":"Block User Input","name":"blockUserInput","type":"boolean","default":false,"description":"Whether to block input from the user while waiting for approval","displayOptions":{"show":{"responseType":["approval"]}}},{"displayName":"Define Form","name":"defineForm","type":"options","noDataExpression":true,"options":[{"name":"Using Fields Below","value":"fields"},{"name":"Using JSON","value":"json"}],"default":"fields","displayOptions":{"show":{"responseType":["customForm"],"operation":["sendAndWait"]}}},{"displayName":"Form Fields","name":"jsonOutput","type":"json","typeOptions":{"rows":5},"default":"[\n {\n \"fieldLabel\": \"Name\",\n \"placeholder\": \"enter your name\",\n \"requiredField\": true\n },\n {\n \"fieldLabel\": \"Age\",\n \"fieldType\": \"number\",\n \"placeholder\": \"enter your age\"\n },\n {\n \"fieldLabel\": \"Email\",\n \"fieldType\": \"email\",\n \"requiredField\": true\n },\n {\n \"fieldLabel\": \"Textarea\",\n \"fieldType\": \"textarea\"\n },\n {\n \"fieldLabel\": \"Dropdown Options\",\n \"fieldType\": \"dropdown\",\n \"fieldOptions\": {\n \"values\": [\n {\n \"option\": \"option 1\"\n },\n {\n \"option\": \"option 2\"\n }\n ]\n },\n \"requiredField\": true\n },\n {\n \"fieldLabel\": \"Checkboxes\",\n 
\"fieldType\": \"checkbox\",\n \"fieldOptions\": {\n \"values\": [\n {\n \"option\": \"option 1\"\n },\n {\n \"option\": \"option 2\"\n }\n ]\n }\n },\n {\n \"fieldLabel\": \"Radio\",\n \"fieldType\": \"radio\",\n \"fieldOptions\": {\n \"values\": [\n {\n \"option\": \"option 1\"\n },\n {\n \"option\": \"option 2\"\n }\n ]\n }\n },\n {\n \"fieldLabel\": \"Email\",\n \"fieldType\": \"email\",\n \"placeholder\": \"me@mail.con\"\n },\n {\n \"fieldLabel\": \"File\",\n \"fieldType\": \"file\",\n \"multipleFiles\": true,\n \"acceptFileTypes\": \".jpg, .png\"\n },\n {\n \"fieldLabel\": \"Number\",\n \"fieldType\": \"number\"\n },\n {\n \"fieldLabel\": \"Password\",\n \"fieldType\": \"password\"\n }\n]\n","validateType":"form-fields","ignoreValidationDuringExecution":true,"hint":"<a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.form/\" target=\"_blank\">See docs</a> for field syntax","displayOptions":{"show":{"defineForm":["json"],"responseType":["customForm"],"operation":["sendAndWait"]}}},{"displayName":"Form Elements","name":"formFields","placeholder":"Add Form Element","type":"fixedCollection","default":{},"typeOptions":{"multipleValues":true,"sortable":true,"fixedCollection":{"itemTitle":"={{ $collection.item.properties.find(p => p.name === \"fieldType\").options.find(o => o.value === $collection.item.value.fieldType).name }}"}},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Field Name","name":"fieldName","description":"The name of the field, used in input attributes and referenced by the workflow","required":true,"type":"string","default":"","displayOptions":{"hide":{"fieldType":["html"]},"show":{"@version":[2.4]}}},{"displayName":"Label","name":"fieldLabel","type":"string","default":"","placeholder":"e.g. 
What is your name?","description":"Label that appears above the input field","required":true,"displayOptions":{"hide":{"fieldType":["hiddenField","html"]},"show":{"@version":[{"_cnd":{"gte":2.4}}]}}},{"displayName":"Field Name","name":"fieldLabel","type":"string","default":"","placeholder":"e.g. What is your name?","description":"Label that appears above the input field","required":true,"displayOptions":{"hide":{"fieldType":["hiddenField","html"]},"show":{"@version":[{"_cnd":{"lt":2.4}}]}}},{"displayName":"Field Name","name":"fieldName","description":"The name of the field, used in input attributes and referenced by the workflow","type":"string","default":"","displayOptions":{"show":{"fieldType":["hiddenField"],"@version":[{"_cnd":{"lt":2.4}}]}}},{"displayName":"Element Type","name":"fieldType","type":"options","default":"text","description":"The type of field to add to the form","options":[{"name":"Checkboxes","value":"checkbox"},{"name":"Custom HTML","value":"html"},{"name":"Date","value":"date"},{"name":"Dropdown","value":"dropdown"},{"name":"Email","value":"email"},{"name":"File","value":"file"},{"name":"Hidden Field","value":"hiddenField"},{"name":"Number","value":"number"},{"name":"Password","value":"password"},{"name":"Radio Buttons","value":"radio"},{"name":"Text Input","value":"text"},{"name":"Textarea","value":"textarea"}],"required":true},{"displayName":"Element Name","name":"elementName","type":"string","default":"","placeholder":"e.g. content-section","description":"Optional field. 
It can be used to include the html in the output.","displayOptions":{"show":{"fieldType":["html"]}}},{"displayName":"Custom Field Name","name":"fieldName","description":"The name of the field, used in input attributes and referenced by the workflow","type":"string","default":"","displayOptions":{"hide":{"fieldType":["html"]},"show":{"@version":[{"_cnd":{"gte":2.5}}]}}},{"displayName":"Placeholder","name":"placeholder","description":"Sample text to display inside the field","type":"string","default":"","displayOptions":{"hide":{"fieldType":["dropdown","date","file","html","hiddenField","radio","checkbox"]}}},{"displayName":"Default Value","name":"defaultValue","description":"Default value that will be pre-filled in the form field","type":"string","default":"","displayOptions":{"show":{"fieldType":["text","number","email","textarea"]}}},{"displayName":"Default Value","name":"defaultValue","description":"Default date value that will be pre-filled in the form field (format: YYYY-MM-DD)","type":"dateTime","typeOptions":{"dateOnly":true},"default":"","displayOptions":{"show":{"fieldType":["date"]}}},{"displayName":"Default Value","name":"defaultValue","description":"Default value that will be pre-selected. Must match one of the option labels.","type":"string","default":"","displayOptions":{"show":{"fieldType":["dropdown","radio"]}}},{"displayName":"Default Value","name":"defaultValue","description":"Default value(s) that will be pre-selected. Must match one or multiple of the option labels. 
Separate multiple pre-selected options with a comma.","type":"string","default":"","displayOptions":{"show":{"fieldType":["checkbox"]}}},{"displayName":"Field Value","name":"fieldValue","description":"Input value can be set here or will be passed as a query parameter via Field Name if no value is set","type":"string","default":"","displayOptions":{"show":{"fieldType":["hiddenField"]}}},{"displayName":"Field Options","name":"fieldOptions","placeholder":"Add Field Option","description":"List of options that can be selected from the dropdown","type":"fixedCollection","default":{"values":[{"option":""}]},"required":true,"displayOptions":{"show":{"fieldType":["dropdown"]}},"typeOptions":{"multipleValues":true,"sortable":true},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Option","name":"option","type":"string","default":""}]}]},{"displayName":"Checkboxes","name":"fieldOptions","placeholder":"Add Checkbox","type":"fixedCollection","default":{"values":[{"option":""}]},"required":true,"displayOptions":{"show":{"fieldType":["checkbox"]}},"typeOptions":{"multipleValues":true,"sortable":true},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Checkbox Label","name":"option","type":"string","default":""}]}]},{"displayName":"Radio Buttons","name":"fieldOptions","placeholder":"Add Radio Button","type":"fixedCollection","default":{"values":[{"option":""}]},"required":true,"displayOptions":{"show":{"fieldType":["radio"]}},"typeOptions":{"multipleValues":true,"sortable":true},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Radio Button Label","name":"option","type":"string","default":""}]}]},{"displayName":"Multiple Choice is a legacy option, please use Checkboxes or Radio Buttons field type instead","name":"multiselectLegacyNotice","type":"notice","default":"","displayOptions":{"show":{"multiselect":[true],"fieldType":["dropdown"],"@version":[{"_cnd":{"lt":2.3}}]}}},{"displayName":"Multiple 
Choice","name":"multiselect","type":"boolean","default":false,"description":"Whether to allow the user to select multiple options from the dropdown list","displayOptions":{"show":{"fieldType":["dropdown"],"@version":[{"_cnd":{"lt":2.3}}]}}},{"displayName":"Limit Selection","name":"limitSelection","type":"options","default":"unlimited","options":[{"name":"Exact Number","value":"exact"},{"name":"Range","value":"range"},{"name":"Unlimited","value":"unlimited"}],"displayOptions":{"show":{"fieldType":["checkbox"]}}},{"displayName":"Number of Selections","name":"numberOfSelections","type":"number","default":1,"typeOptions":{"numberPrecision":0,"minValue":1,"showEvenWhenOptional":true},"displayOptions":{"show":{"fieldType":["checkbox"],"limitSelection":["exact"]}}},{"displayName":"Minimum Selections","name":"minSelections","type":"number","default":0,"typeOptions":{"numberPrecision":0,"minValue":0,"showEvenWhenOptional":true},"displayOptions":{"show":{"fieldType":["checkbox"],"limitSelection":["range"]}}},{"displayName":"Maximum Selections","name":"maxSelections","type":"number","default":1,"typeOptions":{"numberPrecision":0,"minValue":1,"showEvenWhenOptional":true},"displayOptions":{"show":{"fieldType":["checkbox"],"limitSelection":["range"]}}},{"displayName":"HTML","name":"html","typeOptions":{"editor":"htmlEditor"},"type":"string","noDataExpression":true,"default":"<!-- Your custom HTML here --->\n\n\n","description":"HTML elements to display on the form page","hint":"Does not accept <code><script></code>, <code><style></code> or <code><input></code> tags","displayOptions":{"show":{"fieldType":["html"]}}},{"displayName":"Multiple Files","name":"multipleFiles","type":"boolean","default":true,"description":"Whether to allow the user to select multiple files from the file input or just one","displayOptions":{"show":{"fieldType":["file"]}}},{"displayName":"Accepted File Types","name":"acceptFileTypes","type":"string","default":"","description":"Comma-separated list of 
allowed file extensions","hint":"Leave empty to allow all file types","placeholder":"e.g. .jpg, .png","displayOptions":{"show":{"fieldType":["file"]}}},{"displayName":"The displayed date is formatted based on the locale of the user's browser","name":"formatDate","type":"notice","default":"","displayOptions":{"show":{"fieldType":["date"]}}},{"displayName":"Required Field","name":"requiredField","type":"boolean","default":false,"description":"Whether to require the user to enter a value for this field before submitting the form","displayOptions":{"hide":{"fieldType":["html","hiddenField"]}}}]}],"displayOptions":{"show":{"@version":[{"_cnd":{"lt":2.5}}],"defineForm":["fields"],"responseType":["customForm"],"operation":["sendAndWait"]}}},{"displayName":"Form Elements","name":"formFields","placeholder":"Add Form Element","type":"fixedCollection","default":{},"typeOptions":{"multipleValues":true,"sortable":true,"hideOptionalFields":true,"addOptionalFieldButtonText":"Add Attributes","fixedCollection":{"itemTitle":"={{ $collection.item.properties.find(p => p.name === \"fieldType\").options.find(o => o.value === $collection.item.value.fieldType).name }}"}},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Field Name","name":"fieldName","description":"The name of the field, used in input attributes and referenced by the workflow","required":true,"type":"string","default":"","displayOptions":{"hide":{"fieldType":["html"]},"show":{"@version":[2.4]}}},{"displayName":"Label","name":"fieldLabel","type":"string","default":"","placeholder":"e.g. What is your name?","description":"Label that appears above the input field","required":true,"displayOptions":{"hide":{"fieldType":["hiddenField","html"]},"show":{"@version":[{"_cnd":{"gte":2.4}}]}}},{"displayName":"Field Name","name":"fieldLabel","type":"string","default":"","placeholder":"e.g. 
What is your name?","description":"Label that appears above the input field","required":true,"displayOptions":{"hide":{"fieldType":["hiddenField","html"]},"show":{"@version":[{"_cnd":{"lt":2.4}}]}}},{"displayName":"Field Name","name":"fieldName","description":"The name of the field, used in input attributes and referenced by the workflow","type":"string","default":"","displayOptions":{"show":{"fieldType":["hiddenField"],"@version":[{"_cnd":{"lt":2.4}}]}}},{"displayName":"Element Type","name":"fieldType","type":"options","default":"text","description":"The type of field to add to the form","options":[{"name":"Checkboxes","value":"checkbox"},{"name":"Custom HTML","value":"html"},{"name":"Date","value":"date"},{"name":"Dropdown","value":"dropdown"},{"name":"Email","value":"email"},{"name":"File","value":"file"},{"name":"Hidden Field","value":"hiddenField"},{"name":"Number","value":"number"},{"name":"Password","value":"password"},{"name":"Radio Buttons","value":"radio"},{"name":"Text Input","value":"text"},{"name":"Textarea","value":"textarea"}],"required":true},{"displayName":"Element Name","name":"elementName","type":"string","default":"","placeholder":"e.g. content-section","description":"Optional field. 
It can be used to include the html in the output.","displayOptions":{"show":{"fieldType":["html"]}}},{"displayName":"Custom Field Name","name":"fieldName","description":"The name of the field, used in input attributes and referenced by the workflow","type":"string","default":"","displayOptions":{"hide":{"fieldType":["html"]},"show":{"@version":[{"_cnd":{"gte":2.5}}]}}},{"displayName":"Placeholder","name":"placeholder","description":"Sample text to display inside the field","type":"string","default":"","displayOptions":{"hide":{"fieldType":["dropdown","date","file","html","hiddenField","radio","checkbox"]}}},{"displayName":"Default Value","name":"defaultValue","description":"Default value that will be pre-filled in the form field","type":"string","default":"","displayOptions":{"show":{"fieldType":["text","number","email","textarea"]}}},{"displayName":"Default Value","name":"defaultValue","description":"Default date value that will be pre-filled in the form field (format: YYYY-MM-DD)","type":"dateTime","typeOptions":{"dateOnly":true},"default":"","displayOptions":{"show":{"fieldType":["date"]}}},{"displayName":"Default Value","name":"defaultValue","description":"Default value that will be pre-selected. Must match one of the option labels.","type":"string","default":"","displayOptions":{"show":{"fieldType":["dropdown","radio"]}}},{"displayName":"Default Value","name":"defaultValue","description":"Default value(s) that will be pre-selected. Must match one or multiple of the option labels. 
Separate multiple pre-selected options with a comma.","type":"string","default":"","displayOptions":{"show":{"fieldType":["checkbox"]}}},{"displayName":"Field Value","name":"fieldValue","description":"Input value can be set here or will be passed as a query parameter via Field Name if no value is set","type":"string","default":"","displayOptions":{"show":{"fieldType":["hiddenField"]}}},{"displayName":"Field Options","name":"fieldOptions","placeholder":"Add Field Option","description":"List of options that can be selected from the dropdown","type":"fixedCollection","default":{"values":[{"option":""}]},"required":true,"displayOptions":{"show":{"fieldType":["dropdown"]}},"typeOptions":{"multipleValues":true,"sortable":true},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Option","name":"option","type":"string","default":""}]}]},{"displayName":"Checkboxes","name":"fieldOptions","placeholder":"Add Checkbox","type":"fixedCollection","default":{"values":[{"option":""}]},"required":true,"displayOptions":{"show":{"fieldType":["checkbox"]}},"typeOptions":{"multipleValues":true,"sortable":true},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Checkbox Label","name":"option","type":"string","default":""}]}]},{"displayName":"Radio Buttons","name":"fieldOptions","placeholder":"Add Radio Button","type":"fixedCollection","default":{"values":[{"option":""}]},"required":true,"displayOptions":{"show":{"fieldType":["radio"]}},"typeOptions":{"multipleValues":true,"sortable":true},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Radio Button Label","name":"option","type":"string","default":""}]}]},{"displayName":"Multiple Choice is a legacy option, please use Checkboxes or Radio Buttons field type instead","name":"multiselectLegacyNotice","type":"notice","default":"","displayOptions":{"show":{"multiselect":[true],"fieldType":["dropdown"],"@version":[{"_cnd":{"lt":2.3}}]}}},{"displayName":"Multiple 
Choice","name":"multiselect","type":"boolean","default":false,"description":"Whether to allow the user to select multiple options from the dropdown list","displayOptions":{"show":{"fieldType":["dropdown"],"@version":[{"_cnd":{"lt":2.3}}]}}},{"displayName":"Limit Selection","name":"limitSelection","type":"options","default":"unlimited","options":[{"name":"Exact Number","value":"exact"},{"name":"Range","value":"range"},{"name":"Unlimited","value":"unlimited"}],"displayOptions":{"show":{"fieldType":["checkbox"]}}},{"displayName":"Number of Selections","name":"numberOfSelections","type":"number","default":1,"typeOptions":{"numberPrecision":0,"minValue":1,"showEvenWhenOptional":true},"displayOptions":{"show":{"fieldType":["checkbox"],"limitSelection":["exact"]}}},{"displayName":"Minimum Selections","name":"minSelections","type":"number","default":0,"typeOptions":{"numberPrecision":0,"minValue":0,"showEvenWhenOptional":true},"displayOptions":{"show":{"fieldType":["checkbox"],"limitSelection":["range"]}}},{"displayName":"Maximum Selections","name":"maxSelections","type":"number","default":1,"typeOptions":{"numberPrecision":0,"minValue":1,"showEvenWhenOptional":true},"displayOptions":{"show":{"fieldType":["checkbox"],"limitSelection":["range"]}}},{"displayName":"HTML","name":"html","typeOptions":{"editor":"htmlEditor"},"type":"string","noDataExpression":true,"default":"<!-- Your custom HTML here --->\n\n\n","description":"HTML elements to display on the form page","hint":"Does not accept <code><script></code>, <code><style></code> or <code><input></code> tags","displayOptions":{"show":{"fieldType":["html"]}}},{"displayName":"Multiple Files","name":"multipleFiles","type":"boolean","default":true,"description":"Whether to allow the user to select multiple files from the file input or just one","displayOptions":{"show":{"fieldType":["file"]}}},{"displayName":"Accepted File Types","name":"acceptFileTypes","type":"string","default":"","description":"Comma-separated list of 
allowed file extensions","hint":"Leave empty to allow all file types","placeholder":"e.g. .jpg, .png","displayOptions":{"show":{"fieldType":["file"]}}},{"displayName":"The displayed date is formatted based on the locale of the user's browser","name":"formatDate","type":"notice","default":"","displayOptions":{"show":{"fieldType":["date"]}}},{"displayName":"Required Field","name":"requiredField","type":"boolean","default":false,"description":"Whether to require the user to enter a value for this field before submitting the form","displayOptions":{"hide":{"fieldType":["html","hiddenField"]}}}]}],"displayOptions":{"show":{"@version":[{"_cnd":{"gte":2.5}}],"defineForm":["fields"],"responseType":["customForm"],"operation":["sendAndWait"]}}},{"displayName":"Approval Options","name":"approvalOptions","type":"fixedCollection","placeholder":"Add option","default":{},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Type of Approval","name":"approvalType","type":"options","placeholder":"Add option","default":"single","options":[{"name":"Approve Only","value":"single"},{"name":"Approve and Disapprove","value":"double"}]},{"displayName":"Approve Button Label","name":"approveLabel","type":"string","default":"Approve","displayOptions":{"show":{"approvalType":["single","double"]}}},{"displayName":"Approve Button Style","name":"buttonApprovalStyle","type":"options","default":"primary","options":[{"name":"Primary","value":"primary"},{"name":"Secondary","value":"secondary"}],"displayOptions":{"show":{"approvalType":["single","double"]}}},{"displayName":"Disapprove Button Label","name":"disapproveLabel","type":"string","default":"Decline","displayOptions":{"show":{"approvalType":["double"]}}},{"displayName":"Disapprove Button 
Style","name":"buttonDisapprovalStyle","type":"options","default":"secondary","options":[{"name":"Primary","value":"primary"},{"name":"Secondary","value":"secondary"}],"displayOptions":{"show":{"approvalType":["double"]}}}]}],"displayOptions":{"show":{"responseType":["approval"],"operation":["sendAndWait"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"displayOptions":{"hide":{"@tool":[true]}},"options":[{"displayName":"Add Memory Input Connection","name":"memoryConnection","type":"boolean","default":false,"displayOptions":{"hide":{"/responseType":["approval"]}}},{"displayName":"Limit Wait Time","name":"limitWaitTime","type":"fixedCollection","description":"Whether to limit the time this node should wait for a user response before execution resumes","default":{"values":{"limitType":"afterTimeInterval","resumeAmount":45,"resumeUnit":"minutes"}},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Limit Type","name":"limitType","type":"options","default":"afterTimeInterval","description":"Sets the condition for the execution to resume. 
Can be a specified date or after some time.","options":[{"name":"After Time Interval","description":"Waits for a certain amount of time","value":"afterTimeInterval"},{"name":"At Specified Time","description":"Waits until the set date and time to continue","value":"atSpecifiedTime"}]},{"displayName":"Amount","name":"resumeAmount","type":"number","displayOptions":{"show":{"limitType":["afterTimeInterval"]}},"typeOptions":{"minValue":0,"numberPrecision":2},"default":1,"description":"The time to wait"},{"displayName":"Unit","name":"resumeUnit","type":"options","displayOptions":{"show":{"limitType":["afterTimeInterval"]}},"options":[{"name":"Minutes","value":"minutes"},{"name":"Hours","value":"hours"},{"name":"Days","value":"days"}],"default":"hours","description":"Unit of the interval value"},{"displayName":"Max Date and Time","name":"maxDateAndTime","type":"dateTime","displayOptions":{"show":{"limitType":["atSpecifiedTime"]}},"default":"","description":"Continue execution after the specified date and time"}]}],"displayOptions":{"show":{"/waitUserReply":[true]}}},{"displayName":"Limit Wait Time","name":"limitWaitTime","type":"fixedCollection","description":"Whether to limit the time this node should wait for a user response before execution resumes","default":{"values":{"limitType":"afterTimeInterval","resumeAmount":45,"resumeUnit":"minutes"}},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Limit Type","name":"limitType","type":"options","default":"afterTimeInterval","description":"Sets the condition for the execution to resume. 
Can be a specified date or after some time.","options":[{"name":"After Time Interval","description":"Waits for a certain amount of time","value":"afterTimeInterval"},{"name":"At Specified Time","description":"Waits until the set date and time to continue","value":"atSpecifiedTime"}]},{"displayName":"Amount","name":"resumeAmount","type":"number","displayOptions":{"show":{"limitType":["afterTimeInterval"]}},"typeOptions":{"minValue":0,"numberPrecision":2},"default":1,"description":"The time to wait"},{"displayName":"Unit","name":"resumeUnit","type":"options","displayOptions":{"show":{"limitType":["afterTimeInterval"]}},"options":[{"name":"Minutes","value":"minutes"},{"name":"Hours","value":"hours"},{"name":"Days","value":"days"}],"default":"hours","description":"Unit of the interval value"},{"displayName":"Max Date and Time","name":"maxDateAndTime","type":"dateTime","displayOptions":{"show":{"limitType":["atSpecifiedTime"]}},"default":"","description":"Continue execution after the specified date and time"}]}],"displayOptions":{"show":{"/operation":["sendAndWait"]}}},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. 
Defaults to true."}]},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Limit Wait Time","name":"limitWaitTime","type":"fixedCollection","description":"Whether to limit the time this node should wait for a user response before execution resumes","default":{"values":{"limitType":"afterTimeInterval","resumeAmount":45,"resumeUnit":"minutes"}},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Limit Type","name":"limitType","type":"options","default":"afterTimeInterval","description":"Sets the condition for the execution to resume. Can be a specified date or after some time.","options":[{"name":"After Time Interval","description":"Waits for a certain amount of time","value":"afterTimeInterval"},{"name":"At Specified Time","description":"Waits until the set date and time to continue","value":"atSpecifiedTime"}]},{"displayName":"Amount","name":"resumeAmount","type":"number","displayOptions":{"show":{"limitType":["afterTimeInterval"]}},"typeOptions":{"minValue":0,"numberPrecision":2},"default":1,"description":"The time to wait"},{"displayName":"Unit","name":"resumeUnit","type":"options","displayOptions":{"show":{"limitType":["afterTimeInterval"]}},"options":[{"name":"Minutes","value":"minutes"},{"name":"Hours","value":"hours"},{"name":"Days","value":"days"}],"default":"hours","description":"Unit of the interval value"},{"displayName":"Max Date and Time","name":"maxDateAndTime","type":"dateTime","displayOptions":{"show":{"limitType":["atSpecifiedTime"]}},"default":"","description":"Continue execution after the specified date and time"}]}]},{"displayName":"Auto-save highlighted data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. 
This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. Defaults to true."}],"displayOptions":{"show":{"@tool":[true],"/waitUserReply":[true]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Limit Wait Time","name":"limitWaitTime","type":"fixedCollection","description":"Whether to limit the time this node should wait for a user response before execution resumes","default":{"values":{"limitType":"afterTimeInterval","resumeAmount":45,"resumeUnit":"minutes"}},"options":[{"displayName":"Values","name":"values","values":[{"displayName":"Limit Type","name":"limitType","type":"options","default":"afterTimeInterval","description":"Sets the condition for the execution to resume. Can be a specified date or after some time.","options":[{"name":"After Time Interval","description":"Waits for a certain amount of time","value":"afterTimeInterval"},{"name":"At Specified Time","description":"Waits until the set date and time to continue","value":"atSpecifiedTime"}]},{"displayName":"Amount","name":"resumeAmount","type":"number","displayOptions":{"show":{"limitType":["afterTimeInterval"]}},"typeOptions":{"minValue":0,"numberPrecision":2},"default":1,"description":"The time to wait"},{"displayName":"Unit","name":"resumeUnit","type":"options","displayOptions":{"show":{"limitType":["afterTimeInterval"]}},"options":[{"name":"Minutes","value":"minutes"},{"name":"Hours","value":"hours"},{"name":"Days","value":"days"}],"default":"hours","description":"Unit of the interval value"},{"displayName":"Max Date and Time","name":"maxDateAndTime","type":"dateTime","displayOptions":{"show":{"limitType":["atSpecifiedTime"]}},"default":"","description":"Continue execution after the specified date and time"}]}]},{"displayName":"Auto-save highlighted 
data","name":"autoSaveHighlightedData","type":"boolean","default":true,"description":"Whether to automatically save <a href=\"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executiondata/\" target=\"_blank\">highlighted data</a>. This data can then be used to filter executions in the Executions view. Available on Pro and Enterprise plans in n8n Cloud, and on Enterprise or registered Community Edition for self-hosted. Defaults to true."}],"displayOptions":{"show":{"@tool":[true],"/operation":["sendAndWait"]}}}]},
|
|
99
99
|
{"displayName":"Azure AI Search Vector Store","name":"vectorStoreAzureAISearch","description":"Work with your data in Azure AI Search Vector Store","group":["transform"],"version":[1,1.1,1.2,1.3],"defaults":{"name":"Azure AI Search Vector Store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores","Tools","Root Nodes"],"Vector Stores":["Other Vector Stores"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreazureaisearch/"}]}},"builderHint":{"inputs":{"ai_embedding":{"required":true},"ai_document":{"required":true,"displayOptions":{"show":{"mode":["insert"]}}},"ai_reranker":{"required":true,"displayOptions":{"show":{"mode":["load","retrieve","retrieve-as-tool"],"useReranker":[true]}}}}},"credentials":[{"name":"azureAiSearchApi","required":true}],"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst useReranker = parameters?.useReranker;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"ai_embedding\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (['load', 'retrieve', 'retrieve-as-tool'].includes(mode) && useReranker) {\n\t\t\t\t\tinputs.push({ displayName: \"Reranker\", type: \"ai_reranker\", required: true, maxConnections: 1})\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"main\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"ai_document\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}","outputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode ?? 
'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"ai_tool\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"ai_vectorStore\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"main\"}]\n\t\t\t})($parameter)\n\t\t}}","properties":[{"displayName":"Tip: Get a feel for vector stores in n8n with our","name":"ragStarterCallout","type":"callout","typeOptions":{"calloutAction":{"label":"RAG starter template","type":"openSampleWorkflowTemplate","templateId":"rag-starter-template"}},"default":""},{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"retrieve","options":[{"name":"Get Many","value":"load","description":"Get many ranked documents from vector store for query","action":"Get ranked documents from vector store"},{"name":"Insert Documents","value":"insert","description":"Insert documents into vector store","action":"Add documents to vector store"},{"name":"Retrieve Documents (As Vector Store for Chain/Tool)","value":"retrieve","description":"Retrieve documents from vector store to be used as vector store with AI nodes","action":"Retrieve documents for Chain/Tool as Vector Store","outputConnectionType":"ai_vectorStore"},{"name":"Retrieve Documents (As Tool for AI Agent)","value":"retrieve-as-tool","description":"Retrieve documents from vector store to be used as tool with AI nodes","action":"Retrieve documents for AI Agent as Tool","outputConnectionType":"ai_tool"},{"name":"Update Documents","value":"update","description":"Update documents in vector store by ID","action":"Update vector store documents"}]},{"displayName":"This node must be connected to a vector store retriever. 
<a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_retriever'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"},"displayOptions":{"show":{"mode":["retrieve"]}}},{"displayName":"Name","name":"toolName","type":"string","default":"","required":true,"description":"Name of the vector store","placeholder":"e.g. company_knowledge_base","validateType":"string-alphanumeric","displayOptions":{"show":{"@version":[{"_cnd":{"lte":1.2}}],"mode":["retrieve-as-tool"]}}},{"displayName":"Description","name":"toolDescription","type":"string","default":"","required":true,"typeOptions":{"rows":2},"description":"Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often","placeholder":"e.g. Work with your data in Azure AI Search Vector Store","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Index Name","name":"indexName","type":"string","default":"n8n-vectorstore","description":"The name of the Azure AI Search index. Will be created automatically if it does not exist.","required":true},{"displayName":"Embedding Batch Size","name":"embeddingBatchSize","type":"number","default":200,"description":"Number of documents to embed in a single batch","displayOptions":{"show":{"mode":["insert"],"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Clear Index","name":"clearIndex","type":"boolean","default":false,"description":"Whether to delete and recreate the index before inserting new data. Warning: This will reset any custom index configuration (semantic ranking, analyzers, etc.) 
to defaults."},{"displayName":"Metadata Keys to Insert","name":"metadataKeysToInsert","type":"string","default":"","placeholder":"e.g., source,author,category","description":"Comma-separated list of metadata keys to store in Azure AI Search. Leave empty to include all metadata. Azure AI Search stores metadata in an \"attributes\" array format."}],"displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"","required":true,"description":"Search prompt to retrieve matching documents from the vector store using similarity-based ranking","displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"Number of top results to fetch from vector store","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Include Metadata","name":"includeDocumentMetadata","type":"boolean","default":true,"description":"Whether or not to include document metadata","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Rerank Results","name":"useReranker","type":"boolean","default":false,"description":"Whether or not to rerank results","displayOptions":{"show":{"mode":["load","retrieve","retrieve-as-tool"]}}},{"displayName":"ID","name":"id","type":"string","default":"","required":true,"description":"ID of an embedding entry","displayOptions":{"show":{"mode":["update"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Query Type","name":"queryType","type":"options","default":"hybrid","description":"The type of search query to perform","options":[{"name":"Vector","value":"vector","description":"Vector similarity search only"},{"name":"Hybrid","value":"hybrid","description":"Combines vector and keyword search (recommended)"},{"name":"Semantic Hybrid","value":"semanticHybrid","description":"Hybrid search with semantic ranking (requires Basic tier or 
higher)"}]},{"displayName":"Filter","name":"filter","type":"string","default":"","description":"Filter results using OData syntax. Use metadata/fieldName for metadata fields. <a href=\"https://learn.microsoft.com/en-us/azure/search/search-query-odata-filter\" target=\"_blank\">Learn more</a>.","placeholder":"metadata/category eq 'technology' and metadata/author eq 'John'"},{"displayName":"Semantic Configuration","name":"semanticConfiguration","type":"string","default":"","description":"Name of the semantic configuration for semantic ranking (optional)","displayOptions":{"show":{"queryType":["semanticHybrid"]}}}],"displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Query Type","name":"queryType","type":"options","default":"hybrid","description":"The type of search query to perform","options":[{"name":"Vector","value":"vector","description":"Vector similarity search only"},{"name":"Hybrid","value":"hybrid","description":"Combines vector and keyword search (recommended)"},{"name":"Semantic Hybrid","value":"semanticHybrid","description":"Hybrid search with semantic ranking (requires Basic tier or higher)"}]},{"displayName":"Filter","name":"filter","type":"string","default":"","description":"Filter results using OData syntax. Use metadata/fieldName for metadata fields. 
<a href=\"https://learn.microsoft.com/en-us/azure/search/search-query-odata-filter\" target=\"_blank\">Learn more</a>.","placeholder":"metadata/category eq 'technology' and metadata/author eq 'John'"},{"displayName":"Semantic Configuration","name":"semanticConfiguration","type":"string","default":"","description":"Name of the semantic configuration for semantic ranking (optional)","displayOptions":{"show":{"queryType":["semanticHybrid"]}}}],"displayOptions":{"show":{"mode":["retrieve"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreAzureAISearch/azure-aisearch.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreAzureAISearch/azure-aisearch.svg"}},
|
|
100
100
|
{"displayName":"Simple Vector Store","name":"vectorStoreInMemory","description":"The easiest way to experiment with vector stores, without external setup.","icon":"node:simple-vector-store","iconColor":"black","group":["transform"],"version":[1,1.1,1.2,1.3],"defaults":{"name":"Simple Vector Store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores","Tools","Root Nodes"],"Vector Stores":["For Beginners"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreinmemory/"}]}},"builderHint":{"relatedNodes":[{"nodeType":"@n8n/n8n-nodes-langchain.retrieverVectorStore","relationHint":"Connect to enable retrieval-augmented generation (RAG) for AI Agent workflows"}],"inputs":{"ai_embedding":{"required":true},"ai_document":{"required":true,"displayOptions":{"show":{"mode":["insert"]}}},"ai_reranker":{"required":true,"displayOptions":{"show":{"mode":["load","retrieve","retrieve-as-tool"],"useReranker":[true]}}}}},"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst useReranker = parameters?.useReranker;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"ai_embedding\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (['load', 'retrieve', 'retrieve-as-tool'].includes(mode) && useReranker) {\n\t\t\t\t\tinputs.push({ displayName: \"Reranker\", type: \"ai_reranker\", required: true, maxConnections: 1})\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"main\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"ai_document\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}","outputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst 
mode = parameters?.mode ?? 'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"ai_tool\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"ai_vectorStore\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"main\"}]\n\t\t\t})($parameter)\n\t\t}}","properties":[{"displayName":"Tip: Get a feel for vector stores in n8n with our","name":"ragStarterCallout","type":"callout","typeOptions":{"calloutAction":{"label":"RAG starter template","type":"openSampleWorkflowTemplate","templateId":"rag-starter-template"}},"default":""},{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"retrieve","options":[{"name":"Get Many","value":"load","description":"Get many ranked documents from vector store for query","action":"Get ranked documents from vector store"},{"name":"Insert Documents","value":"insert","description":"Insert documents into vector store","action":"Add documents to vector store"},{"name":"Retrieve Documents (As Vector Store for Chain/Tool)","value":"retrieve","description":"Retrieve documents from vector store to be used as vector store with AI nodes","action":"Retrieve documents for Chain/Tool as Vector Store","outputConnectionType":"ai_vectorStore"},{"name":"Retrieve Documents (As Tool for AI Agent)","value":"retrieve-as-tool","description":"Retrieve documents from vector store to be used as tool with AI nodes","action":"Retrieve documents for AI Agent as Tool","outputConnectionType":"ai_tool"}]},{"displayName":"This node must be connected to a vector store retriever. 
<a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_retriever'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"},"displayOptions":{"show":{"mode":["retrieve"]}}},{"displayName":"Name","name":"toolName","type":"string","default":"","required":true,"description":"Name of the vector store","placeholder":"e.g. company_knowledge_base","validateType":"string-alphanumeric","displayOptions":{"show":{"@version":[{"_cnd":{"lte":1.2}}],"mode":["retrieve-as-tool"]}}},{"displayName":"Description","name":"toolDescription","type":"string","default":"","required":true,"typeOptions":{"rows":2},"description":"Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often","placeholder":"e.g. The easiest way to experiment with vector stores, without external setup.","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Memory Key","name":"memoryKey","type":"string","default":"vector_store_key","description":"The key to use to store the vector memory in the workflow data. The key will be prefixed with the workflow ID to avoid collisions.","displayOptions":{"show":{"@version":[{"_cnd":{"lte":1.1}}]}}},{"displayName":"Memory Key","name":"memoryKey","type":"resourceLocator","required":true,"default":{"mode":"list","value":"vector_store_key"},"description":"The key to use to store the vector memory in the workflow data. 
These keys are shared between workflows.","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}]}},"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"vectorStoresSearch","searchable":true,"allowNewResource":{"label":"resourceLocator.mode.list.addNewResource.vectorStoreInMemory","defaultName":"vector_store_key","method":"createVectorStore"}}},{"displayName":"Manual","name":"id","type":"string","placeholder":"vector_store_key"}]},{"displayName":"Embedding Batch Size","name":"embeddingBatchSize","type":"number","default":200,"description":"Number of documents to embed in a single batch","displayOptions":{"show":{"mode":["insert"],"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Clear Store","name":"clearStore","type":"boolean","default":false,"description":"Whether to clear the store before inserting new data","displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"<strong>For experimental use only</strong>: Data is stored in memory and will be lost if n8n restarts. Data may also be cleared if available memory gets low, and is accessible to all users of this instance. 
<a href=\"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreinmemory/\">More info</a>","name":"notice","type":"notice","default":"","displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"","required":true,"description":"Search prompt to retrieve matching documents from the vector store using similarity-based ranking","displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"Number of top results to fetch from vector store","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Include Metadata","name":"includeDocumentMetadata","type":"boolean","default":true,"description":"Whether or not to include document metadata","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Rerank Results","name":"useReranker","type":"boolean","default":false,"description":"Whether or not to rerank results","displayOptions":{"show":{"mode":["load","retrieve","retrieve-as-tool"]}}},{"displayName":"ID","name":"id","type":"string","default":"","required":true,"description":"ID of an embedding entry","displayOptions":{"show":{"mode":["update"]}}},{"displayName":"<strong>For experimental use only</strong>: Data is stored in memory and will be lost if n8n restarts. Data may also be cleared if available memory gets low, and is accessible to all users of this instance. <a href=\"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreinmemory/\">More info</a>","name":"notice","type":"notice","default":"","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"<strong>For experimental use only</strong>: Data is stored in memory and will be lost if n8n restarts. Data may also be cleared if available memory gets low, and is accessible to all users of this instance. 
<a href=\"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreinmemory/\">More info</a>","name":"notice","type":"notice","default":"","displayOptions":{"show":{"mode":["retrieve"]}}}]},
|