@n8n/n8n-nodes-langchain 1.87.0 → 1.88.0
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- package/dist/credentials/MilvusApi.credentials.js +72 -0
- package/dist/credentials/MilvusApi.credentials.js.map +1 -0
- package/dist/known/credentials.json +7 -0
- package/dist/known/nodes.json +12 -0
- package/dist/methods/defined.json +3 -0
- package/dist/methods/referenced.json +3 -0
- package/dist/nodes/mcp/McpClientTool/McpClientTool.node.js +258 -0
- package/dist/nodes/mcp/McpClientTool/McpClientTool.node.js.map +1 -0
- package/dist/nodes/mcp/McpClientTool/loadOptions.js +51 -0
- package/dist/nodes/mcp/McpClientTool/loadOptions.js.map +1 -0
- package/dist/nodes/mcp/McpClientTool/types.js +17 -0
- package/dist/nodes/mcp/McpClientTool/types.js.map +1 -0
- package/dist/nodes/mcp/McpClientTool/utils.js +192 -0
- package/dist/nodes/mcp/McpClientTool/utils.js.map +1 -0
- package/dist/nodes/mcp/McpTrigger/FlushingSSEServerTransport.js +39 -0
- package/dist/nodes/mcp/McpTrigger/FlushingSSEServerTransport.js.map +1 -0
- package/dist/nodes/mcp/McpTrigger/McpServer.js +179 -0
- package/dist/nodes/mcp/McpTrigger/McpServer.js.map +1 -0
- package/dist/nodes/mcp/McpTrigger/McpTrigger.node.js +181 -0
- package/dist/nodes/mcp/McpTrigger/McpTrigger.node.js.map +1 -0
- package/dist/nodes/mcp/mcp.dark.svg +7 -0
- package/dist/nodes/mcp/mcp.svg +7 -0
- package/dist/nodes/vector_store/VectorStoreMilvus/VectorStoreMilvus.node.js +106 -0
- package/dist/nodes/vector_store/VectorStoreMilvus/VectorStoreMilvus.node.js.map +1 -0
- package/dist/nodes/vector_store/VectorStoreMilvus/milvus-icon-black.svg +1 -0
- package/dist/nodes/vector_store/VectorStoreMilvus/milvus-icon-white.svg +1 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.js +16 -0
- package/dist/nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.js.map +1 -1
- package/dist/nodes/vector_store/shared/descriptions.js +24 -0
- package/dist/nodes/vector_store/shared/descriptions.js.map +1 -1
- package/dist/types/credentials.json +1 -0
- package/dist/types/nodes.json +3 -0
- package/dist/utils/helpers.js +7 -1
- package/dist/utils/helpers.js.map +1 -1
- package/dist/utils/logWrapper.js +9 -2
- package/dist/utils/logWrapper.js.map +1 -1
- package/package.json +10 -4
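The headline changes in 1.88.0 are a new Milvus vector store node (with a matching milvusApi credential) and new MCP Client Tool / MCP Server Trigger nodes. As orientation for the compiled sources below, here is a minimal standalone sketch of how the new credential fields (baseUrl, username, password) can be turned into a Milvus connection with @zilliz/milvus2-sdk-node, the same pattern the added files use. The script itself and the placeholder values are illustrative, not part of the package:

import { MilvusClient } from '@zilliz/milvus2-sdk-node';

// Fields defined by the new milvusApi credential type (values are placeholders).
const credentials = {
  baseUrl: 'http://localhost:19530', // default from the credential definition
  username: 'root',
  password: 'Milvus',
};

// The added code builds the SDK client from an address plus a "username:password" token.
const client = new MilvusClient({
  address: credentials.baseUrl,
  token: `${credentials.username}:${credentials.password}`,
});

// Listing collections is what backs the new "From List" collection picker.
const collections = await client.listCollections();
console.log(collections.data.map((c) => c.name));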
package/dist/nodes/mcp/mcp.dark.svg
ADDED
@@ -0,0 +1,7 @@
+<svg width="180" height="180" viewBox="0 0 195 195" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g stroke="#fff" stroke-width="12" stroke-linecap="round">
+<path d="M25 97.8528L92.8823 29.9706C102.255 20.598 117.451 20.598 126.823 29.9706V29.9706C136.196 39.3431 136.196 54.5391 126.823 63.9117L75.5581 115.177"/>
+<path d="M76.2653 114.47L126.823 63.9117C136.196 54.5391 151.392 54.5391 160.765 63.9117L161.118 64.2652C170.491 73.6378 170.491 88.8338 161.118 98.2063L99.7248 159.6C96.6006 162.724 96.6006 167.789 99.7248 170.913L112.331 183.52"/>
+<path d="M109.853 46.9411L59.6482 97.1457C50.2757 106.518 50.2757 121.714 59.6482 131.087V131.087C69.0208 140.459 84.2168 140.459 93.5894 131.087L143.794 80.8822"/>
+</g>
+</svg>
package/dist/nodes/mcp/mcp.svg
ADDED
@@ -0,0 +1,7 @@
+<svg width="180" height="180" viewBox="0 0 195 195" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g stroke="#000" stroke-width="12" stroke-linecap="round">
+<path d="M25 97.8528L92.8823 29.9706C102.255 20.598 117.451 20.598 126.823 29.9706V29.9706C136.196 39.3431 136.196 54.5391 126.823 63.9117L75.5581 115.177"/>
+<path d="M76.2653 114.47L126.823 63.9117C136.196 54.5391 151.392 54.5391 160.765 63.9117L161.118 64.2652C170.491 73.6378 170.491 88.8338 161.118 98.2063L99.7248 159.6C96.6006 162.724 96.6006 167.789 99.7248 170.913L112.331 183.52"/>
+<path d="M109.853 46.9411L59.6482 97.1457C50.2757 106.518 50.2757 121.714 59.6482 131.087V131.087C69.0208 140.459 84.2168 140.459 93.5894 131.087L143.794 80.8822"/>
+</g>
+</svg>
package/dist/nodes/vector_store/VectorStoreMilvus/VectorStoreMilvus.node.js
ADDED
@@ -0,0 +1,106 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var VectorStoreMilvus_node_exports = {};
+__export(VectorStoreMilvus_node_exports, {
+  VectorStoreMilvus: () => VectorStoreMilvus
+});
+module.exports = __toCommonJS(VectorStoreMilvus_node_exports);
+var import_milvus = require("@langchain/community/vectorstores/milvus");
+var import_milvus2_sdk_node = require("@zilliz/milvus2-sdk-node");
+var import_createVectorStoreNode = require("../shared/createVectorStoreNode/createVectorStoreNode");
+var import_listSearch = require("../shared/createVectorStoreNode/methods/listSearch");
+var import_descriptions = require("../shared/descriptions");
+const sharedFields = [import_descriptions.milvusCollectionRLC];
+const insertFields = [
+  {
+    displayName: "Options",
+    name: "options",
+    type: "collection",
+    placeholder: "Add Option",
+    default: {},
+    options: [
+      {
+        displayName: "Clear Collection",
+        name: "clearCollection",
+        type: "boolean",
+        default: false,
+        description: "Whether to clear the collection before inserting new data"
+      }
+    ]
+  }
+];
+class VectorStoreMilvus extends (0, import_createVectorStoreNode.createVectorStoreNode)({
+  meta: {
+    displayName: "Milvus Vector Store",
+    name: "vectorStoreMilvus",
+    description: "Work with your data in Milvus Vector Store",
+    icon: { light: "file:milvus-icon-black.svg", dark: "file:milvus-icon-white.svg" },
+    docsUrl: "https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoremilvus/",
+    credentials: [
+      {
+        name: "milvusApi",
+        required: true
+      }
+    ],
+    operationModes: ["load", "insert", "retrieve", "retrieve-as-tool"]
+  },
+  methods: { listSearch: { milvusCollectionsSearch: import_listSearch.milvusCollectionsSearch } },
+  sharedFields,
+  insertFields,
+  async getVectorStoreClient(context, _filter, embeddings, itemIndex) {
+    const collection = context.getNodeParameter("milvusCollection", itemIndex, "", {
+      extractValue: true
+    });
+    const credentials = await context.getCredentials("milvusApi");
+    const config = {
+      url: credentials.baseUrl,
+      username: credentials.username,
+      password: credentials.password,
+      collectionName: collection
+    };
+    return await import_milvus.Milvus.fromExistingCollection(embeddings, config);
+  },
+  async populateVectorStore(context, embeddings, documents, itemIndex) {
+    const collection = context.getNodeParameter("milvusCollection", itemIndex, "", {
+      extractValue: true
+    });
+    const options = context.getNodeParameter("options", itemIndex, {});
+    const credentials = await context.getCredentials("milvusApi");
+    const config = {
+      url: credentials.baseUrl,
+      username: credentials.username,
+      password: credentials.password,
+      collectionName: collection
+    };
+    if (options.clearCollection) {
+      const client = new import_milvus2_sdk_node.MilvusClient({
+        address: credentials.baseUrl,
+        token: `${credentials.username}:${credentials.password}`
+      });
+      await client.dropCollection({ collection_name: collection });
+    }
+    await import_milvus.Milvus.fromDocuments(documents, embeddings, config);
+  }
+}) {
+}
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  VectorStoreMilvus
+});
+//# sourceMappingURL=VectorStoreMilvus.node.js.map
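The insert path of the node above reduces to a handful of LangChain and Milvus SDK calls. A simplified standalone equivalent is sketched below; the OpenAIEmbeddings provider, collection name, and credential values are assumptions for illustration, and any Embeddings implementation wired into the node would behave the same way:

import { Milvus } from '@langchain/community/vectorstores/milvus';
import { MilvusClient } from '@zilliz/milvus2-sdk-node';
import { OpenAIEmbeddings } from '@langchain/openai';
import { Document } from '@langchain/core/documents';

const config = {
  url: 'http://localhost:19530', // credentials.baseUrl
  username: 'root', // placeholder values
  password: 'Milvus',
  collectionName: 'my_collection', // resolved from the milvusCollection resource locator
};

// "Clear Collection" option: drop the existing collection before re-inserting.
const client = new MilvusClient({
  address: config.url,
  token: `${config.username}:${config.password}`,
});
await client.dropCollection({ collection_name: config.collectionName });

// Embed the documents and write them into Milvus, creating the collection as needed.
const docs = [new Document({ pageContent: 'hello milvus', metadata: { source: 'example' } })];
await Milvus.fromDocuments(docs, new OpenAIEmbeddings(), config);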
package/dist/nodes/vector_store/VectorStoreMilvus/VectorStoreMilvus.node.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../nodes/vector_store/VectorStoreMilvus/VectorStoreMilvus.node.ts"],"sourcesContent":["import { Milvus } from '@langchain/community/vectorstores/milvus';\nimport type { MilvusLibArgs } from '@langchain/community/vectorstores/milvus';\nimport { MilvusClient } from '@zilliz/milvus2-sdk-node';\nimport type { INodeProperties } from 'n8n-workflow';\n\nimport { createVectorStoreNode } from '../shared/createVectorStoreNode/createVectorStoreNode';\nimport { milvusCollectionsSearch } from '../shared/createVectorStoreNode/methods/listSearch';\nimport { milvusCollectionRLC } from '../shared/descriptions';\n\nconst sharedFields: INodeProperties[] = [milvusCollectionRLC];\nconst insertFields: INodeProperties[] = [\n\t{\n\t\tdisplayName: 'Options',\n\t\tname: 'options',\n\t\ttype: 'collection',\n\t\tplaceholder: 'Add Option',\n\t\tdefault: {},\n\t\toptions: [\n\t\t\t{\n\t\t\t\tdisplayName: 'Clear Collection',\n\t\t\t\tname: 'clearCollection',\n\t\t\t\ttype: 'boolean',\n\t\t\t\tdefault: false,\n\t\t\t\tdescription: 'Whether to clear the collection before inserting new data',\n\t\t\t},\n\t\t],\n\t},\n];\n\nexport class VectorStoreMilvus extends createVectorStoreNode<Milvus>({\n\tmeta: {\n\t\tdisplayName: 'Milvus Vector Store',\n\t\tname: 'vectorStoreMilvus',\n\t\tdescription: 'Work with your data in Milvus Vector Store',\n\t\ticon: { light: 'file:milvus-icon-black.svg', dark: 'file:milvus-icon-white.svg' },\n\t\tdocsUrl:\n\t\t\t'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoremilvus/',\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'milvusApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\toperationModes: ['load', 'insert', 'retrieve', 'retrieve-as-tool'],\n\t},\n\tmethods: { listSearch: { milvusCollectionsSearch } },\n\tsharedFields,\n\tinsertFields,\n\tasync getVectorStoreClient(context, _filter, embeddings, itemIndex): Promise<Milvus> {\n\t\tconst collection = context.getNodeParameter('milvusCollection', itemIndex, '', {\n\t\t\textractValue: true,\n\t\t}) as string;\n\t\tconst credentials = await context.getCredentials<{\n\t\t\tbaseUrl: string;\n\t\t\tusername: string;\n\t\t\tpassword: string;\n\t\t}>('milvusApi');\n\t\tconst config: MilvusLibArgs = {\n\t\t\turl: credentials.baseUrl,\n\t\t\tusername: credentials.username,\n\t\t\tpassword: credentials.password,\n\t\t\tcollectionName: collection,\n\t\t};\n\n\t\treturn await Milvus.fromExistingCollection(embeddings, config);\n\t},\n\tasync populateVectorStore(context, embeddings, documents, itemIndex): Promise<void> {\n\t\tconst collection = context.getNodeParameter('milvusCollection', itemIndex, '', {\n\t\t\textractValue: true,\n\t\t}) as string;\n\t\tconst options = context.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tclearCollection?: boolean;\n\t\t};\n\t\tconst credentials = await context.getCredentials<{\n\t\t\tbaseUrl: string;\n\t\t\tusername: string;\n\t\t\tpassword: string;\n\t\t}>('milvusApi');\n\t\tconst config: MilvusLibArgs = {\n\t\t\turl: credentials.baseUrl,\n\t\t\tusername: credentials.username,\n\t\t\tpassword: credentials.password,\n\t\t\tcollectionName: collection,\n\t\t};\n\n\t\tif (options.clearCollection) {\n\t\t\tconst client = new MilvusClient({\n\t\t\t\taddress: credentials.baseUrl,\n\t\t\t\ttoken: `${credentials.username}:${credentials.password}`,\n\t\t\t});\n\t\t\tawait client.dropCollection({ collection_name: collection });\n\t\t}\n\n\t\tawait Milvus.fromDocuments(documents, embeddings, config);\n\t},\n}) 
{}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AAEvB,8BAA6B;AAG7B,mCAAsC;AACtC,wBAAwC;AACxC,0BAAoC;AAEpC,MAAM,eAAkC,CAAC,uCAAmB;AAC5D,MAAM,eAAkC;AAAA,EACvC;AAAA,IACC,aAAa;AAAA,IACb,MAAM;AAAA,IACN,MAAM;AAAA,IACN,aAAa;AAAA,IACb,SAAS,CAAC;AAAA,IACV,SAAS;AAAA,MACR;AAAA,QACC,aAAa;AAAA,QACb,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS;AAAA,QACT,aAAa;AAAA,MACd;AAAA,IACD;AAAA,EACD;AACD;AAEO,MAAM,8BAA0B,oDAA8B;AAAA,EACpE,MAAM;AAAA,IACL,aAAa;AAAA,IACb,MAAM;AAAA,IACN,aAAa;AAAA,IACb,MAAM,EAAE,OAAO,8BAA8B,MAAM,6BAA6B;AAAA,IAChF,SACC;AAAA,IACD,aAAa;AAAA,MACZ;AAAA,QACC,MAAM;AAAA,QACN,UAAU;AAAA,MACX;AAAA,IACD;AAAA,IACA,gBAAgB,CAAC,QAAQ,UAAU,YAAY,kBAAkB;AAAA,EAClE;AAAA,EACA,SAAS,EAAE,YAAY,EAAE,mEAAwB,EAAE;AAAA,EACnD;AAAA,EACA;AAAA,EACA,MAAM,qBAAqB,SAAS,SAAS,YAAY,WAA4B;AACpF,UAAM,aAAa,QAAQ,iBAAiB,oBAAoB,WAAW,IAAI;AAAA,MAC9E,cAAc;AAAA,IACf,CAAC;AACD,UAAM,cAAc,MAAM,QAAQ,eAI/B,WAAW;AACd,UAAM,SAAwB;AAAA,MAC7B,KAAK,YAAY;AAAA,MACjB,UAAU,YAAY;AAAA,MACtB,UAAU,YAAY;AAAA,MACtB,gBAAgB;AAAA,IACjB;AAEA,WAAO,MAAM,qBAAO,uBAAuB,YAAY,MAAM;AAAA,EAC9D;AAAA,EACA,MAAM,oBAAoB,SAAS,YAAY,WAAW,WAA0B;AACnF,UAAM,aAAa,QAAQ,iBAAiB,oBAAoB,WAAW,IAAI;AAAA,MAC9E,cAAc;AAAA,IACf,CAAC;AACD,UAAM,UAAU,QAAQ,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAGjE,UAAM,cAAc,MAAM,QAAQ,eAI/B,WAAW;AACd,UAAM,SAAwB;AAAA,MAC7B,KAAK,YAAY;AAAA,MACjB,UAAU,YAAY;AAAA,MACtB,UAAU,YAAY;AAAA,MACtB,gBAAgB;AAAA,IACjB;AAEA,QAAI,QAAQ,iBAAiB;AAC5B,YAAM,SAAS,IAAI,qCAAa;AAAA,QAC/B,SAAS,YAAY;AAAA,QACrB,OAAO,GAAG,YAAY,QAAQ,IAAI,YAAY,QAAQ;AAAA,MACvD,CAAC;AACD,YAAM,OAAO,eAAe,EAAE,iBAAiB,WAAW,CAAC;AAAA,IAC5D;AAEA,UAAM,qBAAO,cAAc,WAAW,YAAY,MAAM;AAAA,EACzD;AACD,CAAC,EAAE;AAAC;","names":[]}
package/dist/nodes/vector_store/VectorStoreMilvus/milvus-icon-black.svg
ADDED
@@ -0,0 +1 @@
+
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 360 360"><title>milvus-icon-black</title><path d="M169.11689,299.04939c-27.78535-.02915-51.298-8.31411-72.45031-23.69338a122.33707,122.33707,0,0,1-14.00922-12.00616q-16.556-16.35613-33.14895-32.67482Q35.31712,216.704,21.10726,202.7519c-4.27753-4.20375-8.581-8.38131-12.83638-12.60737-5.78529-5.74539-5.692-12.21736.17135-17.91294,4.35916-4.23447,8.6808-8.50759,13.01531-12.76743q21.09033-20.727,42.18351-41.45112c7.82349-7.66748,15.53733-15.458,23.59542-22.87264A117.61725,117.61725,0,0,1,142.926,66.46075,112.79714,112.79714,0,0,1,167.7708,63.678c27.822.22006,53.14231,8.0315,75.21837,25.35039a117.49179,117.49179,0,0,1,33.56725,40.74071,111.30862,111.30862,0,0,1,11.18191,37.59258c3.75648,35.37535-6.093,66.46091-30.10087,92.86236a114.32952,114.32952,0,0,1-57.75872,34.79075A132.31853,132.31853,0,0,1,169.11689,299.04939Zm5.35425-31.43461a85.592,85.592,0,0,0,8.90555-.52289c1.86733-.18941,3.736-.42361,5.58158-.7604,22.43092-4.09372,40.60221-15.4505,54.49679-33.26538,13.51833-17.33245,18.827-37.40452,17.16875-59.24969a82.75341,82.75341,0,0,0-9.27492-31.89563c-9.21938-17.8418-23.47591-30.41362-41.44061-38.92072-16.12138-7.6342-33.07061-9.547-50.63693-6.8248a85.52038,85.52038,0,0,0-47.48221,23.39486c-12.62025,12.19885-25.1024,24.54066-37.63746,36.82758-6.28192,6.15757-12.51006,12.37048-18.83189,18.48667-4.04633,3.91467-4.21333,8.6494-.20763,12.6018q11.8256,11.66819,23.69873,23.28819c10.60865,10.42576,21.17268,20.89736,31.83058,31.27252C128.07334,259.01635,148.957,267.6,174.47114,267.61478Z"/><path d="M357.01654,180.71583a12.11267,12.11267,0,0,1-3.67773,9.07927q-16.50428,16.58232-33.07758,33.09591c-1.20808,1.20715-2.42168,1.44863-3.67166.79033-1.35017-.71105-1.82827-1.79263-1.442-3.56058a187.7592,187.7592,0,0,0,3.66284-23.911,178.77219,178.77219,0,0,0,.42091-21.92036,170.04764,170.04764,0,0,0-3.96975-31.1,3.757,3.757,0,0,1,.24665-3.12677,2.86044,2.86044,0,0,1,4.014-.76856,9.243,9.243,0,0,1,1.2578,1.10625q15.99768,15.98621,31.98832,31.97946C355.28369,174.89374,357.02471,177.76063,357.01654,180.71583Z"/><path d="M232.52066,181.32156a58.91788,58.91788,0,0,1-59.06395,59.10475c-34.72229-.0744-59.571-28.74548-58.99573-60.0188a59.03419,59.03419,0,0,1,118.05968.91405Z"/></svg>
package/dist/nodes/vector_store/VectorStoreMilvus/milvus-icon-white.svg
ADDED
@@ -0,0 +1 @@
+
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 360 360"><defs><style>.cls-1{fill:#fff;}</style></defs><title>milvus-icon-white</title><path class="cls-1" d="M168.50983,63.3293c31.95671.507,59.93559,11.11286,83.27686,33.18535,18.7006,17.68407,30.22667,39.32512,34.81328,64.64452A105.32115,105.32115,0,0,1,288.30948,182.05c-.56967,28.47039-9.94153,53.66667-28.65343,75.19713-18.13667,20.86872-40.90888,34.11493-68.05469,39.10222-36.61066,6.7263-69.82614-1.44613-99.22656-24.47337-6.10735-4.78341-11.40667-10.44927-16.92918-15.86568q-17.83359-17.49069-35.625-35.0246Q23.68152,205.12557,7.53535,189.27287c-4.99811-4.92643-5.00166-11.76177-.00618-16.68775q16.58239-16.35193,33.222-32.64579c10.415-10.22513,20.7782-20.50417,31.27381-30.64606,6.968-6.73308,13.5621-13.8667,21.20515-19.88878a119.68976,119.68976,0,0,1,51.34517-23.68667,121.162,121.162,0,0,1,23.93459-2.38869Zm92.371,117.69085a91.99822,91.99822,0,0,0-1.48906-15.09071c-4.6589-24.42132-17.48944-43.49544-38.23938-56.98664-20.01283-13.01188-42.00792-17.05568-65.46628-12.61428a85.61952,85.61952,0,0,0-44.01862,22.6468c-8.21582,7.84726-16.269,15.86505-24.38163,23.82013q-16.31332,15.99666-32.59837,32.0219c-3.75,3.69749-3.73394,8.52908-.01266,12.20989,5.32758,5.2695,10.69285,10.50085,16.03635,15.75429q19.54138,19.21189,39.08246,38.424,28.33431,27.75336,67.98,25.94035a83.03279,83.03279,0,0,0,39.17421-11.52729c28.23729-16.81519,42.66039-41.87957,43.93305-74.59841Zm58.23612-.07522a171.37566,171.37566,0,0,0-4.09075-38.11425,11.73723,11.73723,0,0,1-.20111-1.1565,2.63478,2.63478,0,0,1,1.27481-2.74752,2.67092,2.67092,0,0,1,3.16194.02147,9.13711,9.13711,0,0,1,1.18864,1.0856q16.15512,16.14834,32.305,32.30226c5.30658,5.3075,5.2967,12.06515-.02255,17.38455q-16.09957,16.10016-32.1997,32.19985c-.277.27695-.54682.56179-.83445.82717a2.73085,2.73085,0,0,1-3.50192.44023,2.6834,2.6834,0,0,1-1.351-3.11577c1.13192-5.20587,2.10232-10.43969,2.798-15.72573a164.22388,164.22388,0,0,0,1.46944-18.85586c.0238-1.5147.00355-3.03034.00355-4.54551Z"/><path class="cls-1" d="M232.20625,180.965a70.48931,70.48931,0,1,0-.00015,0Z"/></svg>
package/dist/nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.js
CHANGED
@@ -18,6 +18,7 @@ var __copyProps = (to, from, except, desc) => {
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var listSearch_exports = {};
 __export(listSearch_exports, {
+  milvusCollectionsSearch: () => milvusCollectionsSearch,
   pineconeIndexSearch: () => pineconeIndexSearch,
   qdrantCollectionsSearch: () => qdrantCollectionsSearch,
   supabaseTableNameSearch: () => supabaseTableNameSearch
@@ -25,6 +26,7 @@ __export(listSearch_exports, {
 module.exports = __toCommonJS(listSearch_exports);
 var import_pinecone = require("@pinecone-database/pinecone");
 var import_js_client_rest = require("@qdrant/js-client-rest");
+var import_milvus2_sdk_node = require("@zilliz/milvus2-sdk-node");
 var import_n8n_workflow = require("n8n-workflow");
 async function pineconeIndexSearch() {
   const credentials = await this.getCredentials("pineconeApi");
@@ -74,8 +76,22 @@ async function qdrantCollectionsSearch() {
   }));
   return { results };
 }
+async function milvusCollectionsSearch() {
+  const credentials = await this.getCredentials("milvusApi");
+  const client = new import_milvus2_sdk_node.MilvusClient({
+    address: credentials.baseUrl,
+    token: `${credentials.username}:${credentials.password}`
+  });
+  const response = await client.listCollections();
+  const results = response.data.map((collection) => ({
+    name: collection.name,
+    value: collection.name
+  }));
+  return { results };
+}
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+  milvusCollectionsSearch,
   pineconeIndexSearch,
   qdrantCollectionsSearch,
   supabaseTableNameSearch
package/dist/nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../../nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.ts"],"sourcesContent":["import { Pinecone } from '@pinecone-database/pinecone';\nimport { QdrantClient } from '@qdrant/js-client-rest';\nimport { ApplicationError, type IDataObject, type ILoadOptionsFunctions } from 'n8n-workflow';\n\nexport async function pineconeIndexSearch(this: ILoadOptionsFunctions) {\n\tconst credentials = await this.getCredentials('pineconeApi');\n\n\tconst client = new Pinecone({\n\t\tapiKey: credentials.apiKey as string,\n\t});\n\n\tconst indexes = await client.listIndexes();\n\n\tconst results = (indexes.indexes ?? []).map((index) => ({\n\t\tname: index.name,\n\t\tvalue: index.name,\n\t}));\n\n\treturn { results };\n}\n\nexport async function supabaseTableNameSearch(this: ILoadOptionsFunctions) {\n\tconst credentials = await this.getCredentials('supabaseApi');\n\n\tconst results = [];\n\n\tif (typeof credentials.host !== 'string') {\n\t\tthrow new ApplicationError('Expected Supabase credentials host to be a string');\n\t}\n\n\tconst { paths } = (await this.helpers.requestWithAuthentication.call(this, 'supabaseApi', {\n\t\theaders: {\n\t\t\tPrefer: 'return=representation',\n\t\t},\n\t\tmethod: 'GET',\n\t\turi: `${credentials.host}/rest/v1/`,\n\t\tjson: true,\n\t})) as { paths: IDataObject };\n\n\tfor (const path of Object.keys(paths)) {\n\t\t//omit introspection path\n\t\tif (path === '/') continue;\n\n\t\tresults.push({\n\t\t\tname: path.replace('/', ''),\n\t\t\tvalue: path.replace('/', ''),\n\t\t});\n\t}\n\n\treturn { results };\n}\n\nexport async function qdrantCollectionsSearch(this: ILoadOptionsFunctions) {\n\tconst credentials = await this.getCredentials('qdrantApi');\n\n\tconst client = new QdrantClient({\n\t\turl: credentials.qdrantUrl as string,\n\t\tapiKey: credentials.apiKey as string,\n\t});\n\n\tconst response = await client.getCollections();\n\n\tconst results = response.collections.map((collection) => ({\n\t\tname: collection.name,\n\t\tvalue: collection.name,\n\t}));\n\n\treturn { results };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAyB;AACzB,4BAA6B;AAC7B,0BAA+E;AAE/E,eAAsB,sBAAiD;AACtE,QAAM,cAAc,MAAM,KAAK,eAAe,aAAa;AAE3D,QAAM,SAAS,IAAI,yBAAS;AAAA,IAC3B,QAAQ,YAAY;AAAA,EACrB,CAAC;AAED,QAAM,UAAU,MAAM,OAAO,YAAY;AAEzC,QAAM,WAAW,QAAQ,WAAW,CAAC,GAAG,IAAI,CAAC,WAAW;AAAA,IACvD,MAAM,MAAM;AAAA,IACZ,OAAO,MAAM;AAAA,EACd,EAAE;AAEF,SAAO,EAAE,QAAQ;AAClB;AAEA,eAAsB,0BAAqD;AAC1E,QAAM,cAAc,MAAM,KAAK,eAAe,aAAa;AAE3D,QAAM,UAAU,CAAC;AAEjB,MAAI,OAAO,YAAY,SAAS,UAAU;AACzC,UAAM,IAAI,qCAAiB,mDAAmD;AAAA,EAC/E;AAEA,QAAM,EAAE,MAAM,IAAK,MAAM,KAAK,QAAQ,0BAA0B,KAAK,MAAM,eAAe;AAAA,IACzF,SAAS;AAAA,MACR,QAAQ;AAAA,IACT;AAAA,IACA,QAAQ;AAAA,IACR,KAAK,GAAG,YAAY,IAAI;AAAA,IACxB,MAAM;AAAA,EACP,CAAC;AAED,aAAW,QAAQ,OAAO,KAAK,KAAK,GAAG;AAEtC,QAAI,SAAS,IAAK;AAElB,YAAQ,KAAK;AAAA,MACZ,MAAM,KAAK,QAAQ,KAAK,EAAE;AAAA,MAC1B,OAAO,KAAK,QAAQ,KAAK,EAAE;AAAA,IAC5B,CAAC;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ;AAClB;AAEA,eAAsB,0BAAqD;AAC1E,QAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,QAAM,SAAS,IAAI,mCAAa;AAAA,IAC/B,KAAK,YAAY;AAAA,IACjB,QAAQ,YAAY;AAAA,EACrB,CAAC;AAED,QAAM,WAAW,MAAM,OAAO,eAAe;AAE7C,QAAM,UAAU,SAAS,YAAY,IAAI,CAAC,gBAAgB;AAAA,IACzD,MAAM,WAAW;AAAA,IACjB,OAAO,WAAW;AAAA,EACnB,EAAE;AAEF,SAAO,EAAE,QAAQ;AAClB;","names":[]}
+
{"version":3,"sources":["../../../../../../nodes/vector_store/shared/createVectorStoreNode/methods/listSearch.ts"],"sourcesContent":["import { Pinecone } from '@pinecone-database/pinecone';\nimport { QdrantClient } from '@qdrant/js-client-rest';\nimport { MilvusClient } from '@zilliz/milvus2-sdk-node';\nimport { ApplicationError, type IDataObject, type ILoadOptionsFunctions } from 'n8n-workflow';\n\nexport async function pineconeIndexSearch(this: ILoadOptionsFunctions) {\n\tconst credentials = await this.getCredentials('pineconeApi');\n\n\tconst client = new Pinecone({\n\t\tapiKey: credentials.apiKey as string,\n\t});\n\n\tconst indexes = await client.listIndexes();\n\n\tconst results = (indexes.indexes ?? []).map((index) => ({\n\t\tname: index.name,\n\t\tvalue: index.name,\n\t}));\n\n\treturn { results };\n}\n\nexport async function supabaseTableNameSearch(this: ILoadOptionsFunctions) {\n\tconst credentials = await this.getCredentials('supabaseApi');\n\n\tconst results = [];\n\n\tif (typeof credentials.host !== 'string') {\n\t\tthrow new ApplicationError('Expected Supabase credentials host to be a string');\n\t}\n\n\tconst { paths } = (await this.helpers.requestWithAuthentication.call(this, 'supabaseApi', {\n\t\theaders: {\n\t\t\tPrefer: 'return=representation',\n\t\t},\n\t\tmethod: 'GET',\n\t\turi: `${credentials.host}/rest/v1/`,\n\t\tjson: true,\n\t})) as { paths: IDataObject };\n\n\tfor (const path of Object.keys(paths)) {\n\t\t//omit introspection path\n\t\tif (path === '/') continue;\n\n\t\tresults.push({\n\t\t\tname: path.replace('/', ''),\n\t\t\tvalue: path.replace('/', ''),\n\t\t});\n\t}\n\n\treturn { results };\n}\n\nexport async function qdrantCollectionsSearch(this: ILoadOptionsFunctions) {\n\tconst credentials = await this.getCredentials('qdrantApi');\n\n\tconst client = new QdrantClient({\n\t\turl: credentials.qdrantUrl as string,\n\t\tapiKey: credentials.apiKey as string,\n\t});\n\n\tconst response = await client.getCollections();\n\n\tconst results = response.collections.map((collection) => ({\n\t\tname: collection.name,\n\t\tvalue: collection.name,\n\t}));\n\n\treturn { results };\n}\n\nexport async function milvusCollectionsSearch(this: ILoadOptionsFunctions) {\n\tconst credentials = await this.getCredentials<{\n\t\tbaseUrl: string;\n\t\tusername: string;\n\t\tpassword: string;\n\t}>('milvusApi');\n\n\tconst client = new MilvusClient({\n\t\taddress: credentials.baseUrl,\n\t\ttoken: `${credentials.username}:${credentials.password}`,\n\t});\n\n\tconst response = await client.listCollections();\n\n\tconst results = response.data.map((collection) => ({\n\t\tname: collection.name,\n\t\tvalue: collection.name,\n\t}));\n\n\treturn { results 
};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAyB;AACzB,4BAA6B;AAC7B,8BAA6B;AAC7B,0BAA+E;AAE/E,eAAsB,sBAAiD;AACtE,QAAM,cAAc,MAAM,KAAK,eAAe,aAAa;AAE3D,QAAM,SAAS,IAAI,yBAAS;AAAA,IAC3B,QAAQ,YAAY;AAAA,EACrB,CAAC;AAED,QAAM,UAAU,MAAM,OAAO,YAAY;AAEzC,QAAM,WAAW,QAAQ,WAAW,CAAC,GAAG,IAAI,CAAC,WAAW;AAAA,IACvD,MAAM,MAAM;AAAA,IACZ,OAAO,MAAM;AAAA,EACd,EAAE;AAEF,SAAO,EAAE,QAAQ;AAClB;AAEA,eAAsB,0BAAqD;AAC1E,QAAM,cAAc,MAAM,KAAK,eAAe,aAAa;AAE3D,QAAM,UAAU,CAAC;AAEjB,MAAI,OAAO,YAAY,SAAS,UAAU;AACzC,UAAM,IAAI,qCAAiB,mDAAmD;AAAA,EAC/E;AAEA,QAAM,EAAE,MAAM,IAAK,MAAM,KAAK,QAAQ,0BAA0B,KAAK,MAAM,eAAe;AAAA,IACzF,SAAS;AAAA,MACR,QAAQ;AAAA,IACT;AAAA,IACA,QAAQ;AAAA,IACR,KAAK,GAAG,YAAY,IAAI;AAAA,IACxB,MAAM;AAAA,EACP,CAAC;AAED,aAAW,QAAQ,OAAO,KAAK,KAAK,GAAG;AAEtC,QAAI,SAAS,IAAK;AAElB,YAAQ,KAAK;AAAA,MACZ,MAAM,KAAK,QAAQ,KAAK,EAAE;AAAA,MAC1B,OAAO,KAAK,QAAQ,KAAK,EAAE;AAAA,IAC5B,CAAC;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ;AAClB;AAEA,eAAsB,0BAAqD;AAC1E,QAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,QAAM,SAAS,IAAI,mCAAa;AAAA,IAC/B,KAAK,YAAY;AAAA,IACjB,QAAQ,YAAY;AAAA,EACrB,CAAC;AAED,QAAM,WAAW,MAAM,OAAO,eAAe;AAE7C,QAAM,UAAU,SAAS,YAAY,IAAI,CAAC,gBAAgB;AAAA,IACzD,MAAM,WAAW;AAAA,IACjB,OAAO,WAAW;AAAA,EACnB,EAAE;AAEF,SAAO,EAAE,QAAQ;AAClB;AAEA,eAAsB,0BAAqD;AAC1E,QAAM,cAAc,MAAM,KAAK,eAI5B,WAAW;AAEd,QAAM,SAAS,IAAI,qCAAa;AAAA,IAC/B,SAAS,YAAY;AAAA,IACrB,OAAO,GAAG,YAAY,QAAQ,IAAI,YAAY,QAAQ;AAAA,EACvD,CAAC;AAED,QAAM,WAAW,MAAM,OAAO,gBAAgB;AAE9C,QAAM,UAAU,SAAS,KAAK,IAAI,CAAC,gBAAgB;AAAA,IAClD,MAAM,WAAW;AAAA,IACjB,OAAO,WAAW;AAAA,EACnB,EAAE;AAEF,SAAO,EAAE,QAAQ;AAClB;","names":[]}
package/dist/nodes/vector_store/shared/descriptions.js
CHANGED
@@ -18,6 +18,7 @@ var __copyProps = (to, from, except, desc) => {
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var descriptions_exports = {};
 __export(descriptions_exports, {
+  milvusCollectionRLC: () => milvusCollectionRLC,
   pineconeIndexRLC: () => pineconeIndexRLC,
   qdrantCollectionRLC: () => qdrantCollectionRLC,
   supabaseTableNameRLC: () => supabaseTableNameRLC
@@ -89,8 +90,31 @@ const qdrantCollectionRLC = {
     }
   ]
 };
+const milvusCollectionRLC = {
+  displayName: "Milvus Collection",
+  name: "milvusCollection",
+  type: "resourceLocator",
+  default: { mode: "list", value: "" },
+  required: true,
+  modes: [
+    {
+      displayName: "From List",
+      name: "list",
+      type: "list",
+      typeOptions: {
+        searchListMethod: "milvusCollectionsSearch"
+      }
+    },
+    {
+      displayName: "ID",
+      name: "id",
+      type: "string"
+    }
+  ]
+};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+  milvusCollectionRLC,
   pineconeIndexRLC,
   qdrantCollectionRLC,
   supabaseTableNameRLC
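The searchListMethod string in milvusCollectionRLC has to match the key the node registers under methods.listSearch in VectorStoreMilvus.node.js above, and milvusCollectionsSearch returns the { results } shape that populates the From List dropdown. A rough sketch of that lookup follows; it is a simplified illustration, not n8n's actual internals:

// Hypothetical illustration; the names follow the diff above.
type ListSearchResult = { results: Array<{ name: string; value: string }> };

const nodeMethods = {
  listSearch: {
    // Registered by the node as methods: { listSearch: { milvusCollectionsSearch } }.
    milvusCollectionsSearch: async (): Promise<ListSearchResult> => ({
      results: [{ name: 'my_collection', value: 'my_collection' }],
    }),
  },
};

// The resource locator's "From List" mode points at that method by name.
const searchListMethod = 'milvusCollectionsSearch';
const { results } = await nodeMethods.listSearch[searchListMethod]();
// results fill the "Milvus Collection" dropdown; the chosen value is later read with
// getNodeParameter('milvusCollection', itemIndex, '', { extractValue: true }).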
package/dist/nodes/vector_store/shared/descriptions.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../nodes/vector_store/shared/descriptions.ts"],"sourcesContent":["import type { INodeProperties } from 'n8n-workflow';\n\nexport const pineconeIndexRLC: INodeProperties = {\n\tdisplayName: 'Pinecone Index',\n\tname: 'pineconeIndex',\n\ttype: 'resourceLocator',\n\tdefault: { mode: 'list', value: '' },\n\trequired: true,\n\tmodes: [\n\t\t{\n\t\t\tdisplayName: 'From List',\n\t\t\tname: 'list',\n\t\t\ttype: 'list',\n\t\t\ttypeOptions: {\n\t\t\t\tsearchListMethod: 'pineconeIndexSearch',\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'ID',\n\t\t\tname: 'id',\n\t\t\ttype: 'string',\n\t\t},\n\t],\n};\n\nexport const supabaseTableNameRLC: INodeProperties = {\n\tdisplayName: 'Table Name',\n\tname: 'tableName',\n\ttype: 'resourceLocator',\n\tdefault: { mode: 'list', value: '' },\n\trequired: true,\n\tmodes: [\n\t\t{\n\t\t\tdisplayName: 'From List',\n\t\t\tname: 'list',\n\t\t\ttype: 'list',\n\t\t\ttypeOptions: {\n\t\t\t\tsearchListMethod: 'supabaseTableNameSearch',\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'ID',\n\t\t\tname: 'id',\n\t\t\ttype: 'string',\n\t\t},\n\t],\n};\n\nexport const qdrantCollectionRLC: INodeProperties = {\n\tdisplayName: 'Qdrant Collection',\n\tname: 'qdrantCollection',\n\ttype: 'resourceLocator',\n\tdefault: { mode: 'list', value: '' },\n\trequired: true,\n\tmodes: [\n\t\t{\n\t\t\tdisplayName: 'From List',\n\t\t\tname: 'list',\n\t\t\ttype: 'list',\n\t\t\ttypeOptions: {\n\t\t\t\tsearchListMethod: 'qdrantCollectionsSearch',\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'ID',\n\t\t\tname: 'id',\n\t\t\ttype: 'string',\n\t\t},\n\t],\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,mBAAoC;AAAA,EAChD,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,EACnC,UAAU;AAAA,EACV,OAAO;AAAA,IACN;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,uBAAwC;AAAA,EACpD,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,EACnC,UAAU;AAAA,EACV,OAAO;AAAA,IACN;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,sBAAuC;AAAA,EACnD,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,EACnC,UAAU;AAAA,EACV,OAAO;AAAA,IACN;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,IACP;AAAA,EACD;AACD;","names":[]}
+
{"version":3,"sources":["../../../../nodes/vector_store/shared/descriptions.ts"],"sourcesContent":["import type { INodeProperties } from 'n8n-workflow';\n\nexport const pineconeIndexRLC: INodeProperties = {\n\tdisplayName: 'Pinecone Index',\n\tname: 'pineconeIndex',\n\ttype: 'resourceLocator',\n\tdefault: { mode: 'list', value: '' },\n\trequired: true,\n\tmodes: [\n\t\t{\n\t\t\tdisplayName: 'From List',\n\t\t\tname: 'list',\n\t\t\ttype: 'list',\n\t\t\ttypeOptions: {\n\t\t\t\tsearchListMethod: 'pineconeIndexSearch',\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'ID',\n\t\t\tname: 'id',\n\t\t\ttype: 'string',\n\t\t},\n\t],\n};\n\nexport const supabaseTableNameRLC: INodeProperties = {\n\tdisplayName: 'Table Name',\n\tname: 'tableName',\n\ttype: 'resourceLocator',\n\tdefault: { mode: 'list', value: '' },\n\trequired: true,\n\tmodes: [\n\t\t{\n\t\t\tdisplayName: 'From List',\n\t\t\tname: 'list',\n\t\t\ttype: 'list',\n\t\t\ttypeOptions: {\n\t\t\t\tsearchListMethod: 'supabaseTableNameSearch',\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'ID',\n\t\t\tname: 'id',\n\t\t\ttype: 'string',\n\t\t},\n\t],\n};\n\nexport const qdrantCollectionRLC: INodeProperties = {\n\tdisplayName: 'Qdrant Collection',\n\tname: 'qdrantCollection',\n\ttype: 'resourceLocator',\n\tdefault: { mode: 'list', value: '' },\n\trequired: true,\n\tmodes: [\n\t\t{\n\t\t\tdisplayName: 'From List',\n\t\t\tname: 'list',\n\t\t\ttype: 'list',\n\t\t\ttypeOptions: {\n\t\t\t\tsearchListMethod: 'qdrantCollectionsSearch',\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'ID',\n\t\t\tname: 'id',\n\t\t\ttype: 'string',\n\t\t},\n\t],\n};\n\nexport const milvusCollectionRLC: INodeProperties = {\n\tdisplayName: 'Milvus Collection',\n\tname: 'milvusCollection',\n\ttype: 'resourceLocator',\n\tdefault: { mode: 'list', value: '' },\n\trequired: true,\n\tmodes: [\n\t\t{\n\t\t\tdisplayName: 'From List',\n\t\t\tname: 'list',\n\t\t\ttype: 'list',\n\t\t\ttypeOptions: {\n\t\t\t\tsearchListMethod: 'milvusCollectionsSearch',\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tdisplayName: 'ID',\n\t\t\tname: 'id',\n\t\t\ttype: 'string',\n\t\t},\n\t],\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,mBAAoC;AAAA,EAChD,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,EACnC,UAAU;AAAA,EACV,OAAO;AAAA,IACN;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,uBAAwC;AAAA,EACpD,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,EACnC,UAAU;AAAA,EACV,OAAO;AAAA,IACN;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,sBAAuC;AAAA,EACnD,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,EACnC,UAAU;AAAA,EACV,OAAO;AAAA,IACN;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,sBAAuC;AAAA,EACnD,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS,EAAE,MAAM,QAAQ,OAAO,GAAG;AAAA,EACnC,UAAU;AAAA,EACV,OAAO;AAAA,IACN;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,QACZ,kBAAkB;AAAA,MACnB;AAAA,IACD;AAAA,IACA;AAAA,MACC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,IACP;AAAA,EACD;AACD;","names":[]}
package/dist/types/credentials.json
CHANGED
@@ -7,6 +7,7 @@
{"name":"groqApi","displayName":"Groq","documentationUrl":"groq","properties":[{"displayName":"API Key","name":"apiKey","type":"string","typeOptions":{"password":true},"required":true,"default":""}],"authenticate":{"type":"generic","properties":{"headers":{"Authorization":"=Bearer {{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"https://api.groq.com/openai/v1","url":"/models"}},"supportedNodes":["lmChatGroq"],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatGroq/groq.svg"},
{"name":"huggingFaceApi","displayName":"HuggingFaceApi","documentationUrl":"huggingface","properties":[{"displayName":"API Key","name":"apiKey","type":"string","typeOptions":{"password":true},"required":true,"default":""}],"authenticate":{"type":"generic","properties":{"headers":{"Authorization":"=Bearer {{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"https://api-inference.huggingface.co","url":"/models/gpt2"}},"supportedNodes":["embeddingsHuggingFaceInference","lmOpenHuggingFaceInference"],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsHuggingFaceInference/huggingface.svg"},
{"name":"motorheadApi","displayName":"MotorheadApi","documentationUrl":"motorhead","properties":[{"displayName":"Host","name":"host","required":true,"type":"string","default":"https://api.getmetal.io/v1"},{"displayName":"API Key","name":"apiKey","type":"string","typeOptions":{"password":true},"required":true,"default":""},{"displayName":"Client ID","name":"clientId","type":"string","default":""}],"authenticate":{"type":"generic","properties":{"headers":{"x-metal-client-id":"={{$credentials.clientId}}","x-metal-api-key":"={{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"={{$credentials.host}}/keys/current"}},"supportedNodes":["memoryMotorhead"],"icon":"fa:file-export","iconColor":"black"},
+
{"name":"milvusApi","displayName":"Milvus","documentationUrl":"milvus","properties":[{"displayName":"Base URL","name":"baseUrl","required":true,"type":"string","default":"http://localhost:19530"},{"displayName":"Username","name":"username","type":"string","default":""},{"displayName":"Password","name":"password","type":"string","typeOptions":{"password":true},"default":""}],"authenticate":{"type":"generic","properties":{"headers":{"Authorization":"=Bearer {{$credentials.username}}:{{$credentials.password}}"}}},"test":{"request":{"baseURL":"={{ $credentials.baseUrl }}","url":"/v1/vector/collections","method":"GET"}},"supportedNodes":["vectorStoreMilvus"],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreMilvus/milvus-icon-black.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreMilvus/milvus-icon-white.svg"}},
{"name":"mistralCloudApi","displayName":"Mistral Cloud API","documentationUrl":"mistral","properties":[{"displayName":"API Key","name":"apiKey","type":"string","typeOptions":{"password":true},"required":true,"default":""}],"authenticate":{"type":"generic","properties":{"headers":{"Authorization":"=Bearer {{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"https://api.mistral.ai/v1","url":"/models","method":"GET"}},"supportedNodes":["embeddingsMistralCloud","lmChatMistralCloud"],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsMistralCloud/mistral.svg"},
{"name":"ollamaApi","displayName":"Ollama","documentationUrl":"ollama","properties":[{"displayName":"Base URL","name":"baseUrl","required":true,"type":"string","default":"http://localhost:11434"}],"test":{"request":{"baseURL":"={{ $credentials.baseUrl }}","url":"/","method":"GET"}},"supportedNodes":["embeddingsOllama","lmChatOllama","lmOllama"],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsOllama/ollama.svg"},
{"name":"openRouterApi","displayName":"OpenRouter","documentationUrl":"openrouter","properties":[{"displayName":"API Key","name":"apiKey","type":"string","typeOptions":{"password":true},"required":true,"default":""},{"displayName":"Base URL","name":"url","type":"hidden","default":"https://openrouter.ai/api/v1"}],"authenticate":{"type":"generic","properties":{"headers":{"Authorization":"=Bearer {{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"={{ $credentials.url }}","url":"/models"}},"supportedNodes":["lmChatOpenRouter"],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatOpenRouter/openrouter.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatOpenRouter/openrouter.dark.svg"}},
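The milvusApi credential definition added above verifies connectivity with a GET to /v1/vector/collections, authenticated with a Bearer token of the form "username:password". An equivalent standalone check is sketched below, assuming only what the credential's authenticate and test blocks specify; the fetch call and placeholder values are illustrative:

const baseUrl = 'http://localhost:19530'; // credential default
const username = 'root'; // placeholder values
const password = 'Milvus';

// Mirrors the credential's authenticate header and test request.
const res = await fetch(`${baseUrl}/v1/vector/collections`, {
  method: 'GET',
  headers: { Authorization: `Bearer ${username}:${password}` },
});
console.log(res.status, await res.json());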
package/dist/types/nodes.json
CHANGED
@@ -39,6 +39,8 @@
{"displayName":"Cohere Model","name":"lmCohere","group":["transform"],"version":1,"description":"Language Model Cohere","defaults":{"name":"Cohere Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Text Completion Models"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmcohere/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"cohereApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":250,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Model","name":"model","type":"string","description":"The name of the model to use","default":""},{"displayName":"Sampling Temperature","name":"temperature","default":0,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"}]}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMCohere/cohere.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMCohere/cohere.dark.svg"}},
{"displayName":"Ollama Model","name":"lmOllama","group":["transform"],"version":1,"description":"Language Model Ollama","defaults":{"name":"Ollama Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Text Completion Models"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmollama/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"ollamaApi","required":true}],"requestDefaults":{"ignoreHttpStatusErrors":true,"baseURL":"={{ $credentials.baseUrl.replace(new RegExp(\"/$\"), \"\") }}"},"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"options","default":"llama3.2","description":"The model which will generate the completion. To download models, visit <a href=\"https://ollama.ai/library\">Ollama Models Library</a>.","typeOptions":{"loadOptions":{"routing":{"request":{"method":"GET","url":"/api/tags"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"models"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.name}}","value":"={{$responseItem.name}}"}},{"type":"sort","properties":{"key":"name"}}]}}}},"routing":{"send":{"type":"body","property":"model"}},"required":true},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Sampling Temperature","name":"temperature","default":0.7,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls the randomness of the generated text. Lower values make the output more focused and deterministic, while higher values make it more diverse and random.","type":"number"},{"displayName":"Top K","name":"topK","default":-1,"typeOptions":{"maxValue":100,"minValue":-1,"numberPrecision":1},"description":"Limits the number of highest probability vocabulary tokens to consider at each step. A higher value increases diversity but may reduce coherence. Set to -1 to disable.","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Chooses from the smallest possible set of tokens whose cumulative probability exceeds the probability top_p. Helps generate more human-like text by reducing repetitions.","type":"number"},{"displayName":"Frequency Penalty","name":"frequencyPenalty","type":"number","default":0,"typeOptions":{"minValue":0},"description":"Adjusts the penalty for tokens that have already appeared in the generated text. Higher values discourage repetition."},{"displayName":"Keep Alive","name":"keepAlive","type":"string","default":"5m","description":"Specifies the duration to keep the loaded model in memory after use. Useful for frequently used models. Format: 1h30m (1 hour 30 minutes)."},{"displayName":"Low VRAM Mode","name":"lowVram","type":"boolean","default":false,"description":"Whether to Activate low VRAM mode, which reduces memory usage at the cost of slower generation speed. 
Useful for GPUs with limited memory."},{"displayName":"Main GPU ID","name":"mainGpu","type":"number","default":0,"description":"Specifies the ID of the GPU to use for the main computation. Only change this if you have multiple GPUs."},{"displayName":"Context Batch Size","name":"numBatch","type":"number","default":512,"description":"Sets the batch size for prompt processing. Larger batch sizes may improve generation speed but increase memory usage."},{"displayName":"Context Length","name":"numCtx","type":"number","default":2048,"description":"The maximum number of tokens to use as context for generating the next token. Smaller values reduce memory usage, while larger values provide more context to the model."},{"displayName":"Number of GPUs","name":"numGpu","type":"number","default":-1,"description":"Specifies the number of GPUs to use for parallel processing. Set to -1 for auto-detection."},{"displayName":"Max Tokens to Generate","name":"numPredict","type":"number","default":-1,"description":"The maximum number of tokens to generate. Set to -1 for no limit. Be cautious when setting this to a large value, as it can lead to very long outputs."},{"displayName":"Number of CPU Threads","name":"numThread","type":"number","default":0,"description":"Specifies the number of CPU threads to use for processing. Set to 0 for auto-detection."},{"displayName":"Penalize Newlines","name":"penalizeNewline","type":"boolean","default":true,"description":"Whether the model will be less likely to generate newline characters, encouraging longer continuous sequences of text"},{"displayName":"Presence Penalty","name":"presencePenalty","type":"number","default":0,"description":"Adjusts the penalty for tokens based on their presence in the generated text so far. Positive values penalize tokens that have already appeared, encouraging diversity."},{"displayName":"Repetition Penalty","name":"repeatPenalty","type":"number","default":1,"description":"Adjusts the penalty factor for repeated tokens. Higher values more strongly discourage repetition. Set to 1.0 to disable repetition penalty."},{"displayName":"Use Memory Locking","name":"useMLock","type":"boolean","default":false,"description":"Whether to lock the model in memory to prevent swapping. This can improve performance but requires sufficient available memory."},{"displayName":"Use Memory Mapping","name":"useMMap","type":"boolean","default":true,"description":"Whether to use memory mapping for loading the model. This can reduce memory usage but may impact performance. Recommended to keep enabled."},{"displayName":"Load Vocabulary Only","name":"vocabOnly","type":"boolean","default":false,"description":"Whether to only load the model vocabulary without the weights. Useful for quickly testing tokenization."},{"displayName":"Output Format","name":"format","type":"options","options":[{"name":"Default","value":"default"},{"name":"JSON","value":"json"}],"default":"default","description":"Specifies the format of the API response"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMOllama/ollama.svg"},
{"displayName":"Hugging Face Inference Model","name":"lmOpenHuggingFaceInference","group":["transform"],"version":1,"description":"Language Model HuggingFaceInference","defaults":{"name":"Hugging Face Inference Model"},"codex":{"categories":["AI"],"subcategories":{"AI":["Language Models","Root Nodes"],"Language Models":["Text Completion Models"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmopenhuggingfaceinference/"}]}},"inputs":[],"outputs":["ai_languageModel"],"outputNames":["Model"],"credentials":[{"name":"huggingFaceApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI chain. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Model","name":"model","type":"string","default":"gpt2"},{"displayName":"Options","name":"options","placeholder":"Add Option","description":"Additional options to add","type":"collection","default":{},"options":[{"displayName":"Custom Inference Endpoint","name":"endpointUrl","default":"","description":"Custom endpoint URL","type":"string"},{"displayName":"Frequency Penalty","name":"frequencyPenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim","type":"number"},{"displayName":"Maximum Number of Tokens","name":"maxTokens","default":128,"description":"The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).","type":"number","typeOptions":{"maxValue":32768}},{"displayName":"Presence Penalty","name":"presencePenalty","default":0,"typeOptions":{"maxValue":2,"minValue":-2,"numberPrecision":1},"description":"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics","type":"number"},{"displayName":"Sampling Temperature","name":"temperature","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.","type":"number"},{"displayName":"Top K","name":"topK","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls the top tokens to consider within the sample operation to create new text","type":"number"},{"displayName":"Top P","name":"topP","default":1,"typeOptions":{"maxValue":1,"minValue":0,"numberPrecision":1},"description":"Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.","type":"number"}]}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LMOpenHuggingFaceInference/huggingface.svg"},
+
{"displayName":"MCP Client Tool","name":"mcpClientTool","group":["output"],"version":1,"description":"Connect tools from an MCP Server","defaults":{"name":"MCP Client"},"codex":{"categories":["AI"],"subcategories":{"AI":["Model Context Protocol","Tools"]},"alias":["Model Context Protocol","MCP Client"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.mcpclienttool/"}]}},"inputs":[],"outputs":[{"type":"ai_tool","displayName":"Tools"}],"credentials":[{"name":"httpBearerAuth","required":true,"displayOptions":{"show":{"authentication":["bearerAuth"]}}},{"name":"httpHeaderAuth","required":true,"displayOptions":{"show":{"authentication":["headerAuth"]}}}],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"SSE Endpoint","name":"sseEndpoint","type":"string","description":"SSE Endpoint of your MCP server","placeholder":"e.g. https://my-mcp-server.ai/sse","default":"","required":true},{"displayName":"Authentication","name":"authentication","type":"options","options":[{"name":"Bearer Auth","value":"bearerAuth"},{"name":"Header Auth","value":"headerAuth"},{"name":"None","value":"none"}],"default":"none","description":"The way to authenticate with your SSE endpoint"},{"displayName":"Credentials","name":"credentials","type":"credentials","default":"","displayOptions":{"show":{"authentication":["headerAuth","bearerAuth"]}}},{"displayName":"Tools to Include","name":"include","type":"options","description":"How to select the tools you want to be exposed to the AI Agent","default":"all","options":[{"name":"All","value":"all","description":"Also include all unchanged fields from the input"},{"name":"Selected","value":"selected","description":"Also include the tools listed in the parameter \"Tools to Include\""},{"name":"All Except","value":"except","description":"Exclude the tools listed in the parameter \"Tools to Exclude\""}]},{"displayName":"Tools to Include","name":"includeTools","type":"multiOptions","default":[],"description":"Choose from the list, or specify IDs using an <a href=\"https://docs.n8n.io/code/expressions/\">expression</a>","typeOptions":{"loadOptionsMethod":"getTools","loadOptionsDependsOn":["sseEndpoint"]},"displayOptions":{"show":{"include":["selected"]}}},{"displayName":"Tools to Exclude","name":"excludeTools","type":"multiOptions","default":[],"description":"Choose from the list, or specify IDs using an <a href=\"https://docs.n8n.io/code/expressions/\">expression</a>","typeOptions":{"loadOptionsMethod":"getTools"},"displayOptions":{"show":{"include":["except"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/mcp/mcp.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/mcp/mcp.dark.svg"}},
+
{"displayName":"MCP Server Trigger","name":"mcpTrigger","group":["trigger"],"version":1,"description":"Expose n8n tools as an MCP Server endpoint","activationMessage":"You can now connect your MCP Clients to the SSE URL.","defaults":{"name":"MCP Server Trigger"},"codex":{"categories":["AI","Core Nodes"],"subcategories":{"AI":["Root Nodes","Model Context Protocol"],"Core Nodes":["Other Trigger Nodes"]},"alias":["Model Context Protocol","MCP Server"],"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-langchain.mcptrigger/"}]}},"triggerPanel":{"header":"Listen for MCP events","executionsHelp":{"inactive":"This trigger has two modes: test and production.<br /><br /><b>Use test mode while you build your workflow</b>. Click the 'test step' button, then make an MCP request to the test URL. The executions will show up in the editor.<br /><br /><b>Use production mode to run your workflow automatically</b>. <a data-key='activate'>Activate</a> the workflow, then make requests to the production URL. These executions will show up in the <a data-key='executions'>executions list</a>, but not the editor.","active":"This trigger has two modes: test and production.<br /><br /><b>Use test mode while you build your workflow</b>. Click the 'test step' button, then make an MCP request to the test URL. The executions will show up in the editor.<br /><br /><b>Use production mode to run your workflow automatically</b>. Since your workflow is activated, you can make requests to the production URL. These executions will show up in the <a data-key='executions'>executions list</a>, but not the editor."},"activationHint":"Once you’ve finished building your workflow, run it without having to click this button by using the production URL."},"inputs":[{"type":"ai_tool","displayName":"Tools"}],"outputs":[],"credentials":[{"name":"httpBearerAuth","required":true,"displayOptions":{"show":{"authentication":["bearerAuth"]}}},{"name":"httpHeaderAuth","required":true,"displayOptions":{"show":{"authentication":["headerAuth"]}}}],"properties":[{"displayName":"Authentication","name":"authentication","type":"options","options":[{"name":"None","value":"none"},{"name":"Bearer Auth","value":"bearerAuth"},{"name":"Header Auth","value":"headerAuth"}],"default":"none","description":"The way to authenticate"},{"displayName":"Path","name":"path","type":"string","default":"","placeholder":"webhook","required":true,"description":"The base path for this MCP server"}],"webhooks":[{"name":"setup","httpMethod":"GET","responseMode":"onReceived","isFullPath":true,"path":"={{$parameter[\"path\"]}}/sse","nodeType":"mcp","ndvHideMethod":true,"ndvHideUrl":false},{"name":"default","httpMethod":"POST","responseMode":"onReceived","isFullPath":true,"path":"={{$parameter[\"path\"]}}/messages","nodeType":"mcp","ndvHideMethod":true,"ndvHideUrl":true}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/mcp/mcp.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/mcp/mcp.dark.svg"}},
|
|
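The McpTrigger definition registers two webhooks: GET <path>/sse opens the event stream and POST <path>/messages receives client messages, with the connected ai_tool inputs served back as MCP tools. A rough single-session sketch of that shape using the MCP SDK and Express; the echo tool, paths, and port are illustrative and this is not the node's actual code (see dist/nodes/mcp/McpTrigger/McpServer.js for the real wiring).

```js
const express = require("express");
const { z } = require("zod");
const { McpServer } = require("@modelcontextprotocol/sdk/server/mcp.js");
const { SSEServerTransport } = require("@modelcontextprotocol/sdk/server/sse.js");

const server = new McpServer({ name: "mcp-trigger-example", version: "0.1.0" });

// Stand-in for a connected ai_tool input; the trigger exposes each connected
// n8n tool to MCP clients roughly like this.
server.tool("echo", "Echo a message back", { message: z.string() }, async ({ message }) => ({
  content: [{ type: "text", text: message }],
}));

const app = express();
let transport; // single-session simplification; the real server tracks one transport per client

// GET <path>/sse corresponds to the "setup" webhook: it opens the SSE stream.
app.get("/webhook/sse", async (_req, res) => {
  transport = new SSEServerTransport("/webhook/messages", res);
  await server.connect(transport);
});

// POST <path>/messages corresponds to the "default" webhook: it receives client messages.
app.post("/webhook/messages", async (req, res) => {
  await transport.handlePostMessage(req, res);
});

app.listen(3000);
```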
42
44
|
{"displayName":"Simple Memory","name":"memoryBufferWindow","icon":"fa:database","iconColor":"black","group":["transform"],"version":[1,1.1,1.2,1.3],"description":"Stores in n8n memory, so no credentials required","defaults":{"name":"Simple Memory"},"codex":{"categories":["AI"],"subcategories":{"AI":["Memory"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memorybufferwindow/"}]}},"inputs":[],"outputs":["ai_memory"],"outputNames":["Memory"],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Session Key","name":"sessionKey","type":"string","default":"chat_history","description":"The key to use to store the memory in the workflow data","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Session ID","name":"sessionKey","type":"string","default":"={{ $json.sessionId }}","description":"The key to use to store the memory","displayOptions":{"show":{"@version":[1.1]}}},{"displayName":"Session ID","name":"sessionIdType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"fromInput","description":"Looks for an input field called 'sessionId' that is coming from a directly connected Chat Trigger"},{"name":"Define below","value":"customKey","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"fromInput","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Session Key From Previous Node","name":"sessionKey","type":"string","default":"={{ $json.sessionId }}","disabledOptions":{"show":{"sessionIdType":["fromInput"]}},"displayOptions":{"show":{"sessionIdType":["fromInput"],"@version":[{"_cnd":{"gte":1.3}}]}}},{"displayName":"Key","name":"sessionKey","type":"string","default":"","description":"The key to use to store session ID in the memory","displayOptions":{"show":{"sessionIdType":["customKey"]}}},{"displayName":"Context Window Length","name":"contextWindowLength","type":"number","default":5,"hint":"How many past interactions the model receives as context"}]},
|
|
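For context, the Simple Memory node keeps only the last N interactions (the "Context Window Length" parameter, default 5) in n8n's own memory under a session key. Functionally this is close to LangChain's windowed buffer memory; a small sketch under that assumption, with session handling simplified to a single fixed key:

```js
const { BufferWindowMemory } = require("langchain/memory");

// k mirrors the node's "Context Window Length"; memoryKey mirrors the v1 default "chat_history".
const memory = new BufferWindowMemory({ k: 5, memoryKey: "chat_history", returnMessages: true });

async function demo() {
  await memory.saveContext({ input: "Hi, I'm Ada" }, { output: "Hello Ada!" });
  await memory.saveContext({ input: "What's my name?" }, { output: "Your name is Ada." });
  // Only the most recent k exchanges are returned as context for the model.
  console.log(await memory.loadMemoryVariables({}));
}

demo();
```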
43
45
|
{"displayName":"Motorhead","name":"memoryMotorhead","icon":"fa:file-export","iconColor":"black","group":["transform"],"version":[1,1.1,1.2,1.3],"description":"Use Motorhead Memory","defaults":{"name":"Motorhead"},"codex":{"categories":["AI"],"subcategories":{"AI":["Memory"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memorymotorhead/"}]}},"inputs":[],"outputs":["ai_memory"],"outputNames":["Memory"],"credentials":[{"name":"motorheadApi","required":true}],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Session ID","name":"sessionId","type":"string","required":true,"default":"","displayOptions":{"show":{"@version":[1]}}},{"displayName":"Session ID","name":"sessionId","type":"string","default":"={{ $json.sessionId }}","description":"The key to use to store the memory","displayOptions":{"show":{"@version":[1.1]}}},{"displayName":"Session ID","name":"sessionIdType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"fromInput","description":"Looks for an input field called 'sessionId' that is coming from a directly connected Chat Trigger"},{"name":"Define below","value":"customKey","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"fromInput","displayOptions":{"show":{"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Session Key From Previous Node","name":"sessionKey","type":"string","default":"={{ $json.sessionId }}","disabledOptions":{"show":{"sessionIdType":["fromInput"]}},"displayOptions":{"show":{"sessionIdType":["fromInput"],"@version":[{"_cnd":{"gte":1.3}}]}}},{"displayName":"Key","name":"sessionKey","type":"string","default":"","description":"The key to use to store session ID in the memory","displayOptions":{"show":{"sessionIdType":["customKey"]}}}]},
|
|
44
46
|
{"displayName":"Postgres Chat Memory","name":"memoryPostgresChat","group":["transform"],"version":[1,1.1,1.2,1.3],"description":"Stores the chat history in Postgres table.","defaults":{"name":"Postgres Chat Memory"},"credentials":[{"name":"postgres","required":true,"testedBy":"postgresConnectionTest"}],"codex":{"categories":["AI"],"subcategories":{"AI":["Memory"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memorypostgreschat/"}]}},"inputs":[],"outputs":["ai_memory"],"outputNames":["Memory"],"properties":[{"displayName":"This node must be connected to an AI agent. <a data-action='openSelectiveNodeCreator' data-action-parameter-creatorview='AI'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"}},{"displayName":"Session ID","name":"sessionIdType","type":"options","options":[{"name":"Connected Chat Trigger Node","value":"fromInput","description":"Looks for an input field called 'sessionId' that is coming from a directly connected Chat Trigger"},{"name":"Define below","value":"customKey","description":"Use an expression to reference data in previous nodes or enter static text"}],"default":"fromInput"},{"displayName":"Session Key From Previous Node","name":"sessionKey","type":"string","default":"={{ $json.sessionId }}","disabledOptions":{"show":{"sessionIdType":["fromInput"]}},"displayOptions":{"show":{"sessionIdType":["fromInput"],"@version":[{"_cnd":{"gte":1.2}}]}}},{"displayName":"Key","name":"sessionKey","type":"string","default":"","description":"The key to use to store session ID in the memory","displayOptions":{"show":{"sessionIdType":["customKey"]}}},{"displayName":"Table Name","name":"tableName","type":"string","default":"n8n_chat_histories","description":"The table name to store the chat history in. If table does not exist, it will be created."},{"displayName":"Context Window Length","name":"contextWindowLength","type":"number","default":5,"hint":"How many past interactions the model receives as context","displayOptions":{"hide":{"@version":[{"_cnd":{"lt":1.1}}]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/memory/MemoryPostgresChat/postgres.svg"},
|
|
@@ -73,6 +75,7 @@
|
|
|
73
75
|
{"displayName":"Simple Vector Store","name":"vectorStoreInMemory","description":"Work with your data in a Simple Vector Store. Don't use this for production usage.","icon":"fa:database","iconColor":"black","group":["transform"],"version":[1,1.1],"defaults":{"name":"Simple Vector Store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores","Tools","Root Nodes"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreinmemory/"}]}},"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"ai_embedding\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"main\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"ai_document\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}","outputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode ?? 'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"ai_tool\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"ai_vectorStore\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"main\"}]\n\t\t\t})($parameter)\n\t\t}}","properties":[{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"retrieve","options":[{"name":"Get Many","value":"load","description":"Get many ranked documents from vector store for query","action":"Get ranked documents from vector store"},{"name":"Insert Documents","value":"insert","description":"Insert documents into vector store","action":"Add documents to vector store"},{"name":"Retrieve Documents (As Vector Store for Chain/Tool)","value":"retrieve","description":"Retrieve documents from vector store to be used as vector store with AI nodes","action":"Retrieve documents for Chain/Tool as Vector Store","outputConnectionType":"ai_vectorStore"},{"name":"Retrieve Documents (As Tool for AI Agent)","value":"retrieve-as-tool","description":"Retrieve documents from vector store to be used as tool with AI nodes","action":"Retrieve documents for AI Agent as Tool","outputConnectionType":"ai_tool"}]},{"displayName":"This node must be connected to a vector store retriever. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_retriever'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"},"displayOptions":{"show":{"mode":["retrieve"]}}},{"displayName":"Name","name":"toolName","type":"string","default":"","required":true,"description":"Name of the vector store","placeholder":"e.g. company_knowledge_base","validateType":"string-alphanumeric","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Description","name":"toolDescription","type":"string","default":"","required":true,"typeOptions":{"rows":2},"description":"Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often","placeholder":"e.g. Work with your data in a Simple Vector Store. 
Don't use this for production usage.","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Memory Key","name":"memoryKey","type":"string","default":"vector_store_key","description":"The key to use to store the vector memory in the workflow data. The key will be prefixed with the workflow ID to avoid collisions."},{"displayName":"Embedding Batch Size","name":"embeddingBatchSize","type":"number","default":200,"description":"Number of documents to embed in a single batch","displayOptions":{"show":{"mode":["insert"],"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"<strong>For experimental use only</strong>: Data is stored in memory and will be lost if n8n restarts. Data may also be cleared if available memory gets low. <a href=\"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreinmemory/\">More info</a>","name":"notice","type":"notice","default":"","displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Clear Store","name":"clearStore","type":"boolean","default":false,"description":"Whether to clear the store before inserting new data","displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"","required":true,"description":"Search prompt to retrieve matching documents from the vector store using similarity-based ranking","displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"Number of top results to fetch from vector store","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Include Metadata","name":"includeDocumentMetadata","type":"boolean","default":true,"description":"Whether or not to include document metadata","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"ID","name":"id","type":"string","default":"","required":true,"description":"ID of an embedding entry","displayOptions":{"show":{"mode":["update"]}}}]},
|
|
74
76
|
{"displayName":"In Memory Vector Store Insert","name":"vectorStoreInMemoryInsert","icon":"fa:database","group":["transform"],"version":1,"hidden":true,"description":"Insert data into an in-memory vector store","defaults":{"name":"In Memory Vector Store Insert"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreinmemory/"}]}},"inputs":["main",{"displayName":"Document","maxConnections":1,"type":"ai_document","required":true},{"displayName":"Embedding","maxConnections":1,"type":"ai_embedding","required":true}],"outputs":["main"],"properties":[{"displayName":"The embbded data are stored in the server memory, so they will be lost when the server is restarted. Additionally, if the amount of data is too large, it may cause the server to crash due to insufficient memory.","name":"notice","type":"notice","default":""},{"displayName":"Clear Store","name":"clearStore","type":"boolean","default":false,"description":"Whether to clear the store before inserting new data"},{"displayName":"Memory Key","name":"memoryKey","type":"string","default":"vector_store_key","description":"The key to use to store the vector memory in the workflow data. The key will be prefixed with the workflow ID to avoid collisions."}]},
|
|
75
77
|
{"displayName":"In Memory Vector Store Load","name":"vectorStoreInMemoryLoad","icon":"fa:database","group":["transform"],"version":1,"hidden":true,"description":"Load embedded data from an in-memory vector store","defaults":{"name":"In Memory Vector Store Load"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreinmemory/"}]}},"inputs":[{"displayName":"Embedding","maxConnections":1,"type":"ai_embedding","required":true}],"outputs":["ai_vectorStore"],"outputNames":["Vector Store"],"properties":[{"displayName":"Memory Key","name":"memoryKey","type":"string","default":"vector_store_key","description":"The key to use to store the vector memory in the workflow data. The key will be prefixed with the workflow ID to avoid collisions."}]},
|
|
78
|
+
{"displayName":"Milvus Vector Store","name":"vectorStoreMilvus","description":"Work with your data in Milvus Vector Store","group":["transform"],"version":[1,1.1],"defaults":{"name":"Milvus Vector Store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores","Tools","Root Nodes"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoremilvus/"}]}},"credentials":[{"name":"milvusApi","required":true}],"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"ai_embedding\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"main\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"ai_document\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}","outputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode ?? 'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"ai_tool\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"ai_vectorStore\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"main\"}]\n\t\t\t})($parameter)\n\t\t}}","properties":[{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"retrieve","options":[{"name":"Get Many","value":"load","description":"Get many ranked documents from vector store for query","action":"Get ranked documents from vector store"},{"name":"Insert Documents","value":"insert","description":"Insert documents into vector store","action":"Add documents to vector store"},{"name":"Retrieve Documents (As Vector Store for Chain/Tool)","value":"retrieve","description":"Retrieve documents from vector store to be used as vector store with AI nodes","action":"Retrieve documents for Chain/Tool as Vector Store","outputConnectionType":"ai_vectorStore"},{"name":"Retrieve Documents (As Tool for AI Agent)","value":"retrieve-as-tool","description":"Retrieve documents from vector store to be used as tool with AI nodes","action":"Retrieve documents for AI Agent as Tool","outputConnectionType":"ai_tool"}]},{"displayName":"This node must be connected to a vector store retriever. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_retriever'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"},"displayOptions":{"show":{"mode":["retrieve"]}}},{"displayName":"Name","name":"toolName","type":"string","default":"","required":true,"description":"Name of the vector store","placeholder":"e.g. company_knowledge_base","validateType":"string-alphanumeric","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Description","name":"toolDescription","type":"string","default":"","required":true,"typeOptions":{"rows":2},"description":"Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often","placeholder":"e.g. 
Work with your data in Milvus Vector Store","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Milvus Collection","name":"milvusCollection","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"milvusCollectionsSearch"}},{"displayName":"ID","name":"id","type":"string"}]},{"displayName":"Embedding Batch Size","name":"embeddingBatchSize","type":"number","default":200,"description":"Number of documents to embed in a single batch","displayOptions":{"show":{"mode":["insert"],"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Clear Collection","name":"clearCollection","type":"boolean","default":false,"description":"Whether to clear the collection before inserting new data"}],"displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"","required":true,"description":"Search prompt to retrieve matching documents from the vector store using similarity-based ranking","displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"Number of top results to fetch from vector store","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Include Metadata","name":"includeDocumentMetadata","type":"boolean","default":true,"description":"Whether or not to include document metadata","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"ID","name":"id","type":"string","default":"","required":true,"description":"ID of an embedding entry","displayOptions":{"show":{"mode":["update"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreMilvus/milvus-icon-black.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreMilvus/milvus-icon-white.svg"}},
|
|
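The new Milvus Vector Store node follows the shared createVectorStoreNode shape: "Insert Documents" writes embedded documents into a collection (resolved via the milvusCollectionsSearch list method and the new milvusApi credential), while "Get Many" and the retrieve modes run similarity searches against it. A hedged sketch of the equivalent LangChain calls; the collection name, URL, and embeddings provider are assumptions, and whether the node uses exactly these helpers is not shown in this diff.

```js
const { Milvus } = require("@langchain/community/vectorstores/milvus");
const { OpenAIEmbeddings } = require("@langchain/openai");

const embeddings = new OpenAIEmbeddings();

async function run() {
  // Roughly the "Insert Documents" mode: embed texts and store them in a collection.
  const store = await Milvus.fromTexts(
    ["n8n is a workflow automation tool"],
    [{ source: "docs" }],
    embeddings,
    { collectionName: "company_knowledge_base", url: "http://localhost:19530" }
  );

  // Roughly the "Get Many" / retrieve modes: similarity search with a topK limit.
  const docs = await store.similaritySearch("What is n8n?", 4);
  console.log(docs.map((d) => d.pageContent));
}

run();
```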
76
79
|
{"displayName":"MongoDB Atlas Vector Store","name":"vectorStoreMongoDBAtlas","description":"Work with your data in MongoDB Atlas Vector Store","group":["transform"],"version":[1,1.1],"defaults":{"name":"MongoDB Atlas Vector Store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores","Tools","Root Nodes"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoremongodbatlas/"}]}},"credentials":[{"name":"mongoDb","required":true}],"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"ai_embedding\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"main\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"ai_document\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}","outputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode ?? 'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"ai_tool\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"ai_vectorStore\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"main\"}]\n\t\t\t})($parameter)\n\t\t}}","properties":[{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"retrieve","options":[{"name":"Get Many","value":"load","description":"Get many ranked documents from vector store for query","action":"Get ranked documents from vector store"},{"name":"Insert Documents","value":"insert","description":"Insert documents into vector store","action":"Add documents to vector store"},{"name":"Retrieve Documents (As Vector Store for Chain/Tool)","value":"retrieve","description":"Retrieve documents from vector store to be used as vector store with AI nodes","action":"Retrieve documents for Chain/Tool as Vector Store","outputConnectionType":"ai_vectorStore"},{"name":"Retrieve Documents (As Tool for AI Agent)","value":"retrieve-as-tool","description":"Retrieve documents from vector store to be used as tool with AI nodes","action":"Retrieve documents for AI Agent as Tool","outputConnectionType":"ai_tool"},{"name":"Update Documents","value":"update","description":"Update documents in vector store by ID","action":"Update vector store documents"}]},{"displayName":"This node must be connected to a vector store retriever. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_retriever'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"},"displayOptions":{"show":{"mode":["retrieve"]}}},{"displayName":"Name","name":"toolName","type":"string","default":"","required":true,"description":"Name of the vector store","placeholder":"e.g. 
company_knowledge_base","validateType":"string-alphanumeric","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Description","name":"toolDescription","type":"string","default":"","required":true,"typeOptions":{"rows":2},"description":"Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often","placeholder":"e.g. Work with your data in MongoDB Atlas Vector Store","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"MongoDB Collection","name":"mongoCollection","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"mongoCollectionSearch"}},{"displayName":"Name","name":"name","type":"string","placeholder":"e.g. my_collection"}]},{"displayName":"Embedding","name":"embedding","type":"string","default":"embedding","description":"The field with the embedding array","required":true},{"displayName":"Metadata Field","name":"metadata_field","type":"string","default":"text","description":"The text field of the raw data","required":true},{"displayName":"Vector Index Name","name":"vectorIndexName","type":"string","default":"","description":"The name of the vector index","required":true},{"displayName":"Embedding Batch Size","name":"embeddingBatchSize","type":"number","default":200,"description":"Number of documents to embed in a single batch","displayOptions":{"show":{"mode":["insert"],"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Clear Namespace","name":"clearNamespace","type":"boolean","default":false,"description":"Whether to clear documents in the namespace before inserting new data"},{"displayName":"Namespace","name":"namespace","type":"string","description":"Logical partition for documents. Uses metadata.namespace field for filtering.","default":""}],"displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"","required":true,"description":"Search prompt to retrieve matching documents from the vector store using similarity-based ranking","displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"Number of top results to fetch from vector store","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Include Metadata","name":"includeDocumentMetadata","type":"boolean","default":true,"description":"Whether or not to include document metadata","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"ID","name":"id","type":"string","default":"","required":true,"description":"ID of an embedding entry","displayOptions":{"show":{"mode":["update"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Namespace","name":"namespace","type":"string","description":"Logical partition for documents. 
Uses metadata.namespace field for filtering.","default":""},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}],"displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Namespace","name":"namespace","type":"string","description":"Logical partition for documents. Uses metadata.namespace field for filtering.","default":""},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}],"displayOptions":{"show":{"mode":["retrieve"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreMongoDBAtlas/mongodb.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreMongoDBAtlas/mongodb.dark.svg"}},
|
|
77
80
|
{"displayName":"Postgres PGVector Store","name":"vectorStorePGVector","description":"Work with your data in Postgresql with the PGVector extension","group":["transform"],"version":[1,1.1],"defaults":{"name":"Postgres PGVector Store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores","Tools","Root Nodes"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorepgvector/"}]}},"credentials":[{"name":"postgres","required":true,"testedBy":"postgresConnectionTest"}],"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"ai_embedding\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"main\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"ai_document\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}","outputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode ?? 'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"ai_tool\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"ai_vectorStore\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"main\"}]\n\t\t\t})($parameter)\n\t\t}}","properties":[{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"retrieve","options":[{"name":"Get Many","value":"load","description":"Get many ranked documents from vector store for query","action":"Get ranked documents from vector store"},{"name":"Insert Documents","value":"insert","description":"Insert documents into vector store","action":"Add documents to vector store"},{"name":"Retrieve Documents (As Vector Store for Chain/Tool)","value":"retrieve","description":"Retrieve documents from vector store to be used as vector store with AI nodes","action":"Retrieve documents for Chain/Tool as Vector Store","outputConnectionType":"ai_vectorStore"},{"name":"Retrieve Documents (As Tool for AI Agent)","value":"retrieve-as-tool","description":"Retrieve documents from vector store to be used as tool with AI nodes","action":"Retrieve documents for AI Agent as Tool","outputConnectionType":"ai_tool"}]},{"displayName":"This node must be connected to a vector store retriever. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_retriever'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"},"displayOptions":{"show":{"mode":["retrieve"]}}},{"displayName":"Name","name":"toolName","type":"string","default":"","required":true,"description":"Name of the vector store","placeholder":"e.g. company_knowledge_base","validateType":"string-alphanumeric","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Description","name":"toolDescription","type":"string","default":"","required":true,"typeOptions":{"rows":2},"description":"Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often","placeholder":"e.g. 
Work with your data in Postgresql with the PGVector extension","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Table Name","name":"tableName","type":"string","default":"n8n_vectors","description":"The table name to store the vectors in. If table does not exist, it will be created."},{"displayName":"Embedding Batch Size","name":"embeddingBatchSize","type":"number","default":200,"description":"Number of documents to embed in a single batch","displayOptions":{"show":{"mode":["insert"],"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Collection","name":"collection","type":"fixedCollection","description":"Collection of vectors","default":{"values":{"useCollection":false,"collectionName":"n8n","collectionTable":"n8n_vector_collections"}},"typeOptions":{},"placeholder":"Add Collection Settings","options":[{"name":"values","displayName":"Collection Settings","values":[{"displayName":"Use Collection","name":"useCollection","type":"boolean","default":false},{"displayName":"Collection Name","name":"collectionName","type":"string","default":"n8n","required":true,"displayOptions":{"show":{"useCollection":[true]}}},{"displayName":"Collection Table Name","name":"collectionTableName","type":"string","default":"n8n_vector_collections","required":true,"displayOptions":{"show":{"useCollection":[true]}}}]}]},{"displayName":"Column Names","name":"columnNames","type":"fixedCollection","description":"The names of the columns in the PGVector table","default":{"values":{"idColumnName":"id","vectorColumnName":"embedding","contentColumnName":"text","metadataColumnName":"metadata"}},"typeOptions":{},"placeholder":"Set Column Names","options":[{"name":"values","displayName":"Column Name Settings","values":[{"displayName":"ID Column Name","name":"idColumnName","type":"string","default":"id","required":true},{"displayName":"Vector Column Name","name":"vectorColumnName","type":"string","default":"embedding","required":true},{"displayName":"Content Column Name","name":"contentColumnName","type":"string","default":"text","required":true},{"displayName":"Metadata Column Name","name":"metadataColumnName","type":"string","default":"metadata","required":true}]}]}],"displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"","required":true,"description":"Search prompt to retrieve matching documents from the vector store using similarity-based ranking","displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"Number of top results to fetch from vector store","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Include Metadata","name":"includeDocumentMetadata","type":"boolean","default":true,"description":"Whether or not to include document metadata","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"ID","name":"id","type":"string","default":"","required":true,"description":"ID of an embedding entry","displayOptions":{"show":{"mode":["update"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Distance Strategy","name":"distanceStrategy","type":"options","default":"cosine","description":"The method to calculate the distance between two vectors","options":[{"name":"Cosine","value":"cosine"},{"name":"Inner 
Product","value":"innerProduct"},{"name":"Euclidean","value":"euclidean"}]},{"displayName":"Collection","name":"collection","type":"fixedCollection","description":"Collection of vectors","default":{"values":{"useCollection":false,"collectionName":"n8n","collectionTable":"n8n_vector_collections"}},"typeOptions":{},"placeholder":"Add Collection Settings","options":[{"name":"values","displayName":"Collection Settings","values":[{"displayName":"Use Collection","name":"useCollection","type":"boolean","default":false},{"displayName":"Collection Name","name":"collectionName","type":"string","default":"n8n","required":true,"displayOptions":{"show":{"useCollection":[true]}}},{"displayName":"Collection Table Name","name":"collectionTableName","type":"string","default":"n8n_vector_collections","required":true,"displayOptions":{"show":{"useCollection":[true]}}}]}]},{"displayName":"Column Names","name":"columnNames","type":"fixedCollection","description":"The names of the columns in the PGVector table","default":{"values":{"idColumnName":"id","vectorColumnName":"embedding","contentColumnName":"text","metadataColumnName":"metadata"}},"typeOptions":{},"placeholder":"Set Column Names","options":[{"name":"values","displayName":"Column Name Settings","values":[{"displayName":"ID Column Name","name":"idColumnName","type":"string","default":"id","required":true},{"displayName":"Vector Column Name","name":"vectorColumnName","type":"string","default":"embedding","required":true},{"displayName":"Content Column Name","name":"contentColumnName","type":"string","default":"text","required":true},{"displayName":"Metadata Column Name","name":"metadataColumnName","type":"string","default":"metadata","required":true}]}]},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}],"displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Distance Strategy","name":"distanceStrategy","type":"options","default":"cosine","description":"The method to calculate the distance between two vectors","options":[{"name":"Cosine","value":"cosine"},{"name":"Inner Product","value":"innerProduct"},{"name":"Euclidean","value":"euclidean"}]},{"displayName":"Collection","name":"collection","type":"fixedCollection","description":"Collection of vectors","default":{"values":{"useCollection":false,"collectionName":"n8n","collectionTable":"n8n_vector_collections"}},"typeOptions":{},"placeholder":"Add Collection Settings","options":[{"name":"values","displayName":"Collection Settings","values":[{"displayName":"Use Collection","name":"useCollection","type":"boolean","default":false},{"displayName":"Collection Name","name":"collectionName","type":"string","default":"n8n","required":true,"displayOptions":{"show":{"useCollection":[true]}}},{"displayName":"Collection Table Name","name":"collectionTableName","type":"string","default":"n8n_vector_collections","required":true,"displayOptions":{"show":{"useCollection":[true]}}}]}]},{"displayName":"Column Names","name":"columnNames","type":"fixedCollection","description":"The names of the columns in 
the PGVector table","default":{"values":{"idColumnName":"id","vectorColumnName":"embedding","contentColumnName":"text","metadataColumnName":"metadata"}},"typeOptions":{},"placeholder":"Set Column Names","options":[{"name":"values","displayName":"Column Name Settings","values":[{"displayName":"ID Column Name","name":"idColumnName","type":"string","default":"id","required":true},{"displayName":"Vector Column Name","name":"vectorColumnName","type":"string","default":"embedding","required":true},{"displayName":"Content Column Name","name":"contentColumnName","type":"string","default":"text","required":true},{"displayName":"Metadata Column Name","name":"metadataColumnName","type":"string","default":"metadata","required":true}]}]},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}],"displayOptions":{"show":{"mode":["retrieve"]}}}],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStorePGVector/postgres.svg"},
|
|
78
81
|
{"displayName":"Pinecone Vector Store","name":"vectorStorePinecone","description":"Work with your data in Pinecone Vector Store","group":["transform"],"version":[1,1.1],"defaults":{"name":"Pinecone Vector Store"},"codex":{"categories":["AI"],"subcategories":{"AI":["Vector Stores","Tools","Root Nodes"],"Tools":["Other Tools"]},"resources":{"primaryDocumentation":[{"url":"https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorepinecone/"}]}},"credentials":[{"name":"pineconeApi","required":true}],"inputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode;\n\t\t\t\tconst inputs = [{ displayName: \"Embedding\", type: \"ai_embedding\", required: true, maxConnections: 1}]\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn inputs;\n\t\t\t\t}\n\n\t\t\t\tif (['insert', 'load', 'update'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"\", type: \"main\"})\n\t\t\t\t}\n\n\t\t\t\tif (['insert'].includes(mode)) {\n\t\t\t\t\tinputs.push({ displayName: \"Document\", type: \"ai_document\", required: true, maxConnections: 1})\n\t\t\t\t}\n\t\t\t\treturn inputs\n\t\t\t})($parameter)\n\t\t}}","outputs":"={{\n\t\t\t((parameters) => {\n\t\t\t\tconst mode = parameters?.mode ?? 'retrieve';\n\n\t\t\t\tif (mode === 'retrieve-as-tool') {\n\t\t\t\t\treturn [{ displayName: \"Tool\", type: \"ai_tool\"}]\n\t\t\t\t}\n\n\t\t\t\tif (mode === 'retrieve') {\n\t\t\t\t\treturn [{ displayName: \"Vector Store\", type: \"ai_vectorStore\"}]\n\t\t\t\t}\n\t\t\t\treturn [{ displayName: \"\", type: \"main\"}]\n\t\t\t})($parameter)\n\t\t}}","properties":[{"displayName":"Operation Mode","name":"mode","type":"options","noDataExpression":true,"default":"retrieve","options":[{"name":"Get Many","value":"load","description":"Get many ranked documents from vector store for query","action":"Get ranked documents from vector store"},{"name":"Insert Documents","value":"insert","description":"Insert documents into vector store","action":"Add documents to vector store"},{"name":"Retrieve Documents (As Vector Store for Chain/Tool)","value":"retrieve","description":"Retrieve documents from vector store to be used as vector store with AI nodes","action":"Retrieve documents for Chain/Tool as Vector Store","outputConnectionType":"ai_vectorStore"},{"name":"Retrieve Documents (As Tool for AI Agent)","value":"retrieve-as-tool","description":"Retrieve documents from vector store to be used as tool with AI nodes","action":"Retrieve documents for AI Agent as Tool","outputConnectionType":"ai_tool"},{"name":"Update Documents","value":"update","description":"Update documents in vector store by ID","action":"Update vector store documents"}]},{"displayName":"This node must be connected to a vector store retriever. <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='ai_retriever'>Insert one</a>","name":"notice","type":"notice","default":"","typeOptions":{"containerClass":"ndv-connection-hint-notice"},"displayOptions":{"show":{"mode":["retrieve"]}}},{"displayName":"Name","name":"toolName","type":"string","default":"","required":true,"description":"Name of the vector store","placeholder":"e.g. 
company_knowledge_base","validateType":"string-alphanumeric","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Description","name":"toolDescription","type":"string","default":"","required":true,"typeOptions":{"rows":2},"description":"Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often","placeholder":"e.g. Work with your data in Pinecone Vector Store","displayOptions":{"show":{"mode":["retrieve-as-tool"]}}},{"displayName":"Pinecone Index","name":"pineconeIndex","type":"resourceLocator","default":{"mode":"list","value":""},"required":true,"modes":[{"displayName":"From List","name":"list","type":"list","typeOptions":{"searchListMethod":"pineconeIndexSearch"}},{"displayName":"ID","name":"id","type":"string"}]},{"displayName":"Embedding Batch Size","name":"embeddingBatchSize","type":"number","default":200,"description":"Number of documents to embed in a single batch","displayOptions":{"show":{"mode":["insert"],"@version":[{"_cnd":{"gte":1.1}}]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Clear Namespace","name":"clearNamespace","type":"boolean","default":false,"description":"Whether to clear the namespace before inserting new data"},{"displayName":"Pinecone Namespace","name":"pineconeNamespace","type":"string","description":"Partition the records in an index into namespaces. Queries and other operations are then limited to one namespace, so different requests can search different subsets of your index.","default":""}],"displayOptions":{"show":{"mode":["insert"]}}},{"displayName":"Prompt","name":"prompt","type":"string","default":"","required":true,"description":"Search prompt to retrieve matching documents from the vector store using similarity-based ranking","displayOptions":{"show":{"mode":["load"]}}},{"displayName":"Limit","name":"topK","type":"number","default":4,"description":"Number of top results to fetch from vector store","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Include Metadata","name":"includeDocumentMetadata","type":"boolean","default":true,"description":"Whether or not to include document metadata","displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"ID","name":"id","type":"string","default":"","required":true,"description":"ID of an embedding entry","displayOptions":{"show":{"mode":["update"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Pinecone Namespace","name":"pineconeNamespace","type":"string","description":"Partition the records in an index into namespaces. 
Queries and other operations are then limited to one namespace, so different requests can search different subsets of your index.","default":""},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}],"displayOptions":{"show":{"mode":["load","retrieve-as-tool"]}}},{"displayName":"Options","name":"options","type":"collection","placeholder":"Add Option","default":{},"options":[{"displayName":"Pinecone Namespace","name":"pineconeNamespace","type":"string","description":"Partition the records in an index into namespaces. Queries and other operations are then limited to one namespace, so different requests can search different subsets of your index.","default":""},{"displayName":"Metadata Filter","name":"metadata","type":"fixedCollection","description":"Metadata to filter the document by","typeOptions":{"multipleValues":true},"default":{},"placeholder":"Add filter field","options":[{"name":"metadataValues","displayName":"Fields to Set","values":[{"displayName":"Name","name":"name","type":"string","default":"","required":true},{"displayName":"Value","name":"value","type":"string","default":""}]}]}],"displayOptions":{"show":{"mode":["retrieve"]}}}],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStorePinecone/pinecone.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStorePinecone/pinecone.dark.svg"}},
|
package/dist/utils/helpers.js
CHANGED
|
@@ -32,6 +32,7 @@ __export(helpers_exports, {
|
|
|
32
32
|
unwrapNestedOutput: () => unwrapNestedOutput
|
|
33
33
|
});
|
|
34
34
|
module.exports = __toCommonJS(helpers_exports);
|
|
35
|
+
var import_agents = require("langchain/agents");
|
|
35
36
|
var import_n8n_workflow = require("n8n-workflow");
|
|
36
37
|
var import_N8nTool = require("./N8nTool");
|
|
37
38
|
function hasMethods(obj, ...methodNames) {
|
|
@@ -136,7 +137,12 @@ function escapeSingleCurlyBrackets(text) {
|
|
|
136
137
|
return result;
|
|
137
138
|
}
|
|
138
139
|
const getConnectedTools = async (ctx, enforceUniqueNames, convertStructuredTool = true, escapeCurlyBrackets = false) => {
|
|
139
|
-
const connectedTools = await ctx.getInputConnectionData(import_n8n_workflow.NodeConnectionTypes.AiTool, 0)
|
|
140
|
+
const connectedTools = (await ctx.getInputConnectionData(import_n8n_workflow.NodeConnectionTypes.AiTool, 0) ?? []).flatMap((toolOrToolkit) => {
|
|
141
|
+
if (toolOrToolkit instanceof import_agents.Toolkit) {
|
|
142
|
+
return toolOrToolkit.getTools();
|
|
143
|
+
}
|
|
144
|
+
return toolOrToolkit;
|
|
145
|
+
});
|
|
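The change to getConnectedTools means a connected sub-node may now hand back a LangChain Toolkit (a container exposing getTools()) instead of a single tool, likely to support the new MCP Client Tool returning several tools at once; the helper flattens toolkits into individual tools before the unique-name check below. A small illustration of that flatMap; the WeatherToolkit and its tools are hypothetical.

```js
const { Toolkit } = require("langchain/agents");
const { DynamicTool } = require("@langchain/core/tools");

// Hypothetical toolkit bundling two tools, similar to what an MCP client sub-node could return.
class WeatherToolkit extends Toolkit {
  tools = [
    new DynamicTool({
      name: "current_weather",
      description: "Get the current weather for a city",
      func: async (city) => `Sunny in ${city}`,
    }),
    new DynamicTool({
      name: "forecast",
      description: "Get tomorrow's forecast for a city",
      func: async (city) => `Rain expected in ${city}`,
    }),
  ];
}

const connected = [
  new WeatherToolkit(),
  new DynamicTool({ name: "now", description: "Current time", func: async () => new Date().toISOString() }),
];

// Mirrors the flatMap added in helpers.js: toolkits expand to their tools, plain tools pass through.
const flattened = connected.flatMap((t) => (t instanceof Toolkit ? t.getTools() : t));
console.log(flattened.map((t) => t.name)); // [ 'current_weather', 'forecast', 'now' ]
```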
140
146
|
if (!enforceUniqueNames) return connectedTools;
|
|
141
147
|
const seenNames = /* @__PURE__ */ new Set();
|
|
142
148
|
const finalTools = [];
|