n8n-nodes-vercel-ai-sdk-universal-temp 0.1.20 → 0.1.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/nodes/UniversalAI/UniversalAI.node.d.ts +2 -2
- package/dist/nodes/UniversalAI/UniversalAI.node.js +542 -1150
- package/dist/nodes/UniversalAI/UniversalAI.node.js.map +1 -1
- package/dist/nodes/UniversalEmbedding/UniversalEmbedding.node.d.ts +2 -2
- package/dist/nodes/UniversalEmbedding/UniversalEmbedding.node.js +2 -250
- package/dist/nodes/UniversalEmbedding/UniversalEmbedding.node.js.map +1 -1
- package/dist/nodes/UniversalImageGen/UniversalImageGen.node.d.ts +2 -2
- package/dist/nodes/UniversalImageGen/UniversalImageGen.node.js +2 -220
- package/dist/nodes/UniversalImageGen/UniversalImageGen.node.js.map +1 -1
- package/dist/nodes/UniversalSpeechGen/UniversalSpeechGen.node.d.ts +2 -2
- package/dist/nodes/UniversalSpeechGen/UniversalSpeechGen.node.js +2 -135
- package/dist/nodes/UniversalSpeechGen/UniversalSpeechGen.node.js.map +1 -1
- package/dist/nodes/UniversalTranscription/UniversalTranscription.node.d.ts +2 -2
- package/dist/nodes/UniversalTranscription/UniversalTranscription.node.js +5 -131
- package/dist/nodes/UniversalTranscription/UniversalTranscription.node.js.map +1 -1
- package/dist/nodes/shared/descriptions.d.ts +6 -0
- package/dist/nodes/shared/descriptions.js +1456 -0
- package/dist/nodes/shared/descriptions.js.map +1 -0
- package/dist/package.json +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +1 -1
|
@@ -1,149 +1,268 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
2
35
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
36
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
37
|
};
|
|
5
38
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
39
|
exports.UniversalAI = void 0;
|
|
7
40
|
const n8n_workflow_1 = require("n8n-workflow");
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
41
|
+
let googleProvider;
|
|
42
|
+
let deepseekProvider;
|
|
43
|
+
let groqProvider;
|
|
44
|
+
let openrouterProvider;
|
|
12
45
|
const ai_1 = require("ai");
|
|
13
46
|
const zod_1 = require("zod");
|
|
14
47
|
const ajv_1 = __importDefault(require("ajv"));
|
|
15
|
-
const
|
|
16
|
-
|
|
17
|
-
|
|
48
|
+
const descriptions_1 = require("../shared/descriptions");
|
|
49
|
+
class Cache {
|
|
50
|
+
constructor(maxSize = 100, ttl = 5 * 60 * 1000) {
|
|
51
|
+
this.cache = new Map();
|
|
52
|
+
this.totalHits = 0;
|
|
53
|
+
this.totalMisses = 0;
|
|
54
|
+
this.maxSize = maxSize;
|
|
55
|
+
this.ttl = ttl;
|
|
56
|
+
}
|
|
57
|
+
get(key) {
|
|
58
|
+
const item = this.cache.get(key);
|
|
59
|
+
if (!item) {
|
|
60
|
+
this.totalMisses++;
|
|
61
|
+
return undefined;
|
|
62
|
+
}
|
|
63
|
+
if (Date.now() - item.timestamp > this.ttl) {
|
|
64
|
+
this.cache.delete(key);
|
|
65
|
+
this.totalMisses++;
|
|
66
|
+
return undefined;
|
|
67
|
+
}
|
|
68
|
+
item.hits++;
|
|
69
|
+
this.totalHits++;
|
|
70
|
+
return item.value;
|
|
71
|
+
}
|
|
72
|
+
set(key, value) {
|
|
73
|
+
if (this.cache.size >= this.maxSize) {
|
|
74
|
+
let minHits = Infinity;
|
|
75
|
+
let keyToDelete;
|
|
76
|
+
for (const [k, v] of this.cache.entries()) {
|
|
77
|
+
if (v.hits < minHits) {
|
|
78
|
+
minHits = v.hits;
|
|
79
|
+
keyToDelete = k;
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
if (keyToDelete) {
|
|
83
|
+
this.cache.delete(keyToDelete);
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
this.cache.set(key, { value, timestamp: Date.now(), hits: 0 });
|
|
87
|
+
}
|
|
88
|
+
getStats() {
|
|
89
|
+
return {
|
|
90
|
+
size: this.cache.size,
|
|
91
|
+
hits: this.totalHits,
|
|
92
|
+
misses: this.totalMisses,
|
|
93
|
+
hitRate: this.totalHits / (this.totalHits + this.totalMisses) || 0
|
|
94
|
+
};
|
|
95
|
+
}
|
|
96
|
+
clear() {
|
|
97
|
+
this.cache.clear();
|
|
98
|
+
this.totalHits = 0;
|
|
99
|
+
this.totalMisses = 0;
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
const modelCache = new Cache(50);
|
|
103
|
+
const providerCache = new Cache(20);
|
|
104
|
+
const schemaCache = new Cache(30);
|
|
105
|
+
const messageSchema = zod_1.z.object({
|
|
106
|
+
role: zod_1.z.enum(['system', 'user', 'assistant']),
|
|
107
|
+
content: zod_1.z.any(),
|
|
108
|
+
});
|
|
109
|
+
const messagesArraySchema = zod_1.z.array(messageSchema);
|
|
110
|
+
const ajv = new ajv_1.default({
|
|
111
|
+
allErrors: true,
|
|
112
|
+
verbose: true,
|
|
113
|
+
strict: false,
|
|
114
|
+
});
|
|
115
|
+
const isUrl = (str) => {
|
|
116
|
+
return str.startsWith('http://') ||
|
|
117
|
+
str.startsWith('https://') ||
|
|
118
|
+
str.startsWith('data:');
|
|
119
|
+
};
|
|
120
|
+
const isLikelyBase64 = (str) => {
|
|
121
|
+
if (str.length % 4 !== 0)
|
|
122
|
+
return false;
|
|
123
|
+
if (!/^[A-Za-z0-9+/]*={0,2}$/.test(str))
|
|
124
|
+
return false;
|
|
125
|
+
if (str.length > 10000)
|
|
126
|
+
return true;
|
|
127
|
+
return true;
|
|
128
|
+
};
|
|
18
129
|
async function buildInput(exec, itemIndex) {
|
|
19
130
|
const inputType = exec.getNodeParameter('inputType', itemIndex);
|
|
20
131
|
if (inputType === 'prompt') {
|
|
21
|
-
const promptVal = exec.getNodeParameter('prompt', itemIndex);
|
|
22
|
-
const systemVal = exec.getNodeParameter('system', itemIndex);
|
|
23
132
|
return {
|
|
24
|
-
prompt:
|
|
25
|
-
system:
|
|
133
|
+
prompt: exec.getNodeParameter('prompt', itemIndex),
|
|
134
|
+
system: exec.getNodeParameter('system', itemIndex),
|
|
26
135
|
};
|
|
27
136
|
}
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
137
|
+
const messageAsJson = exec.getNodeParameter('messageAsJson', itemIndex, false);
|
|
138
|
+
return messageAsJson
|
|
139
|
+
? buildMessagesFromJson(exec, itemIndex)
|
|
140
|
+
: buildMessagesFromUI(exec, itemIndex);
|
|
141
|
+
}
|
|
142
|
+
async function buildMessagesFromJson(exec, itemIndex) {
|
|
143
|
+
const rawJson = exec.getNodeParameter('messagesJson', itemIndex);
|
|
144
|
+
try {
|
|
145
|
+
const parsed = JSON.parse(rawJson);
|
|
146
|
+
const result = messagesArraySchema.safeParse(parsed);
|
|
147
|
+
if (!result.success) {
|
|
148
|
+
throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Messages must be an array of objects with role and content.');
|
|
149
|
+
}
|
|
150
|
+
return { messages: result.data };
|
|
151
|
+
}
|
|
152
|
+
catch (error) {
|
|
153
|
+
throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON in "Messages (JSON)" field: ${error.message}`);
|
|
154
|
+
}
|
|
155
|
+
}
|
|
156
|
+
async function buildMessagesFromUI(exec, itemIndex) {
|
|
157
|
+
var _a;
|
|
158
|
+
const items = exec.getInputData();
|
|
159
|
+
const messagesUi = exec.getNodeParameter('messages.messagesUi', itemIndex, []);
|
|
160
|
+
const builtMessages = [];
|
|
161
|
+
const itemBinary = items[itemIndex].binary;
|
|
162
|
+
for (const msg of messagesUi) {
|
|
163
|
+
const role = msg.role;
|
|
164
|
+
if (role === 'system') {
|
|
165
|
+
builtMessages.push({ role, content: msg.systemContent || '' });
|
|
166
|
+
continue;
|
|
167
|
+
}
|
|
168
|
+
const attachments = ((_a = msg.attachments) === null || _a === void 0 ? void 0 : _a.attachment) || [];
|
|
169
|
+
if (attachments.length === 0) {
|
|
170
|
+
builtMessages.push({ role, content: msg.content || '' });
|
|
53
171
|
}
|
|
54
172
|
else {
|
|
55
|
-
const
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
for (const msg of messagesUi) {
|
|
59
|
-
const role = msg.role;
|
|
60
|
-
if (role === 'system') {
|
|
61
|
-
builtMessages.push({
|
|
62
|
-
role,
|
|
63
|
-
content: msg.systemContent || '',
|
|
64
|
-
});
|
|
65
|
-
continue;
|
|
66
|
-
}
|
|
67
|
-
if (msg.contentType === 'text') {
|
|
68
|
-
builtMessages.push({
|
|
69
|
-
role,
|
|
70
|
-
content: msg.content || '',
|
|
71
|
-
});
|
|
72
|
-
}
|
|
73
|
-
else {
|
|
74
|
-
const parts = [];
|
|
75
|
-
if (msg.content) {
|
|
76
|
-
parts.push({
|
|
77
|
-
type: 'text',
|
|
78
|
-
text: msg.content,
|
|
79
|
-
});
|
|
80
|
-
}
|
|
81
|
-
const fileContentInput = msg.fileContent || '';
|
|
82
|
-
let fileData;
|
|
83
|
-
let detectedMimeType = msg.mimeType || undefined;
|
|
84
|
-
if (detectedMimeType === 'other') {
|
|
85
|
-
detectedMimeType = msg.mimeTypeOther || undefined;
|
|
86
|
-
}
|
|
87
|
-
if (fileContentInput.startsWith('http://') || fileContentInput.startsWith('https://') || fileContentInput.startsWith('data:')) {
|
|
88
|
-
fileData = fileContentInput;
|
|
89
|
-
}
|
|
90
|
-
else {
|
|
91
|
-
const itemBinary = items[itemIndex].binary;
|
|
92
|
-
if (itemBinary && itemBinary[fileContentInput]) {
|
|
93
|
-
const binaryData = itemBinary[fileContentInput];
|
|
94
|
-
const buffer = Buffer.from(binaryData.data, binaryData.data ? 'base64' : undefined);
|
|
95
|
-
fileData = buffer;
|
|
96
|
-
if (!detectedMimeType && binaryData.mimeType) {
|
|
97
|
-
detectedMimeType = binaryData.mimeType;
|
|
98
|
-
}
|
|
99
|
-
}
|
|
100
|
-
else {
|
|
101
|
-
try {
|
|
102
|
-
fileData = Buffer.from(fileContentInput, 'base64');
|
|
103
|
-
}
|
|
104
|
-
catch (error) {
|
|
105
|
-
throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid base64 data in file content for message ${itemIndex + 1}: ${error.message}`);
|
|
106
|
-
}
|
|
107
|
-
}
|
|
108
|
-
}
|
|
109
|
-
if (fileData == null || (Buffer.isBuffer(fileData) && fileData.length === 0)) {
|
|
110
|
-
continue;
|
|
111
|
-
}
|
|
112
|
-
const mediaType = detectedMimeType || 'application/octet-stream';
|
|
113
|
-
parts.push({
|
|
114
|
-
type: 'file',
|
|
115
|
-
data: fileData,
|
|
116
|
-
mediaType,
|
|
117
|
-
});
|
|
118
|
-
if (parts.length > 0) {
|
|
119
|
-
builtMessages.push({
|
|
120
|
-
role,
|
|
121
|
-
content: parts,
|
|
122
|
-
});
|
|
123
|
-
}
|
|
124
|
-
}
|
|
173
|
+
const messageWithAttachments = await buildMessageWithAttachments(role, msg.content, attachments, itemBinary, exec, itemIndex);
|
|
174
|
+
if (messageWithAttachments) {
|
|
175
|
+
builtMessages.push(messageWithAttachments);
|
|
125
176
|
}
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
177
|
+
}
|
|
178
|
+
}
|
|
179
|
+
const convertMessagesToModel = exec.getNodeParameter('convertMessagesToModel', itemIndex, false);
|
|
180
|
+
if (convertMessagesToModel) {
|
|
181
|
+
return { messages: (0, ai_1.convertToModelMessages)(builtMessages) };
|
|
182
|
+
}
|
|
183
|
+
return { messages: builtMessages };
|
|
184
|
+
}
|
|
185
|
+
async function buildMessageWithAttachments(role, content, attachments, itemBinary, exec, itemIndex) {
|
|
186
|
+
const parts = [];
|
|
187
|
+
if (content) {
|
|
188
|
+
parts.push({ type: 'text', text: content });
|
|
189
|
+
}
|
|
190
|
+
const MAX_CONCURRENT_ATTACHMENTS = 3;
|
|
191
|
+
const processedAttachments = [];
|
|
192
|
+
for (let i = 0; i < attachments.length; i += MAX_CONCURRENT_ATTACHMENTS) {
|
|
193
|
+
const batch = attachments.slice(i, i + MAX_CONCURRENT_ATTACHMENTS);
|
|
194
|
+
const batchPromises = batch.map(attachment => processAttachment(attachment, itemBinary, exec, itemIndex));
|
|
195
|
+
const batchResults = await Promise.all(batchPromises);
|
|
196
|
+
processedAttachments.push(...batchResults);
|
|
197
|
+
}
|
|
198
|
+
for (const attachment of processedAttachments) {
|
|
199
|
+
if (attachment) {
|
|
200
|
+
parts.push(attachment);
|
|
201
|
+
}
|
|
202
|
+
}
|
|
203
|
+
return parts.length > 0 ? { role, content: parts } : null;
|
|
204
|
+
}
|
|
205
|
+
async function processAttachment(attachment, itemBinary, exec, itemIndex) {
|
|
206
|
+
const fileContentInput = attachment.fileContent;
|
|
207
|
+
if (!fileContentInput)
|
|
208
|
+
return null;
|
|
209
|
+
let mimeType = getMimeType(attachment);
|
|
210
|
+
let fileData;
|
|
211
|
+
if (isUrl(fileContentInput)) {
|
|
212
|
+
fileData = fileContentInput;
|
|
213
|
+
}
|
|
214
|
+
else {
|
|
215
|
+
fileData = await getBinaryData(fileContentInput, itemBinary, exec, itemIndex);
|
|
216
|
+
if (!fileData)
|
|
217
|
+
return null;
|
|
218
|
+
const binaryItem = itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput];
|
|
219
|
+
if (!mimeType && (binaryItem === null || binaryItem === void 0 ? void 0 : binaryItem.mimeType)) {
|
|
220
|
+
mimeType = binaryItem.mimeType;
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
if (!fileData || (Buffer.isBuffer(fileData) && fileData.length === 0)) {
|
|
224
|
+
return null;
|
|
225
|
+
}
|
|
226
|
+
return {
|
|
227
|
+
type: 'file',
|
|
228
|
+
data: fileData,
|
|
229
|
+
mediaType: mimeType || 'application/octet-stream',
|
|
230
|
+
};
|
|
231
|
+
}
|
|
232
|
+
function getMimeType(attachment) {
|
|
233
|
+
return attachment.mimeType === 'other'
|
|
234
|
+
? attachment.mimeTypeOther
|
|
235
|
+
: attachment.mimeType;
|
|
236
|
+
}
|
|
237
|
+
async function getBinaryData(fileContentInput, itemBinary, exec, itemIndex) {
|
|
238
|
+
if (itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput]) {
|
|
239
|
+
const binaryData = itemBinary[fileContentInput];
|
|
240
|
+
return Buffer.from(binaryData.data, 'base64');
|
|
241
|
+
}
|
|
242
|
+
try {
|
|
243
|
+
if (isLikelyBase64(fileContentInput)) {
|
|
244
|
+
const buffer = Buffer.from(fileContentInput, 'base64');
|
|
245
|
+
if (buffer.length > 0 && buffer.length < 50 * 1024 * 1024) {
|
|
246
|
+
return buffer;
|
|
130
247
|
}
|
|
131
|
-
return { messages: finalMessages };
|
|
132
248
|
}
|
|
133
249
|
}
|
|
250
|
+
catch (error) {
|
|
251
|
+
throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid file content for attachment: ${error.message}`);
|
|
252
|
+
}
|
|
253
|
+
return null;
|
|
134
254
|
}
|
|
135
255
|
function formatTextResult(result, includeRequestBody, provider) {
|
|
136
|
-
var _a, _b, _c
|
|
137
|
-
let text = result
|
|
138
|
-
let reasoning = result.reasoning;
|
|
256
|
+
var _a, _b, _c;
|
|
257
|
+
let { text, reasoning } = result;
|
|
139
258
|
if (provider === 'groq' && text.includes('<think>')) {
|
|
140
259
|
const thinkMatch = text.match(/<think>(.*?)<\/think>/s);
|
|
141
260
|
if (thinkMatch) {
|
|
142
|
-
reasoning = [{ text: thinkMatch[1].trim() }];
|
|
261
|
+
reasoning = [{ type: 'reasoning', text: thinkMatch[1].trim() }];
|
|
143
262
|
text = text.replace(/<think>.*?<\/think>\s*/s, '').trim();
|
|
144
263
|
}
|
|
145
264
|
}
|
|
146
|
-
const
|
|
265
|
+
const baseResult = {
|
|
147
266
|
text,
|
|
148
267
|
reasoning,
|
|
149
268
|
reasoningText: result.reasoningText,
|
|
@@ -152,855 +271,169 @@ function formatTextResult(result, includeRequestBody, provider) {
|
|
|
152
271
|
finishReason: result.finishReason,
|
|
153
272
|
sources: result.sources || [],
|
|
154
273
|
files: result.files || [],
|
|
155
|
-
usage:
|
|
156
|
-
|
|
157
|
-
completionTokens: (_b = result.usage) === null || _b === void 0 ? void 0 : _b.completionTokens,
|
|
158
|
-
totalTokens: (_c = result.usage) === null || _c === void 0 ? void 0 : _c.totalTokens,
|
|
159
|
-
...(provider === 'deepseek' && {
|
|
160
|
-
cacheMetrics: {
|
|
161
|
-
promptCacheHitTokens: (_e = (_d = result.experimental_providerMetadata) === null || _d === void 0 ? void 0 : _d.deepseek) === null || _e === void 0 ? void 0 : _e.promptCacheHitTokens,
|
|
162
|
-
promptCacheMissTokens: (_g = (_f = result.experimental_providerMetadata) === null || _f === void 0 ? void 0 : _f.deepseek) === null || _g === void 0 ? void 0 : _g.promptCacheMissTokens,
|
|
163
|
-
},
|
|
164
|
-
}),
|
|
165
|
-
...(provider === 'groq' && {
|
|
166
|
-
cacheMetrics: {
|
|
167
|
-
promptCacheHitTokens: (_j = (_h = result.experimental_providerMetadata) === null || _h === void 0 ? void 0 : _h.groq) === null || _j === void 0 ? void 0 : _j.promptCacheHitTokens,
|
|
168
|
-
promptCacheMissTokens: (_l = (_k = result.experimental_providerMetadata) === null || _k === void 0 ? void 0 : _k.groq) === null || _l === void 0 ? void 0 : _l.promptCacheMissTokens,
|
|
169
|
-
},
|
|
170
|
-
}),
|
|
171
|
-
...(provider === 'google' && {
|
|
172
|
-
cacheMetrics: {
|
|
173
|
-
cachedContentTokenCount: (_p = (_o = (_m = result.experimental_providerMetadata) === null || _m === void 0 ? void 0 : _m.google) === null || _o === void 0 ? void 0 : _o.usageMetadata) === null || _p === void 0 ? void 0 : _p.cachedContentTokenCount,
|
|
174
|
-
thoughtsTokenCount: (_s = (_r = (_q = result.experimental_providerMetadata) === null || _q === void 0 ? void 0 : _q.google) === null || _r === void 0 ? void 0 : _r.usageMetadata) === null || _s === void 0 ? void 0 : _s.thoughtsTokenCount,
|
|
175
|
-
},
|
|
176
|
-
}),
|
|
177
|
-
},
|
|
178
|
-
response: {
|
|
179
|
-
id: (_t = result.response) === null || _t === void 0 ? void 0 : _t.id,
|
|
180
|
-
modelId: (_u = result.response) === null || _u === void 0 ? void 0 : _u.modelId,
|
|
181
|
-
timestamp: (_v = result.response) === null || _v === void 0 ? void 0 : _v.timestamp,
|
|
182
|
-
headers: (_w = result.response) === null || _w === void 0 ? void 0 : _w.headers,
|
|
183
|
-
},
|
|
274
|
+
usage: formatUsage(result, provider),
|
|
275
|
+
response: formatResponse(result),
|
|
184
276
|
steps: result.steps || [],
|
|
185
277
|
warnings: result.warnings || [],
|
|
186
278
|
experimental_providerMetadata: result.experimental_providerMetadata,
|
|
187
|
-
...(provider === 'google' && {
|
|
188
|
-
groundingMetadata: (_y = (_x = result.experimental_providerMetadata) === null || _x === void 0 ? void 0 : _x.google) === null || _y === void 0 ? void 0 : _y.groundingMetadata,
|
|
189
|
-
safetyRatings: (_0 = (_z = result.experimental_providerMetadata) === null || _z === void 0 ? void 0 : _z.google) === null || _0 === void 0 ? void 0 : _0.safetyRatings,
|
|
190
|
-
}),
|
|
191
279
|
};
|
|
280
|
+
if (provider === 'google') {
|
|
281
|
+
const providerMetadata = result.experimental_providerMetadata;
|
|
282
|
+
baseResult.groundingMetadata = (_a = providerMetadata === null || providerMetadata === void 0 ? void 0 : providerMetadata.google) === null || _a === void 0 ? void 0 : _a.groundingMetadata;
|
|
283
|
+
baseResult.safetyRatings = (_b = providerMetadata === null || providerMetadata === void 0 ? void 0 : providerMetadata.google) === null || _b === void 0 ? void 0 : _b.safetyRatings;
|
|
284
|
+
}
|
|
192
285
|
if (includeRequestBody) {
|
|
193
|
-
|
|
286
|
+
const requestBody = (_c = result.request) === null || _c === void 0 ? void 0 : _c.body;
|
|
287
|
+
if (requestBody !== undefined) {
|
|
288
|
+
baseResult.request = { body: requestBody };
|
|
289
|
+
}
|
|
194
290
|
}
|
|
195
|
-
return
|
|
291
|
+
return baseResult;
|
|
196
292
|
}
|
|
197
|
-
function formatObjectResult(result, includeRequestBody) {
|
|
198
|
-
var _a
|
|
293
|
+
function formatObjectResult(result, includeRequestBody, provider) {
|
|
294
|
+
var _a;
|
|
199
295
|
const out = {
|
|
200
296
|
object: result.object,
|
|
201
297
|
finishReason: result.finishReason,
|
|
202
|
-
usage:
|
|
203
|
-
|
|
204
|
-
completionTokens: (_b = result.usage) === null || _b === void 0 ? void 0 : _b.completionTokens,
|
|
205
|
-
totalTokens: (_c = result.usage) === null || _c === void 0 ? void 0 : _c.totalTokens,
|
|
206
|
-
},
|
|
207
|
-
response: {
|
|
208
|
-
id: (_d = result.response) === null || _d === void 0 ? void 0 : _d.id,
|
|
209
|
-
modelId: (_e = result.response) === null || _e === void 0 ? void 0 : _e.modelId,
|
|
210
|
-
timestamp: (_f = result.response) === null || _f === void 0 ? void 0 : _f.timestamp,
|
|
211
|
-
headers: (_g = result.response) === null || _g === void 0 ? void 0 : _g.headers,
|
|
212
|
-
},
|
|
298
|
+
usage: formatUsage(result, provider),
|
|
299
|
+
response: formatResponse(result),
|
|
213
300
|
warnings: result.warnings || [],
|
|
214
301
|
experimental_providerMetadata: result.experimental_providerMetadata,
|
|
215
302
|
};
|
|
216
303
|
if (includeRequestBody) {
|
|
217
|
-
out.request = { body: (
|
|
304
|
+
out.request = { body: (_a = result.request) === null || _a === void 0 ? void 0 : _a.body };
|
|
218
305
|
}
|
|
219
306
|
return out;
|
|
220
307
|
}
|
|
308
|
+
function formatUsage(result, provider) {
|
|
309
|
+
var _a, _b, _c;
|
|
310
|
+
const usage = {
|
|
311
|
+
promptTokens: (_a = result.usage) === null || _a === void 0 ? void 0 : _a.promptTokens,
|
|
312
|
+
completionTokens: (_b = result.usage) === null || _b === void 0 ? void 0 : _b.completionTokens,
|
|
313
|
+
totalTokens: (_c = result.usage) === null || _c === void 0 ? void 0 : _c.totalTokens,
|
|
314
|
+
};
|
|
315
|
+
const cacheMetrics = getCacheMetrics(result, provider);
|
|
316
|
+
if (Object.keys(cacheMetrics).length > 0) {
|
|
317
|
+
usage.cacheMetrics = cacheMetrics;
|
|
318
|
+
}
|
|
319
|
+
return usage;
|
|
320
|
+
}
|
|
321
|
+
function getCacheMetrics(result, provider) {
|
|
322
|
+
var _a, _b, _c, _d, _e, _f, _g, _h;
|
|
323
|
+
const metadata = result.experimental_providerMetadata;
|
|
324
|
+
switch (provider) {
|
|
325
|
+
case 'deepseek':
|
|
326
|
+
return {
|
|
327
|
+
promptCacheHitTokens: (_a = metadata === null || metadata === void 0 ? void 0 : metadata.deepseek) === null || _a === void 0 ? void 0 : _a.promptCacheHitTokens,
|
|
328
|
+
promptCacheMissTokens: (_b = metadata === null || metadata === void 0 ? void 0 : metadata.deepseek) === null || _b === void 0 ? void 0 : _b.promptCacheMissTokens,
|
|
329
|
+
};
|
|
330
|
+
case 'groq':
|
|
331
|
+
return {
|
|
332
|
+
promptCacheHitTokens: (_c = metadata === null || metadata === void 0 ? void 0 : metadata.groq) === null || _c === void 0 ? void 0 : _c.promptCacheHitTokens,
|
|
333
|
+
promptCacheMissTokens: (_d = metadata === null || metadata === void 0 ? void 0 : metadata.groq) === null || _d === void 0 ? void 0 : _d.promptCacheMissTokens,
|
|
334
|
+
};
|
|
335
|
+
case 'google':
|
|
336
|
+
return {
|
|
337
|
+
cachedContentTokenCount: (_f = (_e = metadata === null || metadata === void 0 ? void 0 : metadata.google) === null || _e === void 0 ? void 0 : _e.usageMetadata) === null || _f === void 0 ? void 0 : _f.cachedContentTokenCount,
|
|
338
|
+
thoughtsTokenCount: (_h = (_g = metadata === null || metadata === void 0 ? void 0 : metadata.google) === null || _g === void 0 ? void 0 : _g.usageMetadata) === null || _h === void 0 ? void 0 : _h.thoughtsTokenCount,
|
|
339
|
+
};
|
|
340
|
+
default:
|
|
341
|
+
return {};
|
|
342
|
+
}
|
|
343
|
+
}
|
|
344
|
+
function formatResponse(result) {
|
|
345
|
+
var _a, _b, _c, _d;
|
|
346
|
+
return {
|
|
347
|
+
id: (_a = result.response) === null || _a === void 0 ? void 0 : _a.id,
|
|
348
|
+
modelId: (_b = result.response) === null || _b === void 0 ? void 0 : _b.modelId,
|
|
349
|
+
timestamp: (_c = result.response) === null || _c === void 0 ? void 0 : _c.timestamp,
|
|
350
|
+
headers: (_d = result.response) === null || _d === void 0 ? void 0 : _d.headers,
|
|
351
|
+
};
|
|
352
|
+
}
|
|
353
|
+
async function getProvider(provider, apiKey, baseURL) {
|
|
354
|
+
const cacheKey = `${provider}:${apiKey}:${baseURL || ''}`;
|
|
355
|
+
const cached = providerCache.get(cacheKey);
|
|
356
|
+
if (cached)
|
|
357
|
+
return cached;
|
|
358
|
+
let providerInstance;
|
|
359
|
+
try {
|
|
360
|
+
switch (provider) {
|
|
361
|
+
case 'google':
|
|
362
|
+
if (!googleProvider) {
|
|
363
|
+
googleProvider = require('@ai-sdk/google').createGoogleGenerativeAI;
|
|
364
|
+
}
|
|
365
|
+
providerInstance = googleProvider({ apiKey, ...(baseURL && { baseURL }) });
|
|
366
|
+
break;
|
|
367
|
+
case 'deepseek':
|
|
368
|
+
if (!deepseekProvider) {
|
|
369
|
+
deepseekProvider = require('@ai-sdk/deepseek').createDeepSeek;
|
|
370
|
+
}
|
|
371
|
+
providerInstance = deepseekProvider({ apiKey, ...(baseURL && { baseURL }) });
|
|
372
|
+
break;
|
|
373
|
+
case 'groq':
|
|
374
|
+
if (!groqProvider) {
|
|
375
|
+
groqProvider = require('@ai-sdk/groq').createGroq;
|
|
376
|
+
}
|
|
377
|
+
providerInstance = groqProvider({ apiKey, ...(baseURL && { baseURL }) });
|
|
378
|
+
break;
|
|
379
|
+
case 'openrouter':
|
|
380
|
+
if (!openrouterProvider) {
|
|
381
|
+
openrouterProvider = require('@openrouter/ai-sdk-provider').createOpenRouter;
|
|
382
|
+
}
|
|
383
|
+
providerInstance = openrouterProvider({ apiKey, ...(baseURL && { baseURL }) });
|
|
384
|
+
break;
|
|
385
|
+
default:
|
|
386
|
+
throw new Error(`Unsupported provider: ${provider}`);
|
|
387
|
+
}
|
|
388
|
+
providerCache.set(cacheKey, providerInstance);
|
|
389
|
+
return providerInstance;
|
|
390
|
+
}
|
|
391
|
+
catch (error) {
|
|
392
|
+
throw new Error(`Failed to initialize ${provider} provider: ${error.message}`);
|
|
393
|
+
}
|
|
394
|
+
}
|
|
395
|
+
function parseAndValidateSchema(rawSchema, exec) {
|
|
396
|
+
const cacheKey = `schema:${Buffer.from(rawSchema).toString('base64').substring(0, 50)}`;
|
|
397
|
+
const cached = schemaCache.get(cacheKey);
|
|
398
|
+
if (cached)
|
|
399
|
+
return cached;
|
|
400
|
+
let parsedSchema;
|
|
401
|
+
try {
|
|
402
|
+
parsedSchema = JSON.parse(rawSchema);
|
|
403
|
+
}
|
|
404
|
+
catch (err) {
|
|
405
|
+
throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Schema is not valid JSON: ' + err.message);
|
|
406
|
+
}
|
|
407
|
+
if (!ajv.validateSchema(parsedSchema)) {
|
|
408
|
+
throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
|
|
409
|
+
}
|
|
410
|
+
schemaCache.set(cacheKey, parsedSchema);
|
|
411
|
+
return parsedSchema;
|
|
412
|
+
}
|
|
413
|
+
function parseStopSequences(stopSequencesStr) {
|
|
414
|
+
if (!stopSequencesStr)
|
|
415
|
+
return undefined;
|
|
416
|
+
return stopSequencesStr.split(',').map(s => s.trim()).filter(s => s.length > 0);
|
|
417
|
+
}
|
|
221
418
|
class UniversalAI {
|
|
222
419
|
constructor() {
|
|
223
|
-
this.description =
|
|
224
|
-
displayName: 'Universal AI',
|
|
225
|
-
name: 'universalAi',
|
|
226
|
-
icon: 'file:icons/UniversalAI.svg',
|
|
227
|
-
group: ['transform'],
|
|
228
|
-
version: 2,
|
|
229
|
-
subtitle: '={{$parameter["resource"] + ": " + $parameter["provider"] + ": " + $parameter["model"]}}',
|
|
230
|
-
description: 'Use multiple AI providers via Vercel AI SDK',
|
|
231
|
-
defaults: {
|
|
232
|
-
name: 'Universal AI',
|
|
233
|
-
},
|
|
234
|
-
inputs: [{ type: "main" }],
|
|
235
|
-
outputs: [{ type: "main" }],
|
|
236
|
-
credentials: [
|
|
237
|
-
{
|
|
238
|
-
name: 'googleGenerativeAIApi',
|
|
239
|
-
required: false,
|
|
240
|
-
displayOptions: {
|
|
241
|
-
show: {
|
|
242
|
-
provider: ['google'],
|
|
243
|
-
},
|
|
244
|
-
},
|
|
245
|
-
},
|
|
246
|
-
{
|
|
247
|
-
name: 'deepSeekApi',
|
|
248
|
-
required: false,
|
|
249
|
-
displayOptions: {
|
|
250
|
-
show: {
|
|
251
|
-
provider: ['deepseek'],
|
|
252
|
-
},
|
|
253
|
-
},
|
|
254
|
-
},
|
|
255
|
-
{
|
|
256
|
-
name: 'groqApi',
|
|
257
|
-
required: false,
|
|
258
|
-
displayOptions: {
|
|
259
|
-
show: {
|
|
260
|
-
provider: ['groq'],
|
|
261
|
-
},
|
|
262
|
-
},
|
|
263
|
-
},
|
|
264
|
-
{
|
|
265
|
-
name: 'openRouterApi',
|
|
266
|
-
required: false,
|
|
267
|
-
displayOptions: {
|
|
268
|
-
show: {
|
|
269
|
-
provider: ['openrouter'],
|
|
270
|
-
},
|
|
271
|
-
},
|
|
272
|
-
},
|
|
273
|
-
],
|
|
274
|
-
properties: [
|
|
275
|
-
{
|
|
276
|
-
displayName: 'Resource',
|
|
277
|
-
name: 'resource',
|
|
278
|
-
type: 'options',
|
|
279
|
-
required: true,
|
|
280
|
-
noDataExpression: true,
|
|
281
|
-
options: [
|
|
282
|
-
{
|
|
283
|
-
name: 'AI',
|
|
284
|
-
value: 'ai',
|
|
285
|
-
},
|
|
286
|
-
],
|
|
287
|
-
default: 'ai',
|
|
288
|
-
description: 'The resource to operate on',
|
|
289
|
-
},
|
|
290
|
-
{
|
|
291
|
-
displayName: 'Provider',
|
|
292
|
-
name: 'provider',
|
|
293
|
-
type: 'options',
|
|
294
|
-
required: true,
|
|
295
|
-
noDataExpression: true,
|
|
296
|
-
displayOptions: {
|
|
297
|
-
show: {
|
|
298
|
-
resource: ['ai'],
|
|
299
|
-
},
|
|
300
|
-
},
|
|
301
|
-
options: [
|
|
302
|
-
{
|
|
303
|
-
name: 'Google Generative AI',
|
|
304
|
-
value: 'google',
|
|
305
|
-
description: 'Google Gemini models with multimodal support',
|
|
306
|
-
},
|
|
307
|
-
{
|
|
308
|
-
name: 'DeepSeek',
|
|
309
|
-
value: 'deepseek',
|
|
310
|
-
description: 'DeepSeek models with strong reasoning capabilities',
|
|
311
|
-
},
|
|
312
|
-
{
|
|
313
|
-
name: 'Groq',
|
|
314
|
-
value: 'groq',
|
|
315
|
-
description: 'High-performance models via Groq LPU',
|
|
316
|
-
},
|
|
317
|
-
{
|
|
318
|
-
name: 'OpenRouter',
|
|
319
|
-
value: 'openrouter',
|
|
320
|
-
description: 'Access multiple models through OpenRouter',
|
|
321
|
-
},
|
|
322
|
-
],
|
|
323
|
-
default: 'google',
|
|
324
|
-
description: 'Choose which AI provider to use',
|
|
325
|
-
},
|
|
326
|
-
{
|
|
327
|
-
displayName: 'Operation',
|
|
328
|
-
name: 'operation',
|
|
329
|
-
type: 'options',
|
|
330
|
-
required: true,
|
|
331
|
-
noDataExpression: true,
|
|
332
|
-
options: [
|
|
333
|
-
{
|
|
334
|
-
name: 'Generate Text',
|
|
335
|
-
value: 'generateText',
|
|
336
|
-
description: 'Generate text using simple prompt or chat messages',
|
|
337
|
-
action: 'Generate text',
|
|
338
|
-
},
|
|
339
|
-
{
|
|
340
|
-
name: 'Generate Object',
|
|
341
|
-
value: 'generateObject',
|
|
342
|
-
description: 'Generate a structured object based on a JSON schema',
|
|
343
|
-
action: 'Generate object',
|
|
344
|
-
},
|
|
345
|
-
],
|
|
346
|
-
default: 'generateText',
|
|
347
|
-
description: 'Which type of output you want to generate',
|
|
348
|
-
},
|
|
349
|
-
{
|
|
350
|
-
displayName: 'Model Name or ID',
|
|
351
|
-
name: 'model',
|
|
352
|
-
type: 'options',
|
|
353
|
-
required: true,
|
|
354
|
-
typeOptions: {
|
|
355
|
-
loadOptionsMethod: 'getModels',
|
|
356
|
-
loadOptionsDependsOn: ['provider'],
|
|
357
|
-
},
|
|
358
|
-
default: '',
|
|
359
|
-
description: 'Select which model to use. Choose from the list, or specify an ID using an <a href="https://docs.n8n.io/code-examples/expressions/">expression</a>.',
|
|
360
|
-
},
|
|
361
|
-
{
|
|
362
|
-
displayName: 'Input Type',
|
|
363
|
-
name: 'inputType',
|
|
364
|
-
type: 'options',
|
|
365
|
-
required: true,
|
|
366
|
-
noDataExpression: true,
|
|
367
|
-
options: [
|
|
368
|
-
{
|
|
369
|
-
name: 'Simple Prompt',
|
|
370
|
-
value: 'prompt',
|
|
371
|
-
description: 'Use a single prompt',
|
|
372
|
-
},
|
|
373
|
-
{
|
|
374
|
-
name: 'Messages',
|
|
375
|
-
value: 'messages',
|
|
376
|
-
description: 'Use a conversation with multiple messages',
|
|
377
|
-
},
|
|
378
|
-
],
|
|
379
|
-
default: 'prompt',
|
|
380
|
-
description: 'Choose how you want to provide input to the model',
|
|
381
|
-
},
|
|
382
|
-
{
|
|
383
|
-
displayName: 'System',
|
|
384
|
-
name: 'system',
|
|
385
|
-
type: 'string',
|
|
386
|
-
typeOptions: {
|
|
387
|
-
rows: 4,
|
|
388
|
-
},
|
|
389
|
-
displayOptions: {
|
|
390
|
-
show: {
|
|
391
|
-
inputType: ['prompt'],
|
|
392
|
-
},
|
|
393
|
-
},
|
|
394
|
-
default: 'You are a helpful assistant.',
|
|
395
|
-
description: "System prompt that specifies the model's behavior",
|
|
396
|
-
hint: "This field is optional, but can help guide the model's responses.",
|
|
397
|
-
requiresDataPath: 'single',
|
|
398
|
-
},
|
|
399
|
-
{
|
|
400
|
-
displayName: 'Prompt',
|
|
401
|
-
name: 'prompt',
|
|
402
|
-
type: 'string',
|
|
403
|
-
typeOptions: {
|
|
404
|
-
rows: 4,
|
|
405
|
-
},
|
|
406
|
-
displayOptions: {
|
|
407
|
-
show: {
|
|
408
|
-
inputType: ['prompt'],
|
|
409
|
-
},
|
|
410
|
-
},
|
|
411
|
-
default: '',
|
|
412
|
-
required: true,
|
|
413
|
-
description: 'The single text prompt to generate a completion for',
|
|
414
|
-
hint: 'You can drag data from previous nodes here using expressions.',
|
|
415
|
-
requiresDataPath: 'single',
|
|
416
|
-
},
|
|
417
|
-
{
|
|
418
|
-
displayName: 'Messages',
|
|
419
|
-
name: 'messages',
|
|
420
|
-
type: 'fixedCollection',
|
|
421
|
-
typeOptions: {
|
|
422
|
-
multipleValues: true,
|
|
423
|
-
sortable: true,
|
|
424
|
-
minValue: 1,
|
|
425
|
-
},
|
|
426
|
-
displayOptions: {
|
|
427
|
-
show: {
|
|
428
|
-
inputType: ['messages'],
|
|
429
|
-
messageAsJson: [false],
|
|
430
|
-
},
|
|
431
|
-
},
|
|
432
|
-
description: 'The messages for the conversation',
|
|
433
|
-
default: {
|
|
434
|
-
messagesUi: [
|
|
435
|
-
{
|
|
436
|
-
role: 'system',
|
|
437
|
-
systemContent: 'You are a helpful assistant.',
|
|
438
|
-
},
|
|
439
|
-
{
|
|
440
|
-
role: 'user',
|
|
441
|
-
contentType: 'text',
|
|
442
|
-
content: 'How can you help me?',
|
|
443
|
-
},
|
|
444
|
-
],
|
|
445
|
-
},
|
|
446
|
-
required: true,
|
|
447
|
-
options: [
|
|
448
|
-
{
|
|
449
|
-
name: 'messagesUi',
|
|
450
|
-
displayName: 'Message',
|
|
451
|
-
values: [
|
|
452
|
-
{
|
|
453
|
-
displayName: 'Role',
|
|
454
|
-
name: 'role',
|
|
455
|
-
type: 'options',
|
|
456
|
-
noDataExpression: true,
|
|
457
|
-
options: [
|
|
458
|
-
{
|
|
459
|
-
name: 'Assistant',
|
|
460
|
-
value: 'assistant',
|
|
461
|
-
},
|
|
462
|
-
{
|
|
463
|
-
name: 'System',
|
|
464
|
-
value: 'system',
|
|
465
|
-
},
|
|
466
|
-
{
|
|
467
|
-
name: 'User',
|
|
468
|
-
value: 'user',
|
|
469
|
-
},
|
|
470
|
-
],
|
|
471
|
-
default: 'user',
|
|
472
|
-
required: true,
|
|
473
|
-
},
|
|
474
|
-
{
|
|
475
|
-
displayName: 'System Content',
|
|
476
|
-
name: 'systemContent',
|
|
477
|
-
type: 'string',
|
|
478
|
-
description: 'The text content if role is System',
|
|
479
|
-
required: true,
|
|
480
|
-
typeOptions: {
|
|
481
|
-
rows: 4,
|
|
482
|
-
},
|
|
483
|
-
default: '',
|
|
484
|
-
displayOptions: {
|
|
485
|
-
show: {
|
|
486
|
-
role: ['system'],
|
|
487
|
-
},
|
|
488
|
-
},
|
|
489
|
-
requiresDataPath: 'single',
|
|
490
|
-
},
|
|
491
|
-
{
|
|
492
|
-
displayName: 'Content Type',
|
|
493
|
-
name: 'contentType',
|
|
494
|
-
type: 'options',
|
|
495
|
-
noDataExpression: true,
|
|
496
|
-
options: [
|
|
497
|
-
{
|
|
498
|
-
name: 'Text',
|
|
499
|
-
value: 'text',
|
|
500
|
-
},
|
|
501
|
-
{
|
|
502
|
-
name: 'Binary File',
|
|
503
|
-
value: 'file',
|
|
504
|
-
},
|
|
505
|
-
],
|
|
506
|
-
default: 'text',
|
|
507
|
-
description: 'The type of content to send',
|
|
508
|
-
required: true,
|
|
509
|
-
displayOptions: {
|
|
510
|
-
show: {
|
|
511
|
-
role: ['assistant', 'user'],
|
|
512
|
-
},
|
|
513
|
-
},
|
|
514
|
-
},
|
|
515
|
-
{
|
|
516
|
-
displayName: 'Text Content',
|
|
517
|
-
name: 'content',
|
|
518
|
-
type: 'string',
|
|
519
|
-
typeOptions: {
|
|
520
|
-
rows: 4,
|
|
521
|
-
},
|
|
522
|
-
displayOptions: {
|
|
523
|
-
show: {
|
|
524
|
-
role: ['assistant', 'user'],
|
|
525
|
-
contentType: ['text'],
|
|
526
|
-
},
|
|
527
|
-
},
|
|
528
|
-
default: '',
|
|
529
|
-
description: 'The text content of the message',
|
|
530
|
-
required: true,
|
|
531
|
-
requiresDataPath: 'single',
|
|
532
|
-
},
|
|
533
|
-
{
|
|
534
|
-
displayName: 'File Content',
|
|
535
|
-
name: 'fileContent',
|
|
536
|
-
type: 'string',
|
|
537
|
-
default: '',
|
|
538
|
-
displayOptions: {
|
|
539
|
-
show: {
|
|
540
|
-
role: ['assistant', 'user'],
|
|
541
|
-
contentType: ['file'],
|
|
542
|
-
},
|
|
543
|
-
},
|
|
544
|
-
description: 'File content: binary property name, base64 string, or http(s)/data: URL',
|
|
545
|
-
required: true,
|
|
546
|
-
},
|
|
547
|
-
{
|
|
548
|
-
displayName: 'MIME Type',
|
|
549
|
-
name: 'mimeType',
|
|
550
|
-
type: 'options',
|
|
551
|
-
default: 'application/octet-stream',
|
|
552
|
-
description: 'Select the MIME type of the file; choose Other to specify a custom MIME type',
|
|
553
|
-
options: [
|
|
554
|
-
{
|
|
555
|
-
name: 'JPEG Image (Image/jpeg)',
|
|
556
|
-
value: 'image/jpeg',
|
|
557
|
-
},
|
|
558
|
-
{
|
|
559
|
-
name: 'JSON (Application/json)',
|
|
560
|
-
value: 'application/json',
|
|
561
|
-
},
|
|
562
|
-
{
|
|
563
|
-
name: 'MP3 Audio (Audio/mpeg)',
|
|
564
|
-
value: 'audio/mpeg',
|
|
565
|
-
},
|
|
566
|
-
{
|
|
567
|
-
name: 'MP4 Video (Video/mp4)',
|
|
568
|
-
value: 'video/mp4',
|
|
569
|
-
},
|
|
570
|
-
{
|
|
571
|
-
name: 'Octet Stream (Default)',
|
|
572
|
-
value: 'application/octet-stream',
|
|
573
|
-
},
|
|
574
|
-
{
|
|
575
|
-
name: 'Other (Specify Below)',
|
|
576
|
-
value: 'other',
|
|
577
|
-
},
|
|
578
|
-
{
|
|
579
|
-
name: 'PDF (Application/pdf)',
|
|
580
|
-
value: 'application/pdf',
|
|
581
|
-
},
|
|
582
|
-
{
|
|
583
|
-
name: 'Plain Text (Text/plain)',
|
|
584
|
-
value: 'text/plain',
|
|
585
|
-
},
|
|
586
|
-
{
|
|
587
|
-
name: 'PNG Image (Image/png)',
|
|
588
|
-
value: 'image/png',
|
|
589
|
-
},
|
|
590
|
-
{
|
|
591
|
-
name: 'WAV Audio (Audio/wav)',
|
|
592
|
-
value: 'audio/wav',
|
|
593
|
-
},
|
|
594
|
-
],
|
|
595
|
-
displayOptions: {
|
|
596
|
-
show: {
|
|
597
|
-
role: ['assistant', 'user'],
|
|
598
|
-
contentType: ['file'],
|
|
599
|
-
},
|
|
600
|
-
},
|
|
601
|
-
},
|
|
602
|
-
{
|
|
603
|
-
displayName: 'Other MIME Type',
|
|
604
|
-
name: 'mimeTypeOther',
|
|
605
|
-
type: 'string',
|
|
606
|
-
default: '',
|
|
607
|
-
description: 'Specify a custom MIME type, e.g. application/x-zip-compressed',
|
|
608
|
-
displayOptions: {
|
|
609
|
-
show: {
|
|
610
|
-
role: ['assistant', 'user'],
|
|
611
|
-
contentType: ['file'],
|
|
612
|
-
mimeType: ['other'],
|
|
613
|
-
},
|
|
614
|
-
},
|
|
615
|
-
},
|
|
616
|
-
{
|
|
617
|
-
displayName: 'Additional Text',
|
|
618
|
-
name: 'content',
|
|
619
|
-
type: 'string',
|
|
620
|
-
typeOptions: {
|
|
621
|
-
rows: 2,
|
|
622
|
-
},
|
|
623
|
-
displayOptions: {
|
|
624
|
-
show: {
|
|
625
|
-
role: ['assistant', 'user'],
|
|
626
|
-
contentType: ['file'],
|
|
627
|
-
},
|
|
628
|
-
},
|
|
629
|
-
default: 'Please analyze this file.',
|
|
630
|
-
description: 'Additional text to include with the file',
|
|
631
|
-
required: true,
|
|
632
|
-
requiresDataPath: 'single',
|
|
633
|
-
},
|
|
634
|
-
],
|
|
635
|
-
},
|
|
636
|
-
],
|
|
637
|
-
},
|
|
638
|
-
{
|
|
639
|
-
displayName: 'Convert Messages to Model Messages',
|
|
640
|
-
name: 'convertMessagesToModel',
|
|
641
|
-
type: 'boolean',
|
|
642
|
-
default: false,
|
|
643
|
-
description: 'Convert UI messages to ModelMessage format using convertToModelMessages. Enable if you get "Invalid prompt: The messages must be a ModelMessage[]" errors.',
|
|
644
|
-
displayOptions: {
|
|
645
|
-
show: {
|
|
646
|
-
inputType: ['messages'],
|
|
647
|
-
},
|
|
648
|
-
},
|
|
649
|
-
},
|
|
650
|
-
{
|
|
651
|
-
displayName: 'Messages as JSON',
|
|
652
|
-
name: 'messageAsJson',
|
|
653
|
-
type: 'boolean',
|
|
654
|
-
default: false,
|
|
655
|
-
noDataExpression: true,
|
|
656
|
-
description: 'Whether to input messages as a JSON array instead of using the UI',
|
|
657
|
-
displayOptions: {
|
|
658
|
-
show: {
|
|
659
|
-
operation: ['generateText', 'generateObject'],
|
|
660
|
-
inputType: ['messages'],
|
|
661
|
-
},
|
|
662
|
-
},
|
|
663
|
-
},
|
|
664
|
-
{
|
|
665
|
-
displayName: 'Messages (JSON)',
|
|
666
|
-
name: 'messagesJson',
|
|
667
|
-
type: 'string',
|
|
668
|
-
default: '=[{"role": "user", "content": "Hello!"}]',
|
|
669
|
-
description: 'Enter an array of message objects in JSON format (role, content)',
|
|
670
|
-
required: true,
|
|
671
|
-
typeOptions: {
|
|
672
|
-
rows: 4,
|
|
673
|
-
},
|
|
674
|
-
noDataExpression: false,
|
|
675
|
-
requiresDataPath: 'single',
|
|
676
|
-
displayOptions: {
|
|
677
|
-
show: {
|
|
678
|
-
operation: ['generateText', 'generateObject'],
|
|
679
|
-
inputType: ['messages'],
|
|
680
|
-
messageAsJson: [true],
|
|
681
|
-
},
|
|
682
|
-
},
|
|
683
|
-
},
|
|
684
|
-
{
|
|
685
|
-
displayName: 'Schema Name',
|
|
686
|
-
name: 'schemaName',
|
|
687
|
-
type: 'string',
|
|
688
|
-
default: '',
|
|
689
|
-
description: 'Name of the output schema (optional)',
|
|
690
|
-
hint: 'Some providers use this name for additional guidance when generating objects.',
|
|
691
|
-
displayOptions: {
|
|
692
|
-
show: {
|
|
693
|
-
operation: ['generateObject'],
|
|
694
|
-
},
|
|
695
|
-
},
|
|
696
|
-
},
|
|
697
|
-
{
|
|
698
|
-
displayName: 'Schema Description',
|
|
699
|
-
name: 'schemaDescription',
|
|
700
|
-
type: 'string',
|
|
701
|
-
default: '',
|
|
702
|
-
description: 'Description of the output schema (optional)',
|
|
703
|
-
hint: 'Some providers use this description for additional guidance when generating objects.',
|
|
704
|
-
displayOptions: {
|
|
705
|
-
show: {
|
|
706
|
-
operation: ['generateObject'],
|
|
707
|
-
},
|
|
708
|
-
},
|
|
709
|
-
},
|
|
710
|
-
{
|
|
711
|
-
displayName: 'Schema',
|
|
712
|
-
name: 'schema',
|
|
713
|
-
type: 'json',
|
|
714
|
-
displayOptions: {
|
|
715
|
-
show: {
|
|
716
|
-
operation: ['generateObject'],
|
|
717
|
-
},
|
|
718
|
-
},
|
|
719
|
-
default: `{\n\t"type": "object",\n\t"properties": {\n\t\t"sentiment": {\n\t\t"type": "string",\n\t\t"enum": ["positive","negative","neutral"],\n\t\t"description": "The overall sentiment of the text"\n\t\t},\n\t\t"score": {\n\t\t"type": "number",\n\t\t"minimum": -1,\n\t\t"maximum": 1,\n\t\t"description": "Sentiment score from -1 (negative) to 1 (positive)"\n\t\t},\n\t\t"text": {\n\t\t"type": "string",\n\t\t"description": "The text content to analyze"\n\t\t}\n\t}\n}`,
|
|
720
|
-
required: true,
|
|
721
|
-
description: 'JSON schema describing the structure and constraints of the object to generate',
|
|
722
|
-
hint: 'For example, a schema describing sentiment analysis output.',
|
|
723
|
-
requiresDataPath: 'single',
|
|
724
|
-
},
|
|
725
|
-
{
|
|
726
|
-
displayName: 'Options',
|
|
727
|
-
name: 'options',
|
|
728
|
-
type: 'collection',
|
|
729
|
-
placeholder: 'Add Option',
|
|
730
|
-
default: {},
|
|
731
|
-
options: [
|
|
732
|
-
{
|
|
733
|
-
displayName: 'Max Tokens',
|
|
734
|
-
name: 'maxTokens',
|
|
735
|
-
type: 'number',
|
|
736
|
-
typeOptions: {
|
|
737
|
-
minValue: 1,
|
|
738
|
-
},
|
|
739
|
-
default: 2048,
|
|
740
|
-
description: 'The maximum number of tokens to generate',
|
|
741
|
-
},
|
|
742
|
-
{
|
|
743
|
-
displayName: 'Temperature',
|
|
744
|
-
name: 'temperature',
|
|
745
|
-
type: 'number',
|
|
746
|
-
typeOptions: {
|
|
747
|
-
minValue: 0,
|
|
748
|
-
maxValue: 2,
|
|
749
|
-
numberPrecision: 2,
|
|
750
|
-
},
|
|
751
|
-
default: 0.7,
|
|
752
|
-
description: 'Higher values produce more random outputs',
|
|
753
|
-
},
|
|
754
|
-
{
|
|
755
|
-
displayName: 'Top P',
|
|
756
|
-
name: 'topP',
|
|
757
|
-
type: 'number',
|
|
758
|
-
typeOptions: {
|
|
759
|
-
minValue: 0,
|
|
760
|
-
maxValue: 1,
|
|
761
|
-
numberPrecision: 2,
|
|
762
|
-
},
|
|
763
|
-
default: 1,
|
|
764
|
-
description: 'Controls diversity via nucleus sampling. Lower values make outputs more focused.',
|
|
765
|
-
},
|
|
766
|
-
{
|
|
767
|
-
displayName: 'Top K',
|
|
768
|
-
name: 'topK',
|
|
769
|
-
type: 'number',
|
|
770
|
-
typeOptions: {
|
|
771
|
-
minValue: 1,
|
|
772
|
-
},
|
|
773
|
-
default: 50,
|
|
774
|
-
description: 'Limits the number of tokens considered for sampling. Only applies to some providers.',
|
|
775
|
-
displayOptions: {
|
|
776
|
-
show: {
|
|
777
|
-
'/provider': ['google'],
|
|
778
|
-
},
|
|
779
|
-
},
|
|
780
|
-
},
|
|
781
|
-
{
|
|
782
|
-
displayName: 'Frequency Penalty',
|
|
783
|
-
name: 'frequencyPenalty',
|
|
784
|
-
type: 'number',
|
|
785
|
-
typeOptions: {
|
|
786
|
-
minValue: -2,
|
|
787
|
-
maxValue: 2,
|
|
788
|
-
numberPrecision: 2,
|
|
789
|
-
},
|
|
790
|
-
default: 0,
|
|
791
|
-
description: 'Reduces repetition of frequent tokens. Positive values decrease repetition.',
|
|
792
|
-
},
|
|
793
|
-
{
|
|
794
|
-
displayName: 'Presence Penalty',
|
|
795
|
-
name: 'presencePenalty',
|
|
796
|
-
type: 'number',
|
|
797
|
-
typeOptions: {
|
|
798
|
-
minValue: -2,
|
|
799
|
-
maxValue: 2,
|
|
800
|
-
numberPrecision: 2,
|
|
801
|
-
},
|
|
802
|
-
default: 0,
|
|
803
|
-
description: 'Encourages talking about new topics. Positive values increase likelihood of new topics.',
|
|
804
|
-
},
|
|
805
|
-
{
|
|
806
|
-
displayName: 'Seed',
|
|
807
|
-
name: 'seed',
|
|
808
|
-
type: 'number',
|
|
809
|
-
default: 0,
|
|
810
|
-
description: 'Random seed for reproducible outputs. Not supported by all providers.',
|
|
811
|
-
},
|
|
812
|
-
{
|
|
813
|
-
displayName: 'Stop Sequences',
|
|
814
|
-
name: 'stopSequences',
|
|
815
|
-
type: 'string',
|
|
816
|
-
default: '',
|
|
817
|
-
description: 'Sequences where the API will stop generating text. Separate multiple sequences with commas.',
|
|
818
|
-
displayOptions: {
|
|
819
|
-
show: {
|
|
820
|
-
'/provider': ['google', 'openrouter'],
|
|
821
|
-
},
|
|
822
|
-
},
|
|
823
|
-
},
|
|
824
|
-
{
|
|
825
|
-
displayName: 'Include Request Body',
|
|
826
|
-
name: 'includeRequestBody',
|
|
827
|
-
type: 'boolean',
|
|
828
|
-
default: false,
|
|
829
|
-
description: 'Whether to include the request body in the output',
|
|
830
|
-
},
|
|
831
|
-
],
|
|
832
|
-
},
|
|
833
|
-
{
|
|
834
|
-
displayName: 'Safety Settings',
|
|
835
|
-
name: 'safetySettings',
|
|
836
|
-
type: 'fixedCollection',
|
|
837
|
-
typeOptions: {
|
|
838
|
-
multipleValues: true,
|
|
839
|
-
},
|
|
840
|
-
default: {},
|
|
841
|
-
displayOptions: {
|
|
842
|
-
show: {
|
|
843
|
-
provider: ['google'],
|
|
844
|
-
},
|
|
845
|
-
},
|
|
846
|
-
options: [
|
|
847
|
-
{
|
|
848
|
-
name: 'settings',
|
|
849
|
-
displayName: 'Setting',
|
|
850
|
-
values: [
|
|
851
|
-
{
|
|
852
|
-
displayName: 'Category',
|
|
853
|
-
name: 'category',
|
|
854
|
-
type: 'options',
|
|
855
|
-
noDataExpression: true,
|
|
856
|
-
options: [
|
|
857
|
-
{ name: 'Hate Speech', value: 'HARM_CATEGORY_HATE_SPEECH' },
|
|
858
|
-
{ name: 'Dangerous Content', value: 'HARM_CATEGORY_DANGEROUS_CONTENT' },
|
|
859
|
-
{ name: 'Harassment', value: 'HARM_CATEGORY_HARASSMENT' },
|
|
860
|
-
{ name: 'Sexually Explicit', value: 'HARM_CATEGORY_SEXUALLY_EXPLICIT' },
|
|
861
|
-
],
|
|
862
|
-
default: 'HARM_CATEGORY_HATE_SPEECH',
|
|
863
|
-
},
|
|
864
|
-
{
|
|
865
|
-
displayName: 'Threshold',
|
|
866
|
-
name: 'threshold',
|
|
867
|
-
type: 'options',
|
|
868
|
-
noDataExpression: true,
|
|
869
|
-
options: [
|
|
870
|
-
{ name: 'Block Low and Above', value: 'BLOCK_LOW_AND_ABOVE' },
|
|
871
|
-
{ name: 'Block Medium and Above', value: 'BLOCK_MEDIUM_AND_ABOVE' },
|
|
872
|
-
{ name: 'Block Only High', value: 'BLOCK_ONLY_HIGH' },
|
|
873
|
-
{ name: 'Block None', value: 'BLOCK_NONE' },
|
|
874
|
-
],
|
|
875
|
-
default: 'BLOCK_MEDIUM_AND_ABOVE',
|
|
876
|
-
},
|
|
877
|
-
],
|
|
878
|
-
},
|
|
879
|
-
],
|
|
880
|
-
description: 'Set safety categories and thresholds to block or filter certain outputs',
|
|
881
|
-
},
|
|
882
|
-
{
|
|
883
|
-
displayName: 'Use Search Grounding',
|
|
884
|
-
name: 'useSearchGrounding',
|
|
885
|
-
type: 'boolean',
|
|
886
|
-
default: false,
|
|
887
|
-
displayOptions: {
|
|
888
|
-
show: {
|
|
889
|
-
provider: ['google'],
|
|
890
|
-
},
|
|
891
|
-
},
|
|
892
|
-
description: 'Whether to enable real-time or up-to-date information if supported by the model',
|
|
893
|
-
},
|
|
894
|
-
{
|
|
895
|
-
displayName: 'Cached Content',
|
|
896
|
-
name: 'cachedContent',
|
|
897
|
-
type: 'string',
|
|
898
|
-
default: '',
|
|
899
|
-
displayOptions: {
|
|
900
|
-
show: {
|
|
901
|
-
provider: ['google'],
|
|
902
|
-
},
|
|
903
|
-
},
|
|
904
|
-
description: 'Name of cached content to use (format: cachedContents/{cachedContent}). Reduces costs for repetitive content.',
|
|
905
|
-
},
|
|
906
|
-
{
|
|
907
|
-
displayName: 'Response Modalities',
|
|
908
|
-
name: 'responseModalities',
|
|
909
|
-
type: 'multiOptions',
|
|
910
|
-
options: [
|
|
911
|
-
{ name: 'Text', value: 'TEXT' },
|
|
912
|
-
{ name: 'Image', value: 'IMAGE' },
|
|
913
|
-
],
|
|
914
|
-
default: [],
|
|
915
|
-
displayOptions: {
|
|
916
|
-
show: {
|
|
917
|
-
provider: ['google'],
|
|
918
|
-
},
|
|
919
|
-
},
|
|
920
|
-
description: 'Output modalities for the response. Leave empty for text-only (default).',
|
|
921
|
-
},
|
|
922
|
-
{
|
|
923
|
-
displayName: 'Thinking Budget',
|
|
924
|
-
name: 'thinkingBudget',
|
|
925
|
-
type: 'number',
|
|
926
|
-
default: 0,
|
|
927
|
-
typeOptions: {
|
|
928
|
-
minValue: 0,
|
|
929
|
-
maxValue: 8192,
|
|
930
|
-
},
|
|
931
|
-
displayOptions: {
|
|
932
|
-
show: {
|
|
933
|
-
provider: ['google'],
|
|
934
|
-
},
|
|
935
|
-
},
|
|
936
|
-
description: 'Number of thinking tokens for reasoning models (Gemini 2.5+). Set to 0 to disable thinking.',
|
|
937
|
-
},
|
|
938
|
-
{
|
|
939
|
-
displayName: 'Include Thoughts',
|
|
940
|
-
name: 'includeThoughts',
|
|
941
|
-
type: 'boolean',
|
|
942
|
-
default: false,
|
|
943
|
-
displayOptions: {
|
|
944
|
-
show: {
|
|
945
|
-
provider: ['google'],
|
|
946
|
-
},
|
|
947
|
-
},
|
|
948
|
-
description: 'Whether to include thought summaries in the response (reasoning insights)',
|
|
949
|
-
},
|
|
950
|
-
{
|
|
951
|
-
displayName: 'Google Tools',
|
|
952
|
-
name: 'googleTools',
|
|
953
|
-
type: 'multiOptions',
|
|
954
|
-
options: [
|
|
955
|
-
{ name: 'Google Search', value: 'google_search' },
|
|
956
|
-
{ name: 'URL Context', value: 'url_context' },
|
|
957
|
-
{ name: 'Code Execution', value: 'code_execution' },
|
|
958
|
-
],
|
|
959
|
-
default: [],
|
|
960
|
-
displayOptions: {
|
|
961
|
-
show: {
|
|
962
|
-
provider: ['google'],
|
|
963
|
-
},
|
|
964
|
-
},
|
|
965
|
-
description: 'Enable Google-specific tools for enhanced capabilities',
|
|
966
|
-
},
|
|
967
|
-
{
|
|
968
|
-
displayName: 'Enable Streaming',
|
|
969
|
-
name: 'enableStreaming',
|
|
970
|
-
type: 'boolean',
|
|
971
|
-
default: false,
|
|
972
|
-
displayOptions: {
|
|
973
|
-
show: {
|
|
974
|
-
operation: ['generateText'],
|
|
975
|
-
},
|
|
976
|
-
},
|
|
977
|
-
description: 'Whether to stream the response in chunks. Output will contain multiple items.',
|
|
978
|
-
},
|
|
979
|
-
],
|
|
980
|
-
};
|
|
420
|
+
this.description = descriptions_1.UNIVERSAL_AI_DESCRIPTION;
|
|
981
421
|
this.methods = {
|
|
982
422
|
loadOptions: {
|
|
983
423
|
async getModels() {
|
|
984
424
|
const provider = this.getCurrentNodeParameter('provider');
|
|
985
|
-
|
|
986
|
-
|
|
987
|
-
|
|
988
|
-
|
|
989
|
-
|
|
990
|
-
|
|
991
|
-
|
|
992
|
-
|
|
993
|
-
|
|
994
|
-
|
|
995
|
-
|
|
996
|
-
|
|
997
|
-
models = model_lists_1.GROQ_MODELS;
|
|
998
|
-
break;
|
|
999
|
-
case 'openrouter':
|
|
1000
|
-
models = model_lists_1.OPENROUTER_MODELS;
|
|
1001
|
-
break;
|
|
1002
|
-
}
|
|
1003
|
-
modelCache.set(provider, models);
|
|
425
|
+
const cacheKey = `models:${provider}`;
|
|
426
|
+
const cached = modelCache.get(cacheKey);
|
|
427
|
+
if (cached)
|
|
428
|
+
return cached;
|
|
429
|
+
const { OPENROUTER_MODELS, GOOGLE_GEMINI_MODELS, DEEPSEEK_MODELS, GROQ_MODELS } = await Promise.resolve().then(() => __importStar(require('./model-lists')));
|
|
430
|
+
const models = {
|
|
431
|
+
google: GOOGLE_GEMINI_MODELS,
|
|
432
|
+
deepseek: DEEPSEEK_MODELS,
|
|
433
|
+
groq: GROQ_MODELS,
|
|
434
|
+
openrouter: OPENROUTER_MODELS,
|
|
435
|
+
}[provider] || [];
|
|
436
|
+
modelCache.set(cacheKey, models);
|
|
1004
437
|
return models;
|
|
1005
438
|
},
|
|
1006
439
|
},
|
|
@@ -1010,223 +443,33 @@ class UniversalAI {
|
|
|
1010
443
|
const items = this.getInputData();
|
|
1011
444
|
const returnData = [];
|
|
1012
445
|
const provider = this.getNodeParameter('provider', 0);
|
|
1013
|
-
|
|
1014
|
-
|
|
1015
|
-
|
|
1016
|
-
|
|
1017
|
-
|
|
1018
|
-
|
|
1019
|
-
|
|
1020
|
-
|
|
1021
|
-
case 'groq':
|
|
1022
|
-
credentials = await this.getCredentials('groqApi');
|
|
1023
|
-
break;
|
|
1024
|
-
case 'openrouter':
|
|
1025
|
-
credentials = await this.getCredentials('openRouterApi');
|
|
1026
|
-
break;
|
|
446
|
+
const credentialType = {
|
|
447
|
+
google: 'googleGenerativeAIApi',
|
|
448
|
+
deepseek: 'deepSeekApi',
|
|
449
|
+
groq: 'groqApi',
|
|
450
|
+
openrouter: 'openRouterApi',
|
|
451
|
+
}[provider];
|
|
452
|
+
if (!credentialType) {
|
|
453
|
+
throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Unsupported provider: ${provider}`);
|
|
1027
454
|
}
|
|
455
|
+
const credentials = await this.getCredentials(credentialType);
|
|
1028
456
|
if (!(credentials === null || credentials === void 0 ? void 0 : credentials.apiKey)) {
|
|
1029
457
|
throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'No API key provided in credentials');
|
|
1030
458
|
}
|
|
1031
|
-
const
|
|
1032
|
-
const actualBaseURL = credentials.baseUrl;
|
|
1033
|
-
const providerCacheKey = `${provider}:${actualApiKey || ''}:${actualBaseURL || ''}`;
|
|
1034
|
-
let aiProvider;
|
|
1035
|
-
if (providerCache.has(providerCacheKey)) {
|
|
1036
|
-
aiProvider = providerCache.get(providerCacheKey);
|
|
1037
|
-
}
|
|
1038
|
-
else {
|
|
1039
|
-
switch (provider) {
|
|
1040
|
-
case 'google':
|
|
1041
|
-
aiProvider = createGoogleGenerativeAI({
|
|
1042
|
-
apiKey: actualApiKey,
|
|
1043
|
-
...(actualBaseURL && { baseURL: actualBaseURL }),
|
|
1044
|
-
});
|
|
1045
|
-
break;
|
|
1046
|
-
case 'deepseek':
|
|
1047
|
-
aiProvider = createDeepSeek({
|
|
1048
|
-
apiKey: actualApiKey,
|
|
1049
|
-
...(actualBaseURL && { baseURL: actualBaseURL }),
|
|
1050
|
-
});
|
|
1051
|
-
break;
|
|
1052
|
-
case 'groq':
|
|
1053
|
-
aiProvider = createGroq({
|
|
1054
|
-
apiKey: actualApiKey,
|
|
1055
|
-
...(actualBaseURL && { baseURL: actualBaseURL }),
|
|
1056
|
-
});
|
|
1057
|
-
break;
|
|
1058
|
-
case 'openrouter':
|
|
1059
|
-
aiProvider = createOpenRouter({
|
|
1060
|
-
apiKey: actualApiKey,
|
|
1061
|
-
...(actualBaseURL && { baseURL: actualBaseURL }),
|
|
1062
|
-
});
|
|
1063
|
-
break;
|
|
1064
|
-
}
|
|
1065
|
-
providerCache.set(providerCacheKey, aiProvider);
|
|
1066
|
-
}
|
|
459
|
+
const aiProvider = await getProvider(provider, credentials.apiKey, credentials.baseUrl);
|
|
1067
460
|
         for (let i = 0; i < items.length; i++) {
             try {
-                const
-
-                const options = this.getNodeParameter('options', i, {});
-                let modelSettings = {};
-                let stopSequences;
-                if (options.stopSequences && (provider === 'google' || provider === 'openrouter')) {
-                    stopSequences = options.stopSequences.split(',').map(s => s.trim()).filter(s => s.length > 0);
-                }
-                if (provider === 'google') {
-                    const safetySettingsRaw = this.getNodeParameter('safetySettings.settings', i, []);
-                    const useSearchGrounding = this.getNodeParameter('useSearchGrounding', i, false);
-                    const cachedContent = this.getNodeParameter('cachedContent', i, '');
-                    const responseModalities = this.getNodeParameter('responseModalities', i, []);
-                    const thinkingBudget = this.getNodeParameter('thinkingBudget', i, 0);
-                    const includeThoughts = this.getNodeParameter('includeThoughts', i, false);
-                    const safetySettings = safetySettingsRaw.map((s) => ({
-                        category: s.category,
-                        threshold: s.threshold,
-                    }));
-                    modelSettings = {
-                        structuredOutputs: operation === 'generateObject',
-                        safetySettings: safetySettings.length > 0 ? safetySettings : undefined,
-                        useSearchGrounding,
-                        ...(cachedContent && { cachedContent }),
-                        ...(responseModalities.length > 0 && { responseModalities }),
-                        ...(thinkingBudget > 0 && {
-                            thinkingConfig: {
-                                thinkingBudget,
-                                includeThoughts,
-                            },
-                        }),
-                    };
-                }
-                else if (provider === 'openrouter') {
-                    modelSettings = {};
-                }
-                const input = await buildInput(this, i);
-                let tools = undefined;
-                if (provider === 'google') {
-                    const googleTools = this.getNodeParameter('googleTools', i, []);
-                    if (googleTools.length > 0) {
-                        tools = {};
-                        const google = require('@ai-sdk/google').google;
-                        if (googleTools.includes('google_search')) {
-                            tools.google_search = google.tools.googleSearch({});
-                        }
-                        if (googleTools.includes('url_context')) {
-                            tools.url_context = google.tools.urlContext({});
-                        }
-                        if (googleTools.includes('code_execution')) {
-                            tools.code_execution = google.tools.codeExecution({});
-                        }
-                    }
-                }
-                const enableStreaming = this.getNodeParameter('enableStreaming', i, false);
-                if (operation === 'generateText') {
-                    const generateTextParams = {
-                        model: aiProvider(model, modelSettings),
-                        messages: input.messages,
-                        maxTokens: options.maxTokens,
-                        temperature: options.temperature,
-                        topP: options.topP,
-                        topK: options.topK,
-                        frequencyPenalty: options.frequencyPenalty,
-                        presencePenalty: options.presencePenalty,
-                        seed: options.seed,
-                        prompt: input.prompt,
-                        system: input.system,
-                        ...(tools && { tools }),
-                    };
-                    if (stopSequences && (provider === 'google' || provider === 'openrouter')) {
-                        generateTextParams.stopSequences = stopSequences;
-                    }
-                    if (enableStreaming) {
-                        const stream = await (0, ai_1.streamText)(generateTextParams);
-                        let fullText = '';
-                        const chunks = [];
-                        for await (const textPart of stream.textStream) {
-                            fullText += textPart;
-                            chunks.push(textPart);
-                        }
-                        const finalUsage = await stream.usage;
-                        for (const chunk of chunks) {
-                            returnData.push({
-                                json: {
-                                    chunk,
-                                    isStreaming: true,
-                                }
-                            });
-                        }
-                        returnData.push({
-                            json: {
-                                text: fullText,
-                                toolCalls: stream.toolCalls || [],
-                                toolResults: stream.toolResults || [],
-                                finishReason: stream.finishReason,
-                                usage: {
-                                    promptTokens: finalUsage.promptTokens,
-                                    completionTokens: finalUsage.completionTokens,
-                                    totalTokens: finalUsage.totalTokens,
-                                },
-                                isStreaming: false,
-                                isFinal: true,
-                            }
-                        });
-                    }
-                    else {
-                        const result = await (0, ai_1.generateText)(generateTextParams);
-                        const formatted = formatTextResult(result, options.includeRequestBody, provider);
-                        if (tools && result.toolCalls) {
-                            formatted.toolCalls = result.toolCalls;
-                            formatted.toolResults = result.toolResults;
-                        }
-                        returnData.push({ json: formatted });
-                    }
-                }
-                else {
-                    const schemaName = this.getNodeParameter('schemaName', i, '');
-                    const schemaDescription = this.getNodeParameter('schemaDescription', i, '');
-                    const rawSchema = this.getNodeParameter('schema', i);
-                    let parsedSchema;
-                    try {
-                        parsedSchema = JSON.parse(rawSchema);
-                    }
-                    catch (err) {
-                        throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'Schema is not valid JSON: ' + err.message);
-                    }
-                    const ajv = new ajv_1.default();
-                    if (!ajv.validateSchema(parsedSchema)) {
-                        throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
-                    }
-                    const generateObjectParams = {
-                        model: aiProvider(model, modelSettings),
-                        schema: (0, ai_1.jsonSchema)(parsedSchema),
-                        schemaName,
-                        schemaDescription,
-                        temperature: options.temperature,
-                        topP: options.topP,
-                        topK: options.topK,
-                        frequencyPenalty: options.frequencyPenalty,
-                        presencePenalty: options.presencePenalty,
-                        seed: options.seed,
-                        ...(stopSequences && (provider === 'google' || provider === 'openrouter') && { stopSequences }),
-                    };
-                    if (input.messages && input.messages.length > 0) {
-                        generateObjectParams.messages = input.messages;
-                    }
-                    else if (input.prompt) {
-                        generateObjectParams.prompt = input.prompt;
-                    }
-                    if (input.system) {
-                        generateObjectParams.system = input.system;
-                    }
-                    const result = await (0, ai_1.generateObject)(generateObjectParams);
-                    const formatted = formatObjectResult(result, options.includeRequestBody);
-                    returnData.push({ json: formatted });
-                }
+                const result = await processItem(this, i, provider, aiProvider);
+                returnData.push(...result);
             }
             catch (error) {
                 if (this.continueOnFail()) {
-                    returnData.push({
+                    returnData.push({
+                        json: {
+                            error: error.message,
+                            itemIndex: i
+                        }
+                    });
                 }
                 else {
                     throw new n8n_workflow_1.NodeOperationError(this.getNode(), error, { itemIndex: i });
@@ -1237,4 +480,153 @@ class UniversalAI {
     }
 }
 exports.UniversalAI = UniversalAI;
+async function processItem(exec, index, provider, aiProvider) {
+    const operation = exec.getNodeParameter('operation', index);
+    const model = exec.getNodeParameter('model', index);
+    const options = exec.getNodeParameter('options', index, {});
+    const input = await buildInput(exec, index);
+    const modelSettings = getModelSettings(exec, index, provider, operation, options);
+    return operation === 'generateText'
+        ? await generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options)
+        : await generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options);
+}
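
processItem reuses the existing buildInput helper, which is unchanged in this release and not part of this hunk. Judging from how both the old and new code consume its result (input.messages, input.prompt, input.system, and the spread into the AI SDK parameters), its return value presumably has roughly this shape:

// Assumed return shape of buildInput(exec, index), inferred from usage in this diff:
// {
//     messages?: Array,   // chat-style input for the AI SDK call
//     prompt?: string,    // single-prompt input, used when no messages are built
//     system?: string,    // optional system instruction
// }
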
+function getModelSettings(exec, index, provider, operation, options) {
+    const settings = {};
+    const stopSequences = parseStopSequences(options.stopSequences);
+    if (stopSequences && (provider === 'google' || provider === 'openrouter')) {
+        settings.stopSequences = stopSequences;
+    }
+    if (provider === 'google') {
+        const safetySettingsRaw = exec.getNodeParameter('safetySettings.settings', index, []);
+        if (safetySettingsRaw.length > 0) {
+            settings.safetySettings = safetySettingsRaw.map((s) => ({
+                category: s.category,
+                threshold: s.threshold,
+            }));
+        }
+        settings.structuredOutputs = operation === 'generateObject';
+        settings.useSearchGrounding = exec.getNodeParameter('useSearchGrounding', index, false);
+        const cachedContent = exec.getNodeParameter('cachedContent', index, '');
+        if (cachedContent) {
+            settings.cachedContent = cachedContent;
+        }
+        const thinkingBudgetValue = Number(exec.getNodeParameter('thinkingBudget', index, 0));
+        if (!Number.isNaN(thinkingBudgetValue) && thinkingBudgetValue > 0) {
+            settings.thinkingConfig = {
+                thinkingBudget: thinkingBudgetValue,
+                includeThoughts: exec.getNodeParameter('includeThoughts', index, false),
+            };
+        }
+        const responseModalities = exec.getNodeParameter('responseModalities', index, []);
+        if (responseModalities.length > 0) {
+            settings.responseModalities = responseModalities;
+        }
+    }
+    return settings;
+}
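
getModelSettings calls a parseStopSequences helper that is not defined in this hunk. The removed inline code split the comma-separated option, trimmed each entry, and dropped empty strings, so the helper presumably does the equivalent; a minimal sketch under that assumption:

// Assumed implementation, mirroring the removed split/trim/filter logic.
function parseStopSequences(raw) {
    if (!raw) {
        return undefined;
    }
    const sequences = raw.split(',').map((s) => s.trim()).filter((s) => s.length > 0);
    return sequences.length > 0 ? sequences : undefined;
}
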
+async function generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options) {
+    const enableStreaming = exec.getNodeParameter('enableStreaming', index, false);
+    const includeRequestBody = options.includeRequestBody;
+    const tools = provider === 'google' ? await buildGoogleTools(exec, index) : undefined;
+    const params = {
+        model: aiProvider(model, modelSettings),
+        ...input,
+        ...(tools && { tools }),
+    };
+    const numericOptionKeys = [
+        'maxTokens',
+        'temperature',
+        'topP',
+        'topK',
+        'frequencyPenalty',
+        'presencePenalty',
+        'seed',
+    ];
+    for (const key of numericOptionKeys) {
+        const value = options[key];
+        if (value !== undefined && value !== null && value !== '') {
+            params[key] = value;
+        }
+    }
+    if (enableStreaming) {
+        return await handleStreaming(params, provider, includeRequestBody);
+    }
+    const result = await (0, ai_1.generateText)(params);
+    return [{ json: formatTextResult(result, includeRequestBody, provider) }];
+}
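
The option loop in generateTextOperation copies only values that were actually set: undefined, null, and the empty string are skipped, while 0 passes through as a legitimate value. With hypothetical options { maxTokens: 1024, temperature: 0, topP: '' }, the assembled params would include maxTokens and temperature but omit topP:

// Illustrative only; option values and the model name are hypothetical.
// options = { maxTokens: 1024, temperature: 0, topP: '' }
// params = {
//     model: aiProvider('gemini-2.0-flash', modelSettings),
//     ...input,              // messages / prompt / system from buildInput
//     maxTokens: 1024,
//     temperature: 0,        // kept: 0 is not undefined, null, or ''
//     // topP omitted: the empty string counts as "not set"
// }
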
+async function buildGoogleTools(exec, index) {
+    const googleTools = exec.getNodeParameter('googleTools', index, []);
+    if (!googleTools || googleTools.length === 0) {
+        return undefined;
+    }
+    const tools = {};
+    const google = require('@ai-sdk/google').google;
+    const toolSet = new Set(googleTools);
+    if (toolSet.has('google_search')) {
+        tools.google_search = google.tools.googleSearch({});
+    }
+    if (toolSet.has('url_context')) {
+        tools.url_context = google.tools.urlContext({});
+    }
+    if (toolSet.has('code_execution')) {
+        tools.code_execution = google.tools.codeExecution({});
+    }
+    return tools;
+}
+async function handleStreaming(params, provider, includeRequestBody) {
+    const stream = await (0, ai_1.streamText)(params);
+    const chunks = [];
+    let fullText = '';
+    for await (const textPart of stream.textStream) {
+        fullText += textPart;
+        chunks.push({ json: { chunk: textPart, isStreaming: true } });
+    }
+    const finalUsage = await stream.usage;
+    const finalJson = {
+        text: fullText,
+        toolCalls: stream.toolCalls || [],
+        toolResults: stream.toolResults || [],
+        finishReason: stream.finishReason,
+        usage: formatUsage({ usage: finalUsage }, provider),
+        isStreaming: false,
+        isFinal: true,
+    };
+    if (includeRequestBody) {
+        const requestMetadata = stream.request ? await stream.request : undefined;
+        if ((requestMetadata === null || requestMetadata === void 0 ? void 0 : requestMetadata.body) !== undefined) {
+            finalJson.request = { body: requestMetadata.body };
+        }
+    }
+    chunks.push({ json: finalJson });
+    return chunks;
+}
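
With streaming enabled, handleStreaming emits one n8n item per text delta followed by a final summary item, so downstream nodes receive output shaped roughly like this (values are illustrative; the usage object is whatever formatUsage produces, which is not shown in this diff):

// Illustrative output for a two-chunk streamed response:
// [
//     { json: { chunk: 'Hello', isStreaming: true } },
//     { json: { chunk: ' world', isStreaming: true } },
//     { json: {
//         text: 'Hello world',
//         toolCalls: [],
//         toolResults: [],
//         finishReason: 'stop',
//         usage: { /* shaped by formatUsage */ },
//         isStreaming: false,
//         isFinal: true,
//     } },
// ]
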
+async function generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options) {
+    const schemaName = exec.getNodeParameter('schemaName', index, '');
+    const schemaDescription = exec.getNodeParameter('schemaDescription', index, '');
+    const rawSchema = exec.getNodeParameter('schema', index);
+    const parsedSchema = parseAndValidateSchema(rawSchema, exec);
+    const params = {
+        model: aiProvider(model, modelSettings),
+        schema: (0, ai_1.jsonSchema)(parsedSchema),
+        schemaName,
+        schemaDescription,
+        ...input,
+    };
+    const numericOptionKeys = [
+        'temperature',
+        'topP',
+        'topK',
+        'frequencyPenalty',
+        'presencePenalty',
+        'seed',
+    ];
+    for (const key of numericOptionKeys) {
+        const value = options[key];
+        if (value !== undefined && value !== null && value !== '') {
+            params[key] = value;
+        }
+    }
+    const result = await (0, ai_1.generateObject)(params);
+    return [{ json: formatObjectResult(result, options.includeRequestBody, provider) }];
+}
 //# sourceMappingURL=UniversalAI.node.js.map
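
generateObjectOperation delegates schema handling to parseAndValidateSchema, which is also defined outside this hunk. The removed inline code parsed the raw schema with JSON.parse and checked it with Ajv's validateSchema before passing it to jsonSchema(), so the helper presumably does the same; a minimal sketch under that assumption (n8n_workflow_1 and ajv_1 are the file's existing imports):

// Assumed implementation, mirroring the removed JSON.parse + Ajv validation.
function parseAndValidateSchema(rawSchema, exec) {
    let parsedSchema;
    try {
        parsedSchema = JSON.parse(rawSchema);
    }
    catch (err) {
        throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Schema is not valid JSON: ' + err.message);
    }
    const ajv = new ajv_1.default();
    if (!ajv.validateSchema(parsedSchema)) {
        throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
    }
    return parsedSchema;
}
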