n8n-nodes-vercel-ai-sdk-universal-temp 0.1.21 → 0.1.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,151 +1,268 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.UniversalAI = void 0;
 const n8n_workflow_1 = require("n8n-workflow");
-
-
-
-
+let googleProvider;
+let deepseekProvider;
+let groqProvider;
+let openrouterProvider;
 const ai_1 = require("ai");
 const zod_1 = require("zod");
 const ajv_1 = __importDefault(require("ajv"));
-const model_lists_1 = require("./model-lists");
 const descriptions_1 = require("../shared/descriptions");
-
-
+class Cache {
+    constructor(maxSize = 100, ttl = 5 * 60 * 1000) {
+        this.cache = new Map();
+        this.totalHits = 0;
+        this.totalMisses = 0;
+        this.maxSize = maxSize;
+        this.ttl = ttl;
+    }
+    get(key) {
+        const item = this.cache.get(key);
+        if (!item) {
+            this.totalMisses++;
+            return undefined;
+        }
+        if (Date.now() - item.timestamp > this.ttl) {
+            this.cache.delete(key);
+            this.totalMisses++;
+            return undefined;
+        }
+        item.hits++;
+        this.totalHits++;
+        return item.value;
+    }
+    set(key, value) {
+        if (this.cache.size >= this.maxSize) {
+            let minHits = Infinity;
+            let keyToDelete;
+            for (const [k, v] of this.cache.entries()) {
+                if (v.hits < minHits) {
+                    minHits = v.hits;
+                    keyToDelete = k;
+                }
+            }
+            if (keyToDelete) {
+                this.cache.delete(keyToDelete);
+            }
+        }
+        this.cache.set(key, { value, timestamp: Date.now(), hits: 0 });
+    }
+    getStats() {
+        return {
+            size: this.cache.size,
+            hits: this.totalHits,
+            misses: this.totalMisses,
+            hitRate: this.totalHits / (this.totalHits + this.totalMisses) || 0
+        };
+    }
+    clear() {
+        this.cache.clear();
+        this.totalHits = 0;
+        this.totalMisses = 0;
+    }
+}
+const modelCache = new Cache(50);
+const providerCache = new Cache(20);
+const schemaCache = new Cache(30);
+const messageSchema = zod_1.z.object({
+    role: zod_1.z.enum(['system', 'user', 'assistant']),
+    content: zod_1.z.any(),
+});
+const messagesArraySchema = zod_1.z.array(messageSchema);
+const ajv = new ajv_1.default({
+    allErrors: true,
+    verbose: true,
+    strict: false,
+});
+const isUrl = (str) => {
+    return str.startsWith('http://') ||
+        str.startsWith('https://') ||
+        str.startsWith('data:');
+};
+const isLikelyBase64 = (str) => {
+    if (str.length % 4 !== 0)
+        return false;
+    if (!/^[A-Za-z0-9+/]*={0,2}$/.test(str))
+        return false;
+    if (str.length > 10000)
+        return true;
+    return true;
+};
 async function buildInput(exec, itemIndex) {
-    var _a;
     const inputType = exec.getNodeParameter('inputType', itemIndex);
     if (inputType === 'prompt') {
-        const promptVal = exec.getNodeParameter('prompt', itemIndex);
-        const systemVal = exec.getNodeParameter('system', itemIndex);
         return {
-            prompt:
-            system:
+            prompt: exec.getNodeParameter('prompt', itemIndex),
+            system: exec.getNodeParameter('system', itemIndex),
         };
     }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    const messageAsJson = exec.getNodeParameter('messageAsJson', itemIndex, false);
+    return messageAsJson
+        ? buildMessagesFromJson(exec, itemIndex)
+        : buildMessagesFromUI(exec, itemIndex);
+}
+async function buildMessagesFromJson(exec, itemIndex) {
+    const rawJson = exec.getNodeParameter('messagesJson', itemIndex);
+    try {
+        const parsed = JSON.parse(rawJson);
+        const result = messagesArraySchema.safeParse(parsed);
+        if (!result.success) {
+            throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Messages must be an array of objects with role and content.');
+        }
+        return { messages: result.data };
+    }
+    catch (error) {
+        throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON in "Messages (JSON)" field: ${error.message}`);
+    }
+}
+async function buildMessagesFromUI(exec, itemIndex) {
+    var _a;
+    const items = exec.getInputData();
+    const messagesUi = exec.getNodeParameter('messages.messagesUi', itemIndex, []);
+    const builtMessages = [];
+    const itemBinary = items[itemIndex].binary;
+    for (const msg of messagesUi) {
+        const role = msg.role;
+        if (role === 'system') {
+            builtMessages.push({ role, content: msg.systemContent || '' });
+            continue;
+        }
+        const attachments = ((_a = msg.attachments) === null || _a === void 0 ? void 0 : _a.attachment) || [];
+        if (attachments.length === 0) {
+            builtMessages.push({ role, content: msg.content || '' });
         }
         else {
-            const
-
-
-            for (const msg of messagesUi) {
-                const role = msg.role;
-                if (role === 'system') {
-                    builtMessages.push({
-                        role,
-                        content: msg.systemContent || '',
-                    });
-                    continue;
-                }
-                const attachments = ((_a = msg.attachments) === null || _a === void 0 ? void 0 : _a.attachment) || [];
-                if (attachments.length === 0) {
-                    builtMessages.push({
-                        role,
-                        content: msg.content || '',
-                    });
-                }
-                else {
-                    const parts = [];
-                    if (msg.content) {
-                        parts.push({
-                            type: 'text',
-                            text: msg.content,
-                        });
-                    }
-                    for (const attachment of attachments) {
-                        const fileContentInput = attachment.fileContent;
-                        let detectedMimeType = attachment.mimeType || 'application/octet-stream';
-                        if (detectedMimeType === 'other') {
-                            detectedMimeType = attachment.mimeTypeOther || 'application/octet-stream';
-                        }
-                        let fileData;
-                        if (fileContentInput.startsWith('http://') || fileContentInput.startsWith('https://') || fileContentInput.startsWith('data:')) {
-                            fileData = fileContentInput;
-                        }
-                        else {
-                            const itemBinary = items[itemIndex].binary;
-                            if (itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput]) {
-                                const binaryData = itemBinary[fileContentInput];
-                                fileData = Buffer.from(binaryData.data, 'base64');
-                                if (!attachment.mimeType && binaryData.mimeType) {
-                                    detectedMimeType = binaryData.mimeType;
-                                }
-                            }
-                            else {
-                                try {
-                                    fileData = Buffer.from(fileContentInput, 'base64');
-                                }
-                                catch (error) {
-                                    throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid base64 data in attachment for message ${itemIndex + 1}: ${error.message}`);
-                                }
-                            }
-                        }
-                        if (fileData && !(Buffer.isBuffer(fileData) && fileData.length === 0)) {
-                            parts.push({
-                                type: 'file',
-                                data: fileData,
-                                mediaType: detectedMimeType,
-                            });
-                        }
-                    }
-                    if (parts.length > 0) {
-                        builtMessages.push({
-                            role,
-                            content: parts,
-                        });
-                    }
-                }
+            const messageWithAttachments = await buildMessageWithAttachments(role, msg.content, attachments, itemBinary, exec, itemIndex);
+            if (messageWithAttachments) {
+                builtMessages.push(messageWithAttachments);
             }
-
-
-
-
+        }
+    }
+    const convertMessagesToModel = exec.getNodeParameter('convertMessagesToModel', itemIndex, false);
+    if (convertMessagesToModel) {
+        return { messages: (0, ai_1.convertToModelMessages)(builtMessages) };
+    }
+    return { messages: builtMessages };
+}
+async function buildMessageWithAttachments(role, content, attachments, itemBinary, exec, itemIndex) {
+    const parts = [];
+    if (content) {
+        parts.push({ type: 'text', text: content });
+    }
+    const MAX_CONCURRENT_ATTACHMENTS = 3;
+    const processedAttachments = [];
+    for (let i = 0; i < attachments.length; i += MAX_CONCURRENT_ATTACHMENTS) {
+        const batch = attachments.slice(i, i + MAX_CONCURRENT_ATTACHMENTS);
+        const batchPromises = batch.map(attachment => processAttachment(attachment, itemBinary, exec, itemIndex));
+        const batchResults = await Promise.all(batchPromises);
+        processedAttachments.push(...batchResults);
+    }
+    for (const attachment of processedAttachments) {
+        if (attachment) {
+            parts.push(attachment);
+        }
+    }
+    return parts.length > 0 ? { role, content: parts } : null;
+}
+async function processAttachment(attachment, itemBinary, exec, itemIndex) {
+    const fileContentInput = attachment.fileContent;
+    if (!fileContentInput)
+        return null;
+    let mimeType = getMimeType(attachment);
+    let fileData;
+    if (isUrl(fileContentInput)) {
+        fileData = fileContentInput;
+    }
+    else {
+        fileData = await getBinaryData(fileContentInput, itemBinary, exec, itemIndex);
+        if (!fileData)
+            return null;
+        const binaryItem = itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput];
+        if (!mimeType && (binaryItem === null || binaryItem === void 0 ? void 0 : binaryItem.mimeType)) {
+            mimeType = binaryItem.mimeType;
+        }
+    }
+    if (!fileData || (Buffer.isBuffer(fileData) && fileData.length === 0)) {
+        return null;
+    }
+    return {
+        type: 'file',
+        data: fileData,
+        mediaType: mimeType || 'application/octet-stream',
+    };
+}
+function getMimeType(attachment) {
+    return attachment.mimeType === 'other'
+        ? attachment.mimeTypeOther
+        : attachment.mimeType;
+}
+async function getBinaryData(fileContentInput, itemBinary, exec, itemIndex) {
+    if (itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput]) {
+        const binaryData = itemBinary[fileContentInput];
+        return Buffer.from(binaryData.data, 'base64');
+    }
+    try {
+        if (isLikelyBase64(fileContentInput)) {
+            const buffer = Buffer.from(fileContentInput, 'base64');
+            if (buffer.length > 0 && buffer.length < 50 * 1024 * 1024) {
+                return buffer;
             }
-        return { messages: finalMessages };
         }
     }
+    catch (error) {
+        throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid file content for attachment: ${error.message}`);
+    }
+    return null;
 }
 function formatTextResult(result, includeRequestBody, provider) {
-    var _a, _b, _c
-    let text = result
-    let reasoning = result.reasoning;
+    var _a, _b, _c;
+    let { text, reasoning } = result;
     if (provider === 'groq' && text.includes('<think>')) {
         const thinkMatch = text.match(/<think>(.*?)<\/think>/s);
         if (thinkMatch) {
-            reasoning = [{ text: thinkMatch[1].trim() }];
+            reasoning = [{ type: 'reasoning', text: thinkMatch[1].trim() }];
             text = text.replace(/<think>.*?<\/think>\s*/s, '').trim();
         }
     }
-    const
+    const baseResult = {
         text,
         reasoning,
         reasoningText: result.reasoningText,
@@ -154,72 +271,150 @@ function formatTextResult(result, includeRequestBody, provider) {
         finishReason: result.finishReason,
         sources: result.sources || [],
         files: result.files || [],
-        usage:
-
-            completionTokens: (_b = result.usage) === null || _b === void 0 ? void 0 : _b.completionTokens,
-            totalTokens: (_c = result.usage) === null || _c === void 0 ? void 0 : _c.totalTokens,
-            ...(provider === 'deepseek' && {
-                cacheMetrics: {
-                    promptCacheHitTokens: (_e = (_d = result.experimental_providerMetadata) === null || _d === void 0 ? void 0 : _d.deepseek) === null || _e === void 0 ? void 0 : _e.promptCacheHitTokens,
-                    promptCacheMissTokens: (_g = (_f = result.experimental_providerMetadata) === null || _f === void 0 ? void 0 : _f.deepseek) === null || _g === void 0 ? void 0 : _g.promptCacheMissTokens,
-                },
-            }),
-            ...(provider === 'groq' && {
-                cacheMetrics: {
-                    promptCacheHitTokens: (_j = (_h = result.experimental_providerMetadata) === null || _h === void 0 ? void 0 : _h.groq) === null || _j === void 0 ? void 0 : _j.promptCacheHitTokens,
-                    promptCacheMissTokens: (_l = (_k = result.experimental_providerMetadata) === null || _k === void 0 ? void 0 : _k.groq) === null || _l === void 0 ? void 0 : _l.promptCacheMissTokens,
-                },
-            }),
-            ...(provider === 'google' && {
-                cacheMetrics: {
-                    cachedContentTokenCount: (_p = (_o = (_m = result.experimental_providerMetadata) === null || _m === void 0 ? void 0 : _m.google) === null || _o === void 0 ? void 0 : _o.usageMetadata) === null || _p === void 0 ? void 0 : _p.cachedContentTokenCount,
-                    thoughtsTokenCount: (_s = (_r = (_q = result.experimental_providerMetadata) === null || _q === void 0 ? void 0 : _q.google) === null || _r === void 0 ? void 0 : _r.usageMetadata) === null || _s === void 0 ? void 0 : _s.thoughtsTokenCount,
-                },
-            }),
-        },
-        response: {
-            id: (_t = result.response) === null || _t === void 0 ? void 0 : _t.id,
-            modelId: (_u = result.response) === null || _u === void 0 ? void 0 : _u.modelId,
-            timestamp: (_v = result.response) === null || _v === void 0 ? void 0 : _v.timestamp,
-            headers: (_w = result.response) === null || _w === void 0 ? void 0 : _w.headers,
-        },
+        usage: formatUsage(result, provider),
+        response: formatResponse(result),
         steps: result.steps || [],
         warnings: result.warnings || [],
         experimental_providerMetadata: result.experimental_providerMetadata,
-        ...(provider === 'google' && {
-            groundingMetadata: (_y = (_x = result.experimental_providerMetadata) === null || _x === void 0 ? void 0 : _x.google) === null || _y === void 0 ? void 0 : _y.groundingMetadata,
-            safetyRatings: (_0 = (_z = result.experimental_providerMetadata) === null || _z === void 0 ? void 0 : _z.google) === null || _0 === void 0 ? void 0 : _0.safetyRatings,
-        }),
     };
+    if (provider === 'google') {
+        const providerMetadata = result.experimental_providerMetadata;
+        baseResult.groundingMetadata = (_a = providerMetadata === null || providerMetadata === void 0 ? void 0 : providerMetadata.google) === null || _a === void 0 ? void 0 : _a.groundingMetadata;
+        baseResult.safetyRatings = (_b = providerMetadata === null || providerMetadata === void 0 ? void 0 : providerMetadata.google) === null || _b === void 0 ? void 0 : _b.safetyRatings;
+    }
     if (includeRequestBody) {
-
+        const requestBody = (_c = result.request) === null || _c === void 0 ? void 0 : _c.body;
+        if (requestBody !== undefined) {
+            baseResult.request = { body: requestBody };
+        }
     }
-    return
+    return baseResult;
 }
-function formatObjectResult(result, includeRequestBody) {
-    var _a
+function formatObjectResult(result, includeRequestBody, provider) {
+    var _a;
     const out = {
         object: result.object,
         finishReason: result.finishReason,
-        usage:
-
-            completionTokens: (_b = result.usage) === null || _b === void 0 ? void 0 : _b.completionTokens,
-            totalTokens: (_c = result.usage) === null || _c === void 0 ? void 0 : _c.totalTokens,
-        },
-        response: {
-            id: (_d = result.response) === null || _d === void 0 ? void 0 : _d.id,
-            modelId: (_e = result.response) === null || _e === void 0 ? void 0 : _e.modelId,
-            timestamp: (_f = result.response) === null || _f === void 0 ? void 0 : _f.timestamp,
-            headers: (_g = result.response) === null || _g === void 0 ? void 0 : _g.headers,
-        },
+        usage: formatUsage(result, provider),
+        response: formatResponse(result),
         warnings: result.warnings || [],
         experimental_providerMetadata: result.experimental_providerMetadata,
     };
     if (includeRequestBody) {
-        out.request = { body: (
+        out.request = { body: (_a = result.request) === null || _a === void 0 ? void 0 : _a.body };
     }
     return out;
 }
+function formatUsage(result, provider) {
+    var _a, _b, _c;
+    const usage = {
+        promptTokens: (_a = result.usage) === null || _a === void 0 ? void 0 : _a.promptTokens,
+        completionTokens: (_b = result.usage) === null || _b === void 0 ? void 0 : _b.completionTokens,
+        totalTokens: (_c = result.usage) === null || _c === void 0 ? void 0 : _c.totalTokens,
+    };
+    const cacheMetrics = getCacheMetrics(result, provider);
+    if (Object.keys(cacheMetrics).length > 0) {
+        usage.cacheMetrics = cacheMetrics;
+    }
+    return usage;
+}
+function getCacheMetrics(result, provider) {
+    var _a, _b, _c, _d, _e, _f, _g, _h;
+    const metadata = result.experimental_providerMetadata;
+    switch (provider) {
+        case 'deepseek':
+            return {
+                promptCacheHitTokens: (_a = metadata === null || metadata === void 0 ? void 0 : metadata.deepseek) === null || _a === void 0 ? void 0 : _a.promptCacheHitTokens,
+                promptCacheMissTokens: (_b = metadata === null || metadata === void 0 ? void 0 : metadata.deepseek) === null || _b === void 0 ? void 0 : _b.promptCacheMissTokens,
+            };
+        case 'groq':
+            return {
+                promptCacheHitTokens: (_c = metadata === null || metadata === void 0 ? void 0 : metadata.groq) === null || _c === void 0 ? void 0 : _c.promptCacheHitTokens,
+                promptCacheMissTokens: (_d = metadata === null || metadata === void 0 ? void 0 : metadata.groq) === null || _d === void 0 ? void 0 : _d.promptCacheMissTokens,
+            };
+        case 'google':
+            return {
+                cachedContentTokenCount: (_f = (_e = metadata === null || metadata === void 0 ? void 0 : metadata.google) === null || _e === void 0 ? void 0 : _e.usageMetadata) === null || _f === void 0 ? void 0 : _f.cachedContentTokenCount,
+                thoughtsTokenCount: (_h = (_g = metadata === null || metadata === void 0 ? void 0 : metadata.google) === null || _g === void 0 ? void 0 : _g.usageMetadata) === null || _h === void 0 ? void 0 : _h.thoughtsTokenCount,
+            };
+        default:
+            return {};
+    }
+}
+function formatResponse(result) {
+    var _a, _b, _c, _d;
+    return {
+        id: (_a = result.response) === null || _a === void 0 ? void 0 : _a.id,
+        modelId: (_b = result.response) === null || _b === void 0 ? void 0 : _b.modelId,
+        timestamp: (_c = result.response) === null || _c === void 0 ? void 0 : _c.timestamp,
+        headers: (_d = result.response) === null || _d === void 0 ? void 0 : _d.headers,
+    };
+}
+async function getProvider(provider, apiKey, baseURL) {
+    const cacheKey = `${provider}:${apiKey}:${baseURL || ''}`;
+    const cached = providerCache.get(cacheKey);
+    if (cached)
+        return cached;
+    let providerInstance;
+    try {
+        switch (provider) {
+            case 'google':
+                if (!googleProvider) {
+                    googleProvider = require('@ai-sdk/google').createGoogleGenerativeAI;
+                }
+                providerInstance = googleProvider({ apiKey, ...(baseURL && { baseURL }) });
+                break;
+            case 'deepseek':
+                if (!deepseekProvider) {
+                    deepseekProvider = require('@ai-sdk/deepseek').createDeepSeek;
+                }
+                providerInstance = deepseekProvider({ apiKey, ...(baseURL && { baseURL }) });
+                break;
+            case 'groq':
+                if (!groqProvider) {
+                    groqProvider = require('@ai-sdk/groq').createGroq;
+                }
+                providerInstance = groqProvider({ apiKey, ...(baseURL && { baseURL }) });
+                break;
+            case 'openrouter':
+                if (!openrouterProvider) {
+                    openrouterProvider = require('@openrouter/ai-sdk-provider').createOpenRouter;
+                }
+                providerInstance = openrouterProvider({ apiKey, ...(baseURL && { baseURL }) });
+                break;
+            default:
+                throw new Error(`Unsupported provider: ${provider}`);
+        }
+        providerCache.set(cacheKey, providerInstance);
+        return providerInstance;
+    }
+    catch (error) {
+        throw new Error(`Failed to initialize ${provider} provider: ${error.message}`);
+    }
+}
+function parseAndValidateSchema(rawSchema, exec) {
+    const cacheKey = `schema:${Buffer.from(rawSchema).toString('base64').substring(0, 50)}`;
+    const cached = schemaCache.get(cacheKey);
+    if (cached)
+        return cached;
+    let parsedSchema;
+    try {
+        parsedSchema = JSON.parse(rawSchema);
+    }
+    catch (err) {
+        throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Schema is not valid JSON: ' + err.message);
+    }
+    if (!ajv.validateSchema(parsedSchema)) {
+        throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
+    }
+    schemaCache.set(cacheKey, parsedSchema);
+    return parsedSchema;
+}
+function parseStopSequences(stopSequencesStr) {
+    if (!stopSequencesStr)
+        return undefined;
+    return stopSequencesStr.split(',').map(s => s.trim()).filter(s => s.length > 0);
+}
 class UniversalAI {
     constructor() {
         this.description = descriptions_1.UNIVERSAL_AI_DESCRIPTION;
@@ -227,25 +422,18 @@ class UniversalAI {
             loadOptions: {
                 async getModels() {
                     const provider = this.getCurrentNodeParameter('provider');
-
-
-
-
-
-
-
-
-
-
-
-
-                            models = model_lists_1.GROQ_MODELS;
-                            break;
-                        case 'openrouter':
-                            models = model_lists_1.OPENROUTER_MODELS;
-                            break;
-                    }
-                    modelCache.set(provider, models);
+                    const cacheKey = `models:${provider}`;
+                    const cached = modelCache.get(cacheKey);
+                    if (cached)
+                        return cached;
+                    const { OPENROUTER_MODELS, GOOGLE_GEMINI_MODELS, DEEPSEEK_MODELS, GROQ_MODELS } = await Promise.resolve().then(() => __importStar(require('./model-lists')));
+                    const models = {
+                        google: GOOGLE_GEMINI_MODELS,
+                        deepseek: DEEPSEEK_MODELS,
+                        groq: GROQ_MODELS,
+                        openrouter: OPENROUTER_MODELS,
+                    }[provider] || [];
+                    modelCache.set(cacheKey, models);
                     return models;
                 },
             },
@@ -255,223 +443,33 @@ class UniversalAI {
         const items = this.getInputData();
         const returnData = [];
         const provider = this.getNodeParameter('provider', 0);
-
-
-
-
-
-
-
-
-            case 'groq':
-                credentials = await this.getCredentials('groqApi');
-                break;
-            case 'openrouter':
-                credentials = await this.getCredentials('openRouterApi');
-                break;
+        const credentialType = {
+            google: 'googleGenerativeAIApi',
+            deepseek: 'deepSeekApi',
+            groq: 'groqApi',
+            openrouter: 'openRouterApi',
+        }[provider];
+        if (!credentialType) {
+            throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Unsupported provider: ${provider}`);
         }
+        const credentials = await this.getCredentials(credentialType);
         if (!(credentials === null || credentials === void 0 ? void 0 : credentials.apiKey)) {
             throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'No API key provided in credentials');
         }
-        const
-        const actualBaseURL = credentials.baseUrl;
-        const providerCacheKey = `${provider}:${actualApiKey || ''}:${actualBaseURL || ''}`;
-        let aiProvider;
-        if (providerCache.has(providerCacheKey)) {
-            aiProvider = providerCache.get(providerCacheKey);
-        }
-        else {
-            switch (provider) {
-                case 'google':
-                    aiProvider = createGoogleGenerativeAI({
-                        apiKey: actualApiKey,
-                        ...(actualBaseURL && { baseURL: actualBaseURL }),
-                    });
-                    break;
-                case 'deepseek':
-                    aiProvider = createDeepSeek({
-                        apiKey: actualApiKey,
-                        ...(actualBaseURL && { baseURL: actualBaseURL }),
-                    });
-                    break;
-                case 'groq':
-                    aiProvider = createGroq({
-                        apiKey: actualApiKey,
-                        ...(actualBaseURL && { baseURL: actualBaseURL }),
-                    });
-                    break;
-                case 'openrouter':
-                    aiProvider = createOpenRouter({
-                        apiKey: actualApiKey,
-                        ...(actualBaseURL && { baseURL: actualBaseURL }),
-                    });
-                    break;
-            }
-            providerCache.set(providerCacheKey, aiProvider);
-        }
+        const aiProvider = await getProvider(provider, credentials.apiKey, credentials.baseUrl);
         for (let i = 0; i < items.length; i++) {
             try {
-                const
-
-                const options = this.getNodeParameter('options', i, {});
-                let modelSettings = {};
-                let stopSequences;
-                if (options.stopSequences && (provider === 'google' || provider === 'openrouter')) {
-                    stopSequences = options.stopSequences.split(',').map(s => s.trim()).filter(s => s.length > 0);
-                }
-                if (provider === 'google') {
-                    const safetySettingsRaw = this.getNodeParameter('safetySettings.settings', i, []);
-                    const useSearchGrounding = this.getNodeParameter('useSearchGrounding', i, false);
-                    const cachedContent = this.getNodeParameter('cachedContent', i, '');
-                    const responseModalities = this.getNodeParameter('responseModalities', i, []);
-                    const thinkingBudget = this.getNodeParameter('thinkingBudget', i, 0);
-                    const includeThoughts = this.getNodeParameter('includeThoughts', i, false);
-                    const safetySettings = safetySettingsRaw.map((s) => ({
-                        category: s.category,
-                        threshold: s.threshold,
-                    }));
-                    modelSettings = {
-                        structuredOutputs: operation === 'generateObject',
-                        safetySettings: safetySettings.length > 0 ? safetySettings : undefined,
-                        useSearchGrounding,
-                        ...(cachedContent && { cachedContent }),
-                        ...(responseModalities.length > 0 && { responseModalities }),
-                        ...(thinkingBudget > 0 && {
-                            thinkingConfig: {
-                                thinkingBudget,
-                                includeThoughts,
-                            },
-                        }),
-                    };
-                }
-                else if (provider === 'openrouter') {
-                    modelSettings = {};
-                }
-                const input = await buildInput(this, i);
-                let tools = undefined;
-                if (provider === 'google') {
-                    const googleTools = this.getNodeParameter('googleTools', i, []);
-                    if (googleTools.length > 0) {
-                        tools = {};
-                        const google = require('@ai-sdk/google').google;
-                        if (googleTools.includes('google_search')) {
-                            tools.google_search = google.tools.googleSearch({});
-                        }
-                        if (googleTools.includes('url_context')) {
-                            tools.url_context = google.tools.urlContext({});
-                        }
-                        if (googleTools.includes('code_execution')) {
-                            tools.code_execution = google.tools.codeExecution({});
-                        }
-                    }
-                }
-                const enableStreaming = this.getNodeParameter('enableStreaming', i, false);
-                if (operation === 'generateText') {
-                    const generateTextParams = {
-                        model: aiProvider(model, modelSettings),
-                        messages: input.messages,
-                        maxTokens: options.maxTokens,
-                        temperature: options.temperature,
-                        topP: options.topP,
-                        topK: options.topK,
-                        frequencyPenalty: options.frequencyPenalty,
-                        presencePenalty: options.presencePenalty,
-                        seed: options.seed,
-                        prompt: input.prompt,
-                        system: input.system,
-                        ...(tools && { tools }),
-                    };
-                    if (stopSequences && (provider === 'google' || provider === 'openrouter')) {
-                        generateTextParams.stopSequences = stopSequences;
-                    }
-                    if (enableStreaming) {
-                        const stream = await (0, ai_1.streamText)(generateTextParams);
-                        let fullText = '';
-                        const chunks = [];
-                        for await (const textPart of stream.textStream) {
-                            fullText += textPart;
-                            chunks.push(textPart);
-                        }
-                        const finalUsage = await stream.usage;
-                        for (const chunk of chunks) {
-                            returnData.push({
-                                json: {
-                                    chunk,
-                                    isStreaming: true,
-                                }
-                            });
-                        }
-                        returnData.push({
-                            json: {
-                                text: fullText,
-                                toolCalls: stream.toolCalls || [],
-                                toolResults: stream.toolResults || [],
-                                finishReason: stream.finishReason,
-                                usage: {
-                                    promptTokens: finalUsage.promptTokens,
-                                    completionTokens: finalUsage.completionTokens,
-                                    totalTokens: finalUsage.totalTokens,
-                                },
-                                isStreaming: false,
-                                isFinal: true,
-                            }
-                        });
-                    }
-                    else {
-                        const result = await (0, ai_1.generateText)(generateTextParams);
-                        const formatted = formatTextResult(result, options.includeRequestBody, provider);
-                        if (tools && result.toolCalls) {
-                            formatted.toolCalls = result.toolCalls;
-                            formatted.toolResults = result.toolResults;
-                        }
-                        returnData.push({ json: formatted });
-                    }
-                }
-                else {
-                    const schemaName = this.getNodeParameter('schemaName', i, '');
-                    const schemaDescription = this.getNodeParameter('schemaDescription', i, '');
-                    const rawSchema = this.getNodeParameter('schema', i);
-                    let parsedSchema;
-                    try {
-                        parsedSchema = JSON.parse(rawSchema);
-                    }
-                    catch (err) {
-                        throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'Schema is not valid JSON: ' + err.message);
-                    }
-                    const ajv = new ajv_1.default();
-                    if (!ajv.validateSchema(parsedSchema)) {
-                        throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
-                    }
-                    const generateObjectParams = {
-                        model: aiProvider(model, modelSettings),
-                        schema: (0, ai_1.jsonSchema)(parsedSchema),
-                        schemaName,
-                        schemaDescription,
-                        temperature: options.temperature,
-                        topP: options.topP,
-                        topK: options.topK,
-                        frequencyPenalty: options.frequencyPenalty,
-                        presencePenalty: options.presencePenalty,
-                        seed: options.seed,
-                        ...(stopSequences && (provider === 'google' || provider === 'openrouter') && { stopSequences }),
-                    };
-                    if (input.messages && input.messages.length > 0) {
-                        generateObjectParams.messages = input.messages;
-                    }
-                    else if (input.prompt) {
-                        generateObjectParams.prompt = input.prompt;
-                    }
-                    if (input.system) {
-                        generateObjectParams.system = input.system;
-                    }
-                    const result = await (0, ai_1.generateObject)(generateObjectParams);
-                    const formatted = formatObjectResult(result, options.includeRequestBody);
-                    returnData.push({ json: formatted });
-                }
+                const result = await processItem(this, i, provider, aiProvider);
+                returnData.push(...result);
             }
             catch (error) {
                 if (this.continueOnFail()) {
-                    returnData.push({
+                    returnData.push({
+                        json: {
+                            error: error.message,
+                            itemIndex: i
+                        }
+                    });
                 }
                 else {
                     throw new n8n_workflow_1.NodeOperationError(this.getNode(), error, { itemIndex: i });
@@ -482,4 +480,153 @@ class UniversalAI {
     }
 }
 exports.UniversalAI = UniversalAI;
+async function processItem(exec, index, provider, aiProvider) {
+    const operation = exec.getNodeParameter('operation', index);
+    const model = exec.getNodeParameter('model', index);
+    const options = exec.getNodeParameter('options', index, {});
+    const input = await buildInput(exec, index);
+    const modelSettings = getModelSettings(exec, index, provider, operation, options);
+    return operation === 'generateText'
+        ? await generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options)
+        : await generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options);
+}
+function getModelSettings(exec, index, provider, operation, options) {
+    const settings = {};
+    const stopSequences = parseStopSequences(options.stopSequences);
+    if (stopSequences && (provider === 'google' || provider === 'openrouter')) {
+        settings.stopSequences = stopSequences;
+    }
+    if (provider === 'google') {
+        const safetySettingsRaw = exec.getNodeParameter('safetySettings.settings', index, []);
+        if (safetySettingsRaw.length > 0) {
+            settings.safetySettings = safetySettingsRaw.map((s) => ({
+                category: s.category,
+                threshold: s.threshold,
+            }));
+        }
+        settings.structuredOutputs = operation === 'generateObject';
+        settings.useSearchGrounding = exec.getNodeParameter('useSearchGrounding', index, false);
+        const cachedContent = exec.getNodeParameter('cachedContent', index, '');
+        if (cachedContent) {
+            settings.cachedContent = cachedContent;
+        }
+        const thinkingBudgetValue = Number(exec.getNodeParameter('thinkingBudget', index, 0));
+        if (!Number.isNaN(thinkingBudgetValue) && thinkingBudgetValue > 0) {
+            settings.thinkingConfig = {
+                thinkingBudget: thinkingBudgetValue,
+                includeThoughts: exec.getNodeParameter('includeThoughts', index, false),
+            };
+        }
+        const responseModalities = exec.getNodeParameter('responseModalities', index, []);
+        if (responseModalities.length > 0) {
+            settings.responseModalities = responseModalities;
+        }
+    }
+    return settings;
+}
+async function generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options) {
+    const enableStreaming = exec.getNodeParameter('enableStreaming', index, false);
+    const includeRequestBody = options.includeRequestBody;
+    const tools = provider === 'google' ? await buildGoogleTools(exec, index) : undefined;
+    const params = {
+        model: aiProvider(model, modelSettings),
+        ...input,
+        ...(tools && { tools }),
+    };
+    const numericOptionKeys = [
+        'maxTokens',
+        'temperature',
+        'topP',
+        'topK',
+        'frequencyPenalty',
+        'presencePenalty',
+        'seed',
+    ];
+    for (const key of numericOptionKeys) {
+        const value = options[key];
+        if (value !== undefined && value !== null && value !== '') {
+            params[key] = value;
+        }
+    }
+    if (enableStreaming) {
+        return await handleStreaming(params, provider, includeRequestBody);
+    }
+    const result = await (0, ai_1.generateText)(params);
+    return [{ json: formatTextResult(result, includeRequestBody, provider) }];
+}
+async function buildGoogleTools(exec, index) {
+    const googleTools = exec.getNodeParameter('googleTools', index, []);
+    if (!googleTools || googleTools.length === 0) {
+        return undefined;
+    }
+    const tools = {};
+    const google = require('@ai-sdk/google').google;
+    const toolSet = new Set(googleTools);
+    if (toolSet.has('google_search')) {
+        tools.google_search = google.tools.googleSearch({});
+    }
+    if (toolSet.has('url_context')) {
+        tools.url_context = google.tools.urlContext({});
+    }
+    if (toolSet.has('code_execution')) {
+        tools.code_execution = google.tools.codeExecution({});
+    }
+    return tools;
+}
+async function handleStreaming(params, provider, includeRequestBody) {
+    const stream = await (0, ai_1.streamText)(params);
+    const chunks = [];
+    let fullText = '';
+    for await (const textPart of stream.textStream) {
+        fullText += textPart;
+        chunks.push({ json: { chunk: textPart, isStreaming: true } });
+    }
+    const finalUsage = await stream.usage;
+    const finalJson = {
+        text: fullText,
+        toolCalls: stream.toolCalls || [],
+        toolResults: stream.toolResults || [],
+        finishReason: stream.finishReason,
+        usage: formatUsage({ usage: finalUsage }, provider),
+        isStreaming: false,
+        isFinal: true,
+    };
+    if (includeRequestBody) {
+        const requestMetadata = stream.request ? await stream.request : undefined;
+        if ((requestMetadata === null || requestMetadata === void 0 ? void 0 : requestMetadata.body) !== undefined) {
+            finalJson.request = { body: requestMetadata.body };
+        }
+    }
+    chunks.push({ json: finalJson });
+    return chunks;
+}
+async function generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options) {
+    const schemaName = exec.getNodeParameter('schemaName', index, '');
+    const schemaDescription = exec.getNodeParameter('schemaDescription', index, '');
+    const rawSchema = exec.getNodeParameter('schema', index);
+    const parsedSchema = parseAndValidateSchema(rawSchema, exec);
+    const params = {
+        model: aiProvider(model, modelSettings),
+        schema: (0, ai_1.jsonSchema)(parsedSchema),
+        schemaName,
+        schemaDescription,
+        ...input,
+    };
+    const numericOptionKeys = [
+        'temperature',
+        'topP',
+        'topK',
+        'frequencyPenalty',
+        'presencePenalty',
+        'seed',
+    ];
+    for (const key of numericOptionKeys) {
+        const value = options[key];
+        if (value !== undefined && value !== null && value !== '') {
+            params[key] = value;
+        }
+    }
+    const result = await (0, ai_1.generateObject)(params);
+    return [{ json: formatObjectResult(result, options.includeRequestBody, provider) }];
+}
 //# sourceMappingURL=UniversalAI.node.js.map
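
The functional core of 0.1.22 is the module-private Cache class and the three instances built on it (modelCache, providerCache, schemaCache), which the refactored helpers above use to memoize model lists, provider factories, and parsed JSON Schemas. What follows is a minimal, standalone Node.js sketch of that cache's behavior, not part of the package: the class is not exported from UniversalAI.node.js, so it is re-created here purely for illustration, and the cache keys and values in the demo are invented.

// Standalone sketch mirroring the eviction and expiry behavior of the
// Cache class added in 0.1.22 (re-created for illustration only).
class DemoCache {
    constructor(maxSize = 2, ttl = 5 * 60 * 1000) {
        this.cache = new Map();
        this.totalHits = 0;
        this.totalMisses = 0;
        this.maxSize = maxSize;
        this.ttl = ttl;
    }
    get(key) {
        const item = this.cache.get(key);
        if (!item || Date.now() - item.timestamp > this.ttl) {
            if (item) this.cache.delete(key); // stale entries are dropped lazily, on read
            this.totalMisses++;
            return undefined;
        }
        item.hits++;
        this.totalHits++;
        return item.value;
    }
    set(key, value) {
        if (this.cache.size >= this.maxSize) {
            // when full, evict the entry with the fewest recorded hits
            let minHits = Infinity;
            let keyToDelete;
            for (const [k, v] of this.cache.entries()) {
                if (v.hits < minHits) {
                    minHits = v.hits;
                    keyToDelete = k;
                }
            }
            if (keyToDelete) this.cache.delete(keyToDelete);
        }
        this.cache.set(key, { value, timestamp: Date.now(), hits: 0 });
    }
    getStats() {
        return {
            size: this.cache.size,
            hits: this.totalHits,
            misses: this.totalMisses,
            hitRate: this.totalHits / (this.totalHits + this.totalMisses) || 0,
        };
    }
}

// Demo with a capacity of 2 and made-up keys/values.
const cache = new DemoCache(2);
cache.set('models:groq', ['demo-model-a']);
cache.set('models:google', ['demo-model-b']);
cache.get('models:groq');                  // hit: bumps this entry's hit counter
cache.set('models:openrouter', ['demo']);  // cache full: least-hit key 'models:google' is evicted
console.log(cache.get('models:google'));   // undefined (evicted)
console.log(cache.getStats());             // { size: 2, hits: 1, misses: 1, hitRate: 0.5 }

Note that eviction favors the least-hit entry rather than the least-recently-used one, and expiry is only checked when an entry is read, which keeps set cheap.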