n8n-nodes-vercel-ai-sdk-universal-temp 0.1.21 → 0.1.23

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
@@ -1,151 +1,270 @@
  "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+ })();
  var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.UniversalAI = void 0;
  const n8n_workflow_1 = require("n8n-workflow");
- const { createGoogleGenerativeAI } = require('@ai-sdk/google');
- const { createDeepSeek } = require('@ai-sdk/deepseek');
- const { createGroq } = require('@ai-sdk/groq');
- const { createOpenRouter } = require('@openrouter/ai-sdk-provider');
+ let googleProvider;
+ let deepseekProvider;
+ let groqProvider;
+ let openrouterProvider;
  const ai_1 = require("ai");
  const zod_1 = require("zod");
  const ajv_1 = __importDefault(require("ajv"));
- const model_lists_1 = require("./model-lists");
  const descriptions_1 = require("../shared/descriptions");
- const modelCache = new Map();
- const providerCache = new Map();
+ class Cache {
+ constructor(maxSize = 100, ttl = 5 * 60 * 1000) {
+ this.cache = new Map();
+ this.totalHits = 0;
+ this.totalMisses = 0;
+ this.maxSize = maxSize;
+ this.ttl = ttl;
+ }
+ get(key) {
+ const item = this.cache.get(key);
+ if (!item) {
+ this.totalMisses++;
+ return undefined;
+ }
+ if (Date.now() - item.timestamp > this.ttl) {
+ this.cache.delete(key);
+ this.totalMisses++;
+ return undefined;
+ }
+ item.hits++;
+ this.totalHits++;
+ return item.value;
+ }
+ set(key, value) {
+ if (this.cache.size >= this.maxSize) {
+ let minHits = Infinity;
+ let keyToDelete;
+ for (const [k, v] of this.cache.entries()) {
+ if (v.hits < minHits) {
+ minHits = v.hits;
+ keyToDelete = k;
+ }
+ }
+ if (keyToDelete) {
+ this.cache.delete(keyToDelete);
+ }
+ }
+ this.cache.set(key, { value, timestamp: Date.now(), hits: 0 });
+ }
+ getStats() {
+ return {
+ size: this.cache.size,
+ hits: this.totalHits,
+ misses: this.totalMisses,
+ hitRate: this.totalHits / (this.totalHits + this.totalMisses) || 0
+ };
+ }
+ clear() {
+ this.cache.clear();
+ this.totalHits = 0;
+ this.totalMisses = 0;
+ }
+ }
+ const modelCache = new Cache(50);
+ const providerCache = new Cache(20);
+ const schemaCache = new Cache(30);
+ const messageSchema = zod_1.z.object({
+ role: zod_1.z.enum(['system', 'user', 'assistant']),
+ content: zod_1.z.any(),
+ });
+ const messagesArraySchema = zod_1.z.array(messageSchema);
+ const ajv = new ajv_1.default({
+ allErrors: true,
+ verbose: true,
+ strict: false,
+ });
+ const isUrl = (str) => {
+ if (typeof str !== 'string')
+ return false;
+ return str.startsWith('http://') ||
+ str.startsWith('https://') ||
+ str.startsWith('data:');
+ };
+ const isLikelyBase64 = (str) => {
+ if (str.length % 4 !== 0)
+ return false;
+ if (!/^[A-Za-z0-9+/]*={0,2}$/.test(str))
+ return false;
+ if (str.length > 10000)
+ return true;
+ return true;
+ };
  async function buildInput(exec, itemIndex) {
- var _a;
  const inputType = exec.getNodeParameter('inputType', itemIndex);
  if (inputType === 'prompt') {
- const promptVal = exec.getNodeParameter('prompt', itemIndex);
- const systemVal = exec.getNodeParameter('system', itemIndex);
  return {
- prompt: promptVal,
- system: systemVal,
+ prompt: exec.getNodeParameter('prompt', itemIndex),
+ system: exec.getNodeParameter('system', itemIndex),
  };
  }
- else {
- const messageAsJson = exec.getNodeParameter('messageAsJson', itemIndex, false);
- if (messageAsJson) {
- const rawJson = exec.getNodeParameter('messagesJson', itemIndex);
- let arr;
- try {
- arr = JSON.parse(rawJson);
- }
- catch (error) {
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON in "Messages (JSON)" field: ${error.message}`);
- }
- const parseRes = zod_1.z
- .array(zod_1.z.object({
- role: zod_1.z.enum(['system', 'user', 'assistant']),
- content: zod_1.z.any(),
- }))
- .safeParse(arr);
- if (!parseRes.success) {
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Messages must be an array of objects with role and content.');
- }
- const messages = parseRes.data.map((m) => ({
- role: m.role,
- content: m.content,
- }));
- return { messages };
+ const messageAsJson = exec.getNodeParameter('messageAsJson', itemIndex, false);
+ return messageAsJson
+ ? buildMessagesFromJson(exec, itemIndex)
+ : buildMessagesFromUI(exec, itemIndex);
+ }
+ async function buildMessagesFromJson(exec, itemIndex) {
+ const rawJson = exec.getNodeParameter('messagesJson', itemIndex);
+ try {
+ const parsed = JSON.parse(rawJson);
+ const result = messagesArraySchema.safeParse(parsed);
+ if (!result.success) {
+ throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Messages must be an array of objects with role and content.');
+ }
+ return { messages: result.data };
+ }
+ catch (error) {
+ throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON in "Messages (JSON)" field: ${error.message}`);
+ }
+ }
+ async function buildMessagesFromUI(exec, itemIndex) {
+ var _a;
+ const items = exec.getInputData();
+ const messagesUi = exec.getNodeParameter('messages.messagesUi', itemIndex, []);
+ const builtMessages = [];
+ const itemBinary = items[itemIndex].binary;
+ for (const msg of messagesUi) {
+ const role = msg.role;
+ if (role === 'system') {
+ builtMessages.push({ role, content: msg.systemContent || '' });
+ continue;
+ }
+ const attachments = ((_a = msg.attachments) === null || _a === void 0 ? void 0 : _a.attachment) || [];
+ if (attachments.length === 0) {
+ builtMessages.push({ role, content: msg.content || '' });
  }
  else {
- const items = exec.getInputData();
- const messagesUi = exec.getNodeParameter('messages.messagesUi', itemIndex, []);
- const builtMessages = [];
- for (const msg of messagesUi) {
- const role = msg.role;
- if (role === 'system') {
- builtMessages.push({
- role,
- content: msg.systemContent || '',
- });
- continue;
- }
- const attachments = ((_a = msg.attachments) === null || _a === void 0 ? void 0 : _a.attachment) || [];
- if (attachments.length === 0) {
- builtMessages.push({
- role,
- content: msg.content || '',
- });
- }
- else {
- const parts = [];
- if (msg.content) {
- parts.push({
- type: 'text',
- text: msg.content,
- });
- }
- for (const attachment of attachments) {
- const fileContentInput = attachment.fileContent;
- let detectedMimeType = attachment.mimeType || 'application/octet-stream';
- if (detectedMimeType === 'other') {
- detectedMimeType = attachment.mimeTypeOther || 'application/octet-stream';
- }
- let fileData;
- if (fileContentInput.startsWith('http://') || fileContentInput.startsWith('https://') || fileContentInput.startsWith('data:')) {
- fileData = fileContentInput;
- }
- else {
- const itemBinary = items[itemIndex].binary;
- if (itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput]) {
- const binaryData = itemBinary[fileContentInput];
- fileData = Buffer.from(binaryData.data, 'base64');
- if (!attachment.mimeType && binaryData.mimeType) {
- detectedMimeType = binaryData.mimeType;
- }
- }
- else {
- try {
- fileData = Buffer.from(fileContentInput, 'base64');
- }
- catch (error) {
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid base64 data in attachment for message ${itemIndex + 1}: ${error.message}`);
- }
- }
- }
- if (fileData && !(Buffer.isBuffer(fileData) && fileData.length === 0)) {
- parts.push({
- type: 'file',
- data: fileData,
- mediaType: detectedMimeType,
- });
- }
- }
- if (parts.length > 0) {
- builtMessages.push({
- role,
- content: parts,
- });
- }
- }
+ const messageWithAttachments = await buildMessageWithAttachments(role, msg.content, attachments, itemBinary, exec, itemIndex);
+ if (messageWithAttachments) {
+ builtMessages.push(messageWithAttachments);
  }
- const convertMessagesToModel = exec.getNodeParameter('convertMessagesToModel', itemIndex, false);
- let finalMessages = builtMessages;
- if (convertMessagesToModel) {
- finalMessages = (0, ai_1.convertToModelMessages)(builtMessages);
+ }
+ }
+ const convertMessagesToModel = exec.getNodeParameter('convertMessagesToModel', itemIndex, false);
+ if (convertMessagesToModel) {
+ return { messages: (0, ai_1.convertToModelMessages)(builtMessages) };
+ }
+ return { messages: builtMessages };
+ }
+ async function buildMessageWithAttachments(role, content, attachments, itemBinary, exec, itemIndex) {
+ const parts = [];
+ if (content) {
+ parts.push({ type: 'text', text: content });
+ }
+ const MAX_CONCURRENT_ATTACHMENTS = 3;
+ const processedAttachments = [];
+ for (let i = 0; i < attachments.length; i += MAX_CONCURRENT_ATTACHMENTS) {
+ const batch = attachments.slice(i, i + MAX_CONCURRENT_ATTACHMENTS);
+ const batchPromises = batch.map(attachment => processAttachment(attachment, itemBinary, exec, itemIndex));
+ const batchResults = await Promise.all(batchPromises);
+ processedAttachments.push(...batchResults);
+ }
+ for (const attachment of processedAttachments) {
+ if (attachment) {
+ parts.push(attachment);
+ }
+ }
+ return parts.length > 0 ? { role, content: parts } : null;
+ }
+ async function processAttachment(attachment, itemBinary, exec, itemIndex) {
+ const fileContentInput = attachment.fileContent;
+ if (!fileContentInput || typeof fileContentInput !== 'string')
+ return null;
+ let mimeType = getMimeType(attachment);
+ let fileData;
+ if (isUrl(fileContentInput)) {
+ fileData = fileContentInput;
+ }
+ else {
+ fileData = await getBinaryData(fileContentInput, itemBinary, exec, itemIndex);
+ if (!fileData)
+ return null;
+ const binaryItem = itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput];
+ if (!mimeType && (binaryItem === null || binaryItem === void 0 ? void 0 : binaryItem.mimeType)) {
+ mimeType = binaryItem.mimeType;
+ }
+ }
+ if (!fileData || (Buffer.isBuffer(fileData) && fileData.length === 0)) {
+ return null;
+ }
+ return {
+ type: 'file',
+ data: fileData,
+ mediaType: mimeType || 'application/octet-stream',
+ };
+ }
+ function getMimeType(attachment) {
+ return attachment.mimeType === 'other'
+ ? attachment.mimeTypeOther
+ : attachment.mimeType;
+ }
+ async function getBinaryData(fileContentInput, itemBinary, exec, itemIndex) {
+ if (itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput]) {
+ const binaryData = itemBinary[fileContentInput];
+ return Buffer.from(binaryData.data, 'base64');
+ }
+ try {
+ if (isLikelyBase64(fileContentInput)) {
+ const buffer = Buffer.from(fileContentInput, 'base64');
+ if (buffer.length > 0 && buffer.length < 50 * 1024 * 1024) {
+ return buffer;
  }
- return { messages: finalMessages };
  }
  }
+ catch (error) {
+ throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid file content for attachment: ${error.message}`);
+ }
+ return null;
  }
  function formatTextResult(result, includeRequestBody, provider) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1;
- let text = result.text;
- let reasoning = result.reasoning;
+ var _a, _b, _c;
+ let { text, reasoning } = result;
  if (provider === 'groq' && text.includes('<think>')) {
  const thinkMatch = text.match(/<think>(.*?)<\/think>/s);
  if (thinkMatch) {
- reasoning = [{ text: thinkMatch[1].trim() }];
+ reasoning = [{ type: 'reasoning', text: thinkMatch[1].trim() }];
  text = text.replace(/<think>.*?<\/think>\s*/s, '').trim();
  }
  }
- const out = {
+ const baseResult = {
  text,
  reasoning,
  reasoningText: result.reasoningText,
@@ -154,72 +273,150 @@ function formatTextResult(result, includeRequestBody, provider) {
  finishReason: result.finishReason,
  sources: result.sources || [],
  files: result.files || [],
- usage: {
- promptTokens: (_a = result.usage) === null || _a === void 0 ? void 0 : _a.promptTokens,
- completionTokens: (_b = result.usage) === null || _b === void 0 ? void 0 : _b.completionTokens,
- totalTokens: (_c = result.usage) === null || _c === void 0 ? void 0 : _c.totalTokens,
- ...(provider === 'deepseek' && {
- cacheMetrics: {
- promptCacheHitTokens: (_e = (_d = result.experimental_providerMetadata) === null || _d === void 0 ? void 0 : _d.deepseek) === null || _e === void 0 ? void 0 : _e.promptCacheHitTokens,
- promptCacheMissTokens: (_g = (_f = result.experimental_providerMetadata) === null || _f === void 0 ? void 0 : _f.deepseek) === null || _g === void 0 ? void 0 : _g.promptCacheMissTokens,
- },
- }),
- ...(provider === 'groq' && {
- cacheMetrics: {
- promptCacheHitTokens: (_j = (_h = result.experimental_providerMetadata) === null || _h === void 0 ? void 0 : _h.groq) === null || _j === void 0 ? void 0 : _j.promptCacheHitTokens,
- promptCacheMissTokens: (_l = (_k = result.experimental_providerMetadata) === null || _k === void 0 ? void 0 : _k.groq) === null || _l === void 0 ? void 0 : _l.promptCacheMissTokens,
- },
- }),
- ...(provider === 'google' && {
- cacheMetrics: {
- cachedContentTokenCount: (_p = (_o = (_m = result.experimental_providerMetadata) === null || _m === void 0 ? void 0 : _m.google) === null || _o === void 0 ? void 0 : _o.usageMetadata) === null || _p === void 0 ? void 0 : _p.cachedContentTokenCount,
- thoughtsTokenCount: (_s = (_r = (_q = result.experimental_providerMetadata) === null || _q === void 0 ? void 0 : _q.google) === null || _r === void 0 ? void 0 : _r.usageMetadata) === null || _s === void 0 ? void 0 : _s.thoughtsTokenCount,
- },
- }),
- },
- response: {
- id: (_t = result.response) === null || _t === void 0 ? void 0 : _t.id,
- modelId: (_u = result.response) === null || _u === void 0 ? void 0 : _u.modelId,
- timestamp: (_v = result.response) === null || _v === void 0 ? void 0 : _v.timestamp,
- headers: (_w = result.response) === null || _w === void 0 ? void 0 : _w.headers,
- },
+ usage: formatUsage(result, provider),
+ response: formatResponse(result),
  steps: result.steps || [],
  warnings: result.warnings || [],
  experimental_providerMetadata: result.experimental_providerMetadata,
- ...(provider === 'google' && {
- groundingMetadata: (_y = (_x = result.experimental_providerMetadata) === null || _x === void 0 ? void 0 : _x.google) === null || _y === void 0 ? void 0 : _y.groundingMetadata,
- safetyRatings: (_0 = (_z = result.experimental_providerMetadata) === null || _z === void 0 ? void 0 : _z.google) === null || _0 === void 0 ? void 0 : _0.safetyRatings,
- }),
  };
+ if (provider === 'google') {
+ const providerMetadata = result.experimental_providerMetadata;
+ baseResult.groundingMetadata = (_a = providerMetadata === null || providerMetadata === void 0 ? void 0 : providerMetadata.google) === null || _a === void 0 ? void 0 : _a.groundingMetadata;
+ baseResult.safetyRatings = (_b = providerMetadata === null || providerMetadata === void 0 ? void 0 : providerMetadata.google) === null || _b === void 0 ? void 0 : _b.safetyRatings;
+ }
  if (includeRequestBody) {
- out.request = { body: (_1 = result.request) === null || _1 === void 0 ? void 0 : _1.body };
+ const requestBody = (_c = result.request) === null || _c === void 0 ? void 0 : _c.body;
+ if (requestBody !== undefined) {
+ baseResult.request = { body: requestBody };
+ }
  }
- return out;
+ return baseResult;
  }
- function formatObjectResult(result, includeRequestBody) {
- var _a, _b, _c, _d, _e, _f, _g, _h;
+ function formatObjectResult(result, includeRequestBody, provider) {
+ var _a;
  const out = {
  object: result.object,
  finishReason: result.finishReason,
- usage: {
- promptTokens: (_a = result.usage) === null || _a === void 0 ? void 0 : _a.promptTokens,
- completionTokens: (_b = result.usage) === null || _b === void 0 ? void 0 : _b.completionTokens,
- totalTokens: (_c = result.usage) === null || _c === void 0 ? void 0 : _c.totalTokens,
- },
- response: {
- id: (_d = result.response) === null || _d === void 0 ? void 0 : _d.id,
- modelId: (_e = result.response) === null || _e === void 0 ? void 0 : _e.modelId,
- timestamp: (_f = result.response) === null || _f === void 0 ? void 0 : _f.timestamp,
- headers: (_g = result.response) === null || _g === void 0 ? void 0 : _g.headers,
- },
+ usage: formatUsage(result, provider),
+ response: formatResponse(result),
  warnings: result.warnings || [],
  experimental_providerMetadata: result.experimental_providerMetadata,
  };
  if (includeRequestBody) {
- out.request = { body: (_h = result.request) === null || _h === void 0 ? void 0 : _h.body };
+ out.request = { body: (_a = result.request) === null || _a === void 0 ? void 0 : _a.body };
  }
  return out;
  }
+ function formatUsage(result, provider) {
+ var _a, _b, _c;
+ const usage = {
+ promptTokens: (_a = result.usage) === null || _a === void 0 ? void 0 : _a.promptTokens,
+ completionTokens: (_b = result.usage) === null || _b === void 0 ? void 0 : _b.completionTokens,
+ totalTokens: (_c = result.usage) === null || _c === void 0 ? void 0 : _c.totalTokens,
+ };
+ const cacheMetrics = getCacheMetrics(result, provider);
+ if (Object.keys(cacheMetrics).length > 0) {
+ usage.cacheMetrics = cacheMetrics;
+ }
+ return usage;
+ }
+ function getCacheMetrics(result, provider) {
+ var _a, _b, _c, _d, _e, _f, _g, _h;
+ const metadata = result.experimental_providerMetadata;
+ switch (provider) {
+ case 'deepseek':
+ return {
+ promptCacheHitTokens: (_a = metadata === null || metadata === void 0 ? void 0 : metadata.deepseek) === null || _a === void 0 ? void 0 : _a.promptCacheHitTokens,
+ promptCacheMissTokens: (_b = metadata === null || metadata === void 0 ? void 0 : metadata.deepseek) === null || _b === void 0 ? void 0 : _b.promptCacheMissTokens,
+ };
+ case 'groq':
+ return {
+ promptCacheHitTokens: (_c = metadata === null || metadata === void 0 ? void 0 : metadata.groq) === null || _c === void 0 ? void 0 : _c.promptCacheHitTokens,
+ promptCacheMissTokens: (_d = metadata === null || metadata === void 0 ? void 0 : metadata.groq) === null || _d === void 0 ? void 0 : _d.promptCacheMissTokens,
+ };
+ case 'google':
+ return {
+ cachedContentTokenCount: (_f = (_e = metadata === null || metadata === void 0 ? void 0 : metadata.google) === null || _e === void 0 ? void 0 : _e.usageMetadata) === null || _f === void 0 ? void 0 : _f.cachedContentTokenCount,
+ thoughtsTokenCount: (_h = (_g = metadata === null || metadata === void 0 ? void 0 : metadata.google) === null || _g === void 0 ? void 0 : _g.usageMetadata) === null || _h === void 0 ? void 0 : _h.thoughtsTokenCount,
+ };
+ default:
+ return {};
+ }
+ }
+ function formatResponse(result) {
+ var _a, _b, _c, _d;
+ return {
+ id: (_a = result.response) === null || _a === void 0 ? void 0 : _a.id,
+ modelId: (_b = result.response) === null || _b === void 0 ? void 0 : _b.modelId,
+ timestamp: (_c = result.response) === null || _c === void 0 ? void 0 : _c.timestamp,
+ headers: (_d = result.response) === null || _d === void 0 ? void 0 : _d.headers,
+ };
+ }
+ async function getProvider(provider, apiKey, baseURL) {
+ const cacheKey = `${provider}:${apiKey}:${baseURL || ''}`;
+ const cached = providerCache.get(cacheKey);
+ if (cached)
+ return cached;
+ let providerInstance;
+ try {
+ switch (provider) {
+ case 'google':
+ if (!googleProvider) {
+ googleProvider = require('@ai-sdk/google').createGoogleGenerativeAI;
+ }
+ providerInstance = googleProvider({ apiKey, ...(baseURL && { baseURL }) });
+ break;
+ case 'deepseek':
+ if (!deepseekProvider) {
+ deepseekProvider = require('@ai-sdk/deepseek').createDeepSeek;
+ }
+ providerInstance = deepseekProvider({ apiKey, ...(baseURL && { baseURL }) });
+ break;
+ case 'groq':
+ if (!groqProvider) {
+ groqProvider = require('@ai-sdk/groq').createGroq;
+ }
+ providerInstance = groqProvider({ apiKey, ...(baseURL && { baseURL }) });
+ break;
+ case 'openrouter':
+ if (!openrouterProvider) {
+ openrouterProvider = require('@openrouter/ai-sdk-provider').createOpenRouter;
+ }
+ providerInstance = openrouterProvider({ apiKey, ...(baseURL && { baseURL }) });
+ break;
+ default:
+ throw new Error(`Unsupported provider: ${provider}`);
+ }
+ providerCache.set(cacheKey, providerInstance);
+ return providerInstance;
+ }
+ catch (error) {
+ throw new Error(`Failed to initialize ${provider} provider: ${error.message}`);
+ }
+ }
+ function parseAndValidateSchema(rawSchema, exec) {
+ const cacheKey = `schema:${Buffer.from(rawSchema).toString('base64').substring(0, 50)}`;
+ const cached = schemaCache.get(cacheKey);
+ if (cached)
+ return cached;
+ let parsedSchema;
+ try {
+ parsedSchema = JSON.parse(rawSchema);
+ }
+ catch (err) {
+ throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Schema is not valid JSON: ' + err.message);
+ }
+ if (!ajv.validateSchema(parsedSchema)) {
+ throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
+ }
+ schemaCache.set(cacheKey, parsedSchema);
+ return parsedSchema;
+ }
+ function parseStopSequences(stopSequencesStr) {
+ if (!stopSequencesStr)
+ return undefined;
+ return stopSequencesStr.split(',').map(s => s.trim()).filter(s => s.length > 0);
+ }
  class UniversalAI {
  constructor() {
  this.description = descriptions_1.UNIVERSAL_AI_DESCRIPTION;
@@ -227,25 +424,18 @@ class UniversalAI {
  loadOptions: {
  async getModels() {
  const provider = this.getCurrentNodeParameter('provider');
- if (modelCache.has(provider)) {
- return modelCache.get(provider);
- }
- let models = [];
- switch (provider) {
- case 'google':
- models = model_lists_1.GOOGLE_GEMINI_MODELS;
- break;
- case 'deepseek':
- models = model_lists_1.DEEPSEEK_MODELS;
- break;
- case 'groq':
- models = model_lists_1.GROQ_MODELS;
- break;
- case 'openrouter':
- models = model_lists_1.OPENROUTER_MODELS;
- break;
- }
- modelCache.set(provider, models);
+ const cacheKey = `models:${provider}`;
+ const cached = modelCache.get(cacheKey);
+ if (cached)
+ return cached;
+ const { OPENROUTER_MODELS, GOOGLE_GEMINI_MODELS, DEEPSEEK_MODELS, GROQ_MODELS } = await Promise.resolve().then(() => __importStar(require('./model-lists')));
+ const models = {
+ google: GOOGLE_GEMINI_MODELS,
+ deepseek: DEEPSEEK_MODELS,
+ groq: GROQ_MODELS,
+ openrouter: OPENROUTER_MODELS,
+ }[provider] || [];
+ modelCache.set(cacheKey, models);
  return models;
  },
  },
@@ -255,226 +445,38 @@ class UniversalAI {
  const items = this.getInputData();
  const returnData = [];
  const provider = this.getNodeParameter('provider', 0);
- let credentials = null;
- switch (provider) {
- case 'google':
- credentials = await this.getCredentials('googleGenerativeAIApi');
- break;
- case 'deepseek':
- credentials = await this.getCredentials('deepSeekApi');
- break;
- case 'groq':
- credentials = await this.getCredentials('groqApi');
- break;
- case 'openrouter':
- credentials = await this.getCredentials('openRouterApi');
- break;
+ const credentialType = {
+ google: 'googleGenerativeAIApi',
+ deepseek: 'deepSeekApi',
+ groq: 'groqApi',
+ openrouter: 'openRouterApi',
+ }[provider];
+ if (!credentialType) {
+ throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Unsupported provider: ${provider}`);
  }
+ const credentials = await this.getCredentials(credentialType);
  if (!(credentials === null || credentials === void 0 ? void 0 : credentials.apiKey)) {
  throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'No API key provided in credentials');
  }
- const actualApiKey = credentials.apiKey;
- const actualBaseURL = credentials.baseUrl;
- const providerCacheKey = `${provider}:${actualApiKey || ''}:${actualBaseURL || ''}`;
- let aiProvider;
- if (providerCache.has(providerCacheKey)) {
- aiProvider = providerCache.get(providerCacheKey);
- }
- else {
- switch (provider) {
- case 'google':
- aiProvider = createGoogleGenerativeAI({
- apiKey: actualApiKey,
- ...(actualBaseURL && { baseURL: actualBaseURL }),
- });
- break;
- case 'deepseek':
- aiProvider = createDeepSeek({
- apiKey: actualApiKey,
- ...(actualBaseURL && { baseURL: actualBaseURL }),
- });
- break;
- case 'groq':
- aiProvider = createGroq({
- apiKey: actualApiKey,
- ...(actualBaseURL && { baseURL: actualBaseURL }),
- });
- break;
- case 'openrouter':
- aiProvider = createOpenRouter({
- apiKey: actualApiKey,
- ...(actualBaseURL && { baseURL: actualBaseURL }),
- });
- break;
- }
- providerCache.set(providerCacheKey, aiProvider);
- }
+ const aiProvider = await getProvider(provider, credentials.apiKey, credentials.baseUrl);
  for (let i = 0; i < items.length; i++) {
  try {
- const operation = this.getNodeParameter('operation', i);
- const model = this.getNodeParameter('model', i);
- const options = this.getNodeParameter('options', i, {});
- let modelSettings = {};
- let stopSequences;
- if (options.stopSequences && (provider === 'google' || provider === 'openrouter')) {
- stopSequences = options.stopSequences.split(',').map(s => s.trim()).filter(s => s.length > 0);
- }
- if (provider === 'google') {
- const safetySettingsRaw = this.getNodeParameter('safetySettings.settings', i, []);
- const useSearchGrounding = this.getNodeParameter('useSearchGrounding', i, false);
- const cachedContent = this.getNodeParameter('cachedContent', i, '');
- const responseModalities = this.getNodeParameter('responseModalities', i, []);
- const thinkingBudget = this.getNodeParameter('thinkingBudget', i, 0);
- const includeThoughts = this.getNodeParameter('includeThoughts', i, false);
- const safetySettings = safetySettingsRaw.map((s) => ({
- category: s.category,
- threshold: s.threshold,
- }));
- modelSettings = {
- structuredOutputs: operation === 'generateObject',
- safetySettings: safetySettings.length > 0 ? safetySettings : undefined,
- useSearchGrounding,
- ...(cachedContent && { cachedContent }),
- ...(responseModalities.length > 0 && { responseModalities }),
- ...(thinkingBudget > 0 && {
- thinkingConfig: {
- thinkingBudget,
- includeThoughts,
- },
- }),
- };
- }
- else if (provider === 'openrouter') {
- modelSettings = {};
- }
- const input = await buildInput(this, i);
- let tools = undefined;
- if (provider === 'google') {
- const googleTools = this.getNodeParameter('googleTools', i, []);
- if (googleTools.length > 0) {
- tools = {};
- const google = require('@ai-sdk/google').google;
- if (googleTools.includes('google_search')) {
- tools.google_search = google.tools.googleSearch({});
- }
- if (googleTools.includes('url_context')) {
- tools.url_context = google.tools.urlContext({});
- }
- if (googleTools.includes('code_execution')) {
- tools.code_execution = google.tools.codeExecution({});
- }
- }
- }
- const enableStreaming = this.getNodeParameter('enableStreaming', i, false);
- if (operation === 'generateText') {
- const generateTextParams = {
- model: aiProvider(model, modelSettings),
- messages: input.messages,
- maxTokens: options.maxTokens,
- temperature: options.temperature,
- topP: options.topP,
- topK: options.topK,
- frequencyPenalty: options.frequencyPenalty,
- presencePenalty: options.presencePenalty,
- seed: options.seed,
- prompt: input.prompt,
- system: input.system,
- ...(tools && { tools }),
- };
- if (stopSequences && (provider === 'google' || provider === 'openrouter')) {
- generateTextParams.stopSequences = stopSequences;
- }
- if (enableStreaming) {
- const stream = await (0, ai_1.streamText)(generateTextParams);
- let fullText = '';
- const chunks = [];
- for await (const textPart of stream.textStream) {
- fullText += textPart;
- chunks.push(textPart);
- }
- const finalUsage = await stream.usage;
- for (const chunk of chunks) {
- returnData.push({
- json: {
- chunk,
- isStreaming: true,
- }
- });
- }
- returnData.push({
- json: {
- text: fullText,
- toolCalls: stream.toolCalls || [],
- toolResults: stream.toolResults || [],
- finishReason: stream.finishReason,
- usage: {
- promptTokens: finalUsage.promptTokens,
- completionTokens: finalUsage.completionTokens,
- totalTokens: finalUsage.totalTokens,
- },
- isStreaming: false,
- isFinal: true,
- }
- });
- }
- else {
- const result = await (0, ai_1.generateText)(generateTextParams);
- const formatted = formatTextResult(result, options.includeRequestBody, provider);
- if (tools && result.toolCalls) {
- formatted.toolCalls = result.toolCalls;
- formatted.toolResults = result.toolResults;
- }
- returnData.push({ json: formatted });
- }
- }
- else {
- const schemaName = this.getNodeParameter('schemaName', i, '');
- const schemaDescription = this.getNodeParameter('schemaDescription', i, '');
- const rawSchema = this.getNodeParameter('schema', i);
- let parsedSchema;
- try {
- parsedSchema = JSON.parse(rawSchema);
- }
- catch (err) {
- throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'Schema is not valid JSON: ' + err.message);
- }
- const ajv = new ajv_1.default();
- if (!ajv.validateSchema(parsedSchema)) {
- throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
- }
- const generateObjectParams = {
- model: aiProvider(model, modelSettings),
- schema: (0, ai_1.jsonSchema)(parsedSchema),
- schemaName,
- schemaDescription,
- temperature: options.temperature,
- topP: options.topP,
- topK: options.topK,
- frequencyPenalty: options.frequencyPenalty,
- presencePenalty: options.presencePenalty,
- seed: options.seed,
- ...(stopSequences && (provider === 'google' || provider === 'openrouter') && { stopSequences }),
- };
- if (input.messages && input.messages.length > 0) {
- generateObjectParams.messages = input.messages;
- }
- else if (input.prompt) {
- generateObjectParams.prompt = input.prompt;
- }
- if (input.system) {
- generateObjectParams.system = input.system;
- }
- const result = await (0, ai_1.generateObject)(generateObjectParams);
- const formatted = formatObjectResult(result, options.includeRequestBody);
- returnData.push({ json: formatted });
- }
+ const result = await processItem(this, i, provider, aiProvider);
+ returnData.push(...result);
  }
  catch (error) {
+ const errorMessage = error instanceof Error ? error.message : String(error);
+ const errorData = {
+ error: errorMessage,
+ itemIndex: i,
+ timestamp: new Date().toISOString(),
+ nodeName: this.getNode().name,
+ };
  if (this.continueOnFail()) {
- returnData.push({ json: { error: error.message } });
+ returnData.push({ json: errorData });
  }
  else {
- throw new n8n_workflow_1.NodeOperationError(this.getNode(), error, { itemIndex: i });
+ throw new n8n_workflow_1.NodeOperationError(this.getNode(), errorMessage, errorData);
  }
  }
  }
@@ -482,4 +484,153 @@ class UniversalAI {
  }
  }
  exports.UniversalAI = UniversalAI;
+ async function processItem(exec, index, provider, aiProvider) {
+ const operation = exec.getNodeParameter('operation', index);
+ const model = exec.getNodeParameter('model', index);
+ const options = exec.getNodeParameter('options', index, {});
+ const input = await buildInput(exec, index);
+ const modelSettings = getModelSettings(exec, index, provider, operation, options);
+ return operation === 'generateText'
+ ? await generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options)
+ : await generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options);
+ }
+ function getModelSettings(exec, index, provider, operation, options) {
+ const settings = {};
+ const stopSequences = parseStopSequences(options.stopSequences);
+ if (stopSequences && (provider === 'google' || provider === 'openrouter')) {
+ settings.stopSequences = stopSequences;
+ }
+ if (provider === 'google') {
+ const safetySettingsRaw = exec.getNodeParameter('safetySettings.settings', index, []);
+ if (safetySettingsRaw.length > 0) {
+ settings.safetySettings = safetySettingsRaw.map((s) => ({
+ category: s.category,
+ threshold: s.threshold,
+ }));
+ }
+ settings.structuredOutputs = operation === 'generateObject';
+ settings.useSearchGrounding = exec.getNodeParameter('useSearchGrounding', index, false);
+ const cachedContent = exec.getNodeParameter('cachedContent', index, '');
+ if (cachedContent) {
+ settings.cachedContent = cachedContent;
+ }
+ const thinkingBudgetValue = Number(exec.getNodeParameter('thinkingBudget', index, 0));
+ if (!Number.isNaN(thinkingBudgetValue) && thinkingBudgetValue > 0) {
+ settings.thinkingConfig = {
+ thinkingBudget: thinkingBudgetValue,
+ includeThoughts: exec.getNodeParameter('includeThoughts', index, false),
+ };
+ }
+ const responseModalities = exec.getNodeParameter('responseModalities', index, []);
+ if (responseModalities.length > 0) {
+ settings.responseModalities = responseModalities;
+ }
+ }
+ return settings;
+ }
+ async function generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options) {
+ const enableStreaming = exec.getNodeParameter('enableStreaming', index, false);
+ const includeRequestBody = options.includeRequestBody;
+ const tools = provider === 'google' ? await buildGoogleTools(exec, index) : undefined;
+ const params = {
+ model: aiProvider(model, modelSettings),
+ ...input,
+ ...(tools && { tools }),
+ };
+ const numericOptionKeys = [
+ 'maxTokens',
+ 'temperature',
+ 'topP',
+ 'topK',
+ 'frequencyPenalty',
+ 'presencePenalty',
+ 'seed',
+ ];
+ for (const key of numericOptionKeys) {
+ const value = options[key];
+ if (value !== undefined && value !== null && value !== '') {
+ params[key] = value;
+ }
+ }
+ if (enableStreaming) {
+ return await handleStreaming(params, provider, includeRequestBody);
+ }
+ const result = await (0, ai_1.generateText)(params);
+ return [{ json: formatTextResult(result, includeRequestBody, provider) }];
+ }
+ async function buildGoogleTools(exec, index) {
+ const googleTools = exec.getNodeParameter('googleTools', index, []);
+ if (!googleTools || googleTools.length === 0) {
+ return undefined;
+ }
+ const tools = {};
+ const google = require('@ai-sdk/google').google;
+ const toolSet = new Set(googleTools);
+ if (toolSet.has('google_search')) {
+ tools.google_search = google.tools.googleSearch({});
+ }
+ if (toolSet.has('url_context')) {
+ tools.url_context = google.tools.urlContext({});
+ }
+ if (toolSet.has('code_execution')) {
+ tools.code_execution = google.tools.codeExecution({});
+ }
+ return tools;
+ }
+ async function handleStreaming(params, provider, includeRequestBody) {
+ const stream = await (0, ai_1.streamText)(params);
+ const chunks = [];
+ let fullText = '';
+ for await (const textPart of stream.textStream) {
+ fullText += textPart;
+ chunks.push({ json: { chunk: textPart, isStreaming: true } });
+ }
+ const finalUsage = await stream.usage;
+ const finalJson = {
+ text: fullText,
+ toolCalls: stream.toolCalls || [],
+ toolResults: stream.toolResults || [],
+ finishReason: stream.finishReason,
+ usage: formatUsage({ usage: finalUsage }, provider),
+ isStreaming: false,
+ isFinal: true,
+ };
+ if (includeRequestBody) {
+ const requestMetadata = stream.request ? await stream.request : undefined;
+ if ((requestMetadata === null || requestMetadata === void 0 ? void 0 : requestMetadata.body) !== undefined) {
+ finalJson.request = { body: requestMetadata.body };
+ }
+ }
+ chunks.push({ json: finalJson });
+ return chunks;
+ }
+ async function generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options) {
+ const schemaName = exec.getNodeParameter('schemaName', index, '');
+ const schemaDescription = exec.getNodeParameter('schemaDescription', index, '');
+ const rawSchema = exec.getNodeParameter('schema', index);
+ const parsedSchema = parseAndValidateSchema(rawSchema, exec);
+ const params = {
+ model: aiProvider(model, modelSettings),
+ schema: (0, ai_1.jsonSchema)(parsedSchema),
+ schemaName,
+ schemaDescription,
+ ...input,
+ };
+ const numericOptionKeys = [
+ 'temperature',
+ 'topP',
+ 'topK',
+ 'frequencyPenalty',
+ 'presencePenalty',
+ 'seed',
+ ];
+ for (const key of numericOptionKeys) {
+ const value = options[key];
+ if (value !== undefined && value !== null && value !== '') {
+ params[key] = value;
+ }
+ }
+ const result = await (0, ai_1.generateObject)(params);
+ return [{ json: formatObjectResult(result, options.includeRequestBody, provider) }];
+ }
  //# sourceMappingURL=UniversalAI.node.js.map
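For orientation, a minimal TypeScript sketch of the caching behavior introduced in 0.1.23 (entries expire after a TTL; when the cache is full, the entry with the fewest hits is evicted). Names here are illustrative and do not appear in the package:

// Illustrative sketch only; class and variable names are hypothetical.
interface CacheEntry<V> {
    value: V;
    timestamp: number; // set on insert, used for TTL expiry
    hits: number;      // incremented on each get, used to pick an eviction victim
}

class TtlLeastHitsCache<V> {
    private store = new Map<string, CacheEntry<V>>();

    constructor(private maxSize = 100, private ttlMs = 5 * 60 * 1000) {}

    get(key: string): V | undefined {
        const item = this.store.get(key);
        if (!item) return undefined;
        if (Date.now() - item.timestamp > this.ttlMs) {
            // Expired entries are dropped lazily on read.
            this.store.delete(key);
            return undefined;
        }
        item.hits++;
        return item.value;
    }

    set(key: string, value: V): void {
        if (this.store.size >= this.maxSize) {
            // Evict the entry with the fewest hits to make room.
            let minHits = Infinity;
            let victim: string | undefined;
            for (const [k, v] of this.store) {
                if (v.hits < minHits) {
                    minHits = v.hits;
                    victim = k;
                }
            }
            if (victim !== undefined) this.store.delete(victim);
        }
        this.store.set(key, { value, timestamp: Date.now(), hits: 0 });
    }
}

// Usage pattern mirroring the node: caches are keyed so that a change in
// credentials or base URL yields a distinct entry.
const providerCache = new TtlLeastHitsCache<object>(20);

In the compiled output above, the same structure backs modelCache, providerCache, and schemaCache with maximum sizes of 50, 20, and 30 entries, so repeated executions reuse provider clients and parsed schemas instead of recreating them per item.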