n8n-nodes-vercel-ai-sdk-universal-temp 0.1.51 → 0.1.53

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
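Among the functional changes between 0.1.51 and 0.1.53 is a small error hierarchy (UniversalAIError with CacheError, ProviderError, and ValidationError subclasses) whose machine-readable code is surfaced in the node's continue-on-fail output. A minimal, illustrative sketch of how such an error can be inspected, assuming only the class definitions shown in the diff below (the calling code here is not part of the package):

// Illustrative sketch only; ValidationError and UniversalAIError are defined in the diff below.
const err = new ValidationError('Messages JSON field is empty');
// The base constructor tags each subclass with a code and sets name to 'UniversalAIError':
// err.code === 'VALIDATION_ERROR', err.name === 'UniversalAIError'
if (err instanceof UniversalAIError) {
    console.warn(`${err.code}: ${err.message}`);
}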
@@ -41,15 +41,42 @@ const n8n_workflow_1 = require("n8n-workflow");
  const ai_1 = require("ai");
  const zod_1 = require("zod");
  const ajv_1 = __importDefault(require("ajv"));
+ const crypto_1 = require("crypto");
  const descriptions_1 = require("../shared/descriptions");
+ class UniversalAIError extends Error {
+ constructor(message, code, context) {
+ super(message);
+ this.code = code;
+ this.context = context;
+ this.name = 'UniversalAIError';
+ }
+ }
+ class CacheError extends UniversalAIError {
+ constructor(message, context) {
+ super(message, 'CACHE_ERROR', context);
+ }
+ }
+ class ProviderError extends UniversalAIError {
+ constructor(message, context) {
+ super(message, 'PROVIDER_ERROR', context);
+ }
+ }
+ class ValidationError extends UniversalAIError {
+ constructor(message, context) {
+ super(message, 'VALIDATION_ERROR', context);
+ }
+ }
  class Cache {
- constructor(maxSize = 100, ttl = 5 * 60 * 1000) {
+ constructor(_name, maxSize = 100, ttl = 5 * 60 * 1000) {
  this.cache = new Map();
  this.totalHits = 0;
  this.totalMisses = 0;
  this.totalEvictions = 0;
  this.maxSize = maxSize;
  this.ttl = ttl;
+ if (ttl > 0) {
+ setInterval(() => this.cleanupExpired(), Math.min(ttl, 60000));
+ }
  }
  get(key) {
  const item = this.cache.get(key);
@@ -65,6 +92,7 @@ class Cache {
  return undefined;
  }
  item.hits++;
+ item.lastAccessed = now;
  this.totalHits++;
  return item.value;
  }
@@ -72,20 +100,42 @@ class Cache {
  const now = Date.now();
  const expiresAt = customTTL ? now + customTTL : (this.ttl > 0 ? now + this.ttl : undefined);
  if (this.cache.size >= this.maxSize) {
- let oldestKey;
- let oldestTime = now;
- for (const [k, v] of this.cache.entries()) {
- if (v.timestamp < oldestTime) {
- oldestTime = v.timestamp;
- oldestKey = k;
- }
+ this.evictLRU();
+ }
+ this.cache.set(key, {
+ value,
+ timestamp: now,
+ hits: 0,
+ expiresAt,
+ lastAccessed: now
+ });
+ }
+ evictLRU() {
+ let lruKey;
+ let oldestAccess = Date.now();
+ for (const [key, item] of this.cache.entries()) {
+ if (item.lastAccessed < oldestAccess) {
+ oldestAccess = item.lastAccessed;
+ lruKey = key;
  }
- if (oldestKey) {
- this.cache.delete(oldestKey);
- this.totalEvictions++;
+ }
+ if (lruKey) {
+ this.cache.delete(lruKey);
+ this.totalEvictions++;
+ }
+ }
+ cleanupExpired() {
+ const now = Date.now();
+ let cleaned = 0;
+ for (const [key, item] of this.cache.entries()) {
+ if (item.expiresAt && now > item.expiresAt) {
+ this.cache.delete(key);
+ cleaned++;
  }
  }
- this.cache.set(key, { value, timestamp: now, hits: 0, expiresAt });
+ if (cleaned > 0) {
+ this.totalEvictions += cleaned;
+ }
  }
  delete(key) {
  return this.cache.delete(key);
@@ -96,93 +146,111 @@ class Cache {
  this.totalMisses = 0;
  this.totalEvictions = 0;
  }
+ entries() {
+ return this.cache.entries();
+ }
+ getMetadata(key) {
+ return this.cache.get(key);
+ }
  getStats() {
+ const totalRequests = this.totalHits + this.totalMisses;
+ let totalSize = 0;
+ for (const item of this.cache.values()) {
+ totalSize += this.estimateSize(item.value);
+ }
  return {
  size: this.cache.size,
  maxSize: this.maxSize,
- hitRate: this.totalHits / (this.totalHits + this.totalMisses) || 0,
+ hitRate: totalRequests > 0 ? this.totalHits / totalRequests : 0,
  totalHits: this.totalHits,
  totalMisses: this.totalMisses,
  totalEvictions: this.totalEvictions,
  ttl: this.ttl,
+ averageItemSize: this.cache.size > 0 ? totalSize / this.cache.size : 0,
  };
  }
+ estimateSize(value) {
+ try {
+ return JSON.stringify(value).length;
+ }
+ catch {
+ return 1024;
+ }
+ }
  }
- const modelCache = new Cache(50);
- const providerCache = new Cache(20);
- const schemaCache = new Cache(30);
- const googleCacheClients = new Cache(10, 60 * 60 * 1000);
- const googleCachedContexts = new Cache(50, 55 * 60 * 1000);
+ function generateCacheKey(data, prefix = '') {
+ const dataStr = typeof data === 'string' ? data : JSON.stringify(data);
+ const hash = (0, crypto_1.createHash)('sha256').update(dataStr).digest('hex').substring(0, 16);
+ return `${prefix}${hash}`;
+ }
+ const modelCache = new Cache('models', 50, 10 * 60 * 1000);
+ const providerCache = new Cache('providers', 20, 30 * 60 * 1000);
+ const schemaCache = new Cache('schemas', 30, 60 * 60 * 1000);
+ const googleCacheClients = new Cache('google_clients', 10, 60 * 60 * 1000);
+ const googleCachedContexts = new Cache('google_contexts', 50, 55 * 60 * 1000);
+ async function cleanupExpiredGoogleCaches() {
+ const now = Date.now();
+ for (const [key, entry] of googleCachedContexts.entries()) {
+ if (entry.expiresAt && now > entry.expiresAt) {
+ try {
+ const client = await getGoogleCacheManager(entry.value.apiKey);
+ await client.caches.delete(entry.value.name);
+ }
+ catch (error) {
+ console.warn(`Failed to cleanup Google cache ${entry.value.name}:`, error);
+ }
+ finally {
+ googleCachedContexts.delete(key);
+ }
+ }
+ }
+ }
+ setInterval(() => {
+ cleanupExpiredGoogleCaches().catch(console.error);
+ }, 5 * 60 * 1000);
  async function getGoogleCacheManager(apiKey) {
- let client = googleCacheClients.get(apiKey);
- if (!client) {
+ const cacheKey = generateCacheKey(apiKey, 'google_client:');
+ const cachedClient = googleCacheClients.get(cacheKey);
+ if (cachedClient) {
+ return cachedClient;
+ }
+ try {
  const { GoogleGenAI } = await Promise.resolve().then(() => __importStar(require('@google/genai')));
- client = new GoogleGenAI({ apiKey });
- googleCacheClients.set(apiKey, client);
+ const client = new GoogleGenAI({ apiKey });
+ googleCacheClients.set(cacheKey, client);
+ return client;
+ }
+ catch (error) {
+ throw new CacheError(`Failed to initialize Google cache client: ${error.message}`, { apiKey: apiKey.substring(0, 8) + '...' });
  }
- return client;
  }
- async function createGoogleCache(exec, index, apiKey, cacheContent, tools) {
- var _a;
+ function isUrl(str) {
+ if (typeof str !== 'string')
+ return false;
  try {
- const useGoogleCache = exec.getNodeParameter('useGoogleCache', index, false);
- if (!useGoogleCache) {
- return null;
- }
- const googleCacheManager = await getGoogleCacheManager(apiKey);
- const normalizedCacheContent = (_a = cacheContent === null || cacheContent === void 0 ? void 0 : cacheContent.trim()) !== null && _a !== void 0 ? _a : '';
- if (!normalizedCacheContent) {
- return null;
- }
- const cacheKeyData = {
- content: normalizedCacheContent,
- tools: tools ? Object.keys(tools).sort() : [],
- model: 'gemini-2.0-flash-001',
- };
- const cacheKey = JSON.stringify(cacheKeyData);
- const existingCache = googleCachedContexts.get(cacheKey);
- if (existingCache) {
- return existingCache.name;
- }
- const ttlSeconds = 3600;
- const displayName = `universal_ai_cache_${Date.now()}`;
- const cacheConfig = {
- model: 'gemini-2.0-flash-001',
- config: {
- displayName,
- ttl: `${ttlSeconds}s`,
- contents: [{
- role: 'user',
- parts: [{ text: normalizedCacheContent }],
- }],
- },
- };
- if (tools && Object.keys(tools).length > 0) {
- cacheConfig.config.tools = Object.values(tools);
- }
- const result = await googleCacheManager.caches.create(cacheConfig);
- const cachedContentName = result === null || result === void 0 ? void 0 : result.name;
- if (!cachedContentName) {
- throw new Error('Failed to get cached content name from creation response');
- }
- googleCachedContexts.set(cacheKey, { name: cachedContentName }, ttlSeconds * 1000);
- return cachedContentName;
+ const url = new URL(str);
+ return url.protocol === 'http:' || url.protocol === 'https:';
  }
- catch (error) {
- console.error('UniversalAI: Failed to create Google cache. Falling back to non-cached execution:', error);
- return null;
+ catch {
+ return str.startsWith('data:');
  }
  }
- function canUseCache(cacheContent) {
- return Boolean(cacheContent && cacheContent.trim().length > 0);
+ function isLikelyBase64(str) {
+ if (typeof str !== 'string')
+ return false;
+ if (str.length % 4 !== 0)
+ return false;
+ if (!/^[A-Za-z0-9+/]*={0,2}$/.test(str))
+ return false;
+ if (str.length > 10000)
+ return true;
+ return true;
  }
  function extractTextFromMessageContent(content) {
- if (!content) {
+ if (!content)
  return '';
- }
- if (typeof content === 'string') {
+ if (typeof content === 'string')
  return content;
- }
  if (Array.isArray(content)) {
  return content
  .map((part) => {
@@ -202,16 +270,20 @@ function extractTextFromMessageContent(content) {
  }
  return '';
  }
+ function canUseCache(cacheContent) {
+ return Boolean(cacheContent && cacheContent.trim().length > 0);
+ }
  function resolveCacheContent(input) {
+ var _a, _b, _c;
  const sections = [];
  let hasSystem = false;
  let hasMessages = false;
  let hasPrompt = false;
- if (input.system && input.system.trim()) {
+ if ((_a = input.system) === null || _a === void 0 ? void 0 : _a.trim()) {
  sections.push(`System Instruction:\n${input.system.trim()}`);
  hasSystem = true;
  }
- if (input.messages && input.messages.length > 0) {
+ if ((_b = input.messages) === null || _b === void 0 ? void 0 : _b.length) {
  const messageSections = [];
  for (const message of input.messages) {
  const text = extractTextFromMessageContent(message.content);
@@ -222,34 +294,31 @@ function resolveCacheContent(input) {
  sections.push(`System Instruction (from messages):\n${text.trim()}`);
  hasSystem = true;
  }
- continue;
  }
- messageSections.push(`${message.role.toUpperCase()}:\n${text.trim()}`);
+ else {
+ messageSections.push(`${message.role.toUpperCase()}:\n${text.trim()}`);
+ }
  }
  if (messageSections.length > 0) {
  sections.push(`Messages:\n${messageSections.join('\n\n')}`);
  hasMessages = true;
  }
  }
- if (input.prompt && input.prompt.trim()) {
+ if ((_c = input.prompt) === null || _c === void 0 ? void 0 : _c.trim()) {
  sections.push(`Prompt Template:\n${input.prompt.trim()}`);
  hasPrompt = true;
  }
  const content = sections.join('\n\n').trim();
  let source;
  const sourceCount = [hasSystem, hasMessages, hasPrompt].filter(Boolean).length;
- if (sourceCount > 1) {
+ if (sourceCount > 1)
  source = 'combined';
- }
- else if (hasSystem) {
+ else if (hasSystem)
  source = 'system';
- }
- else if (hasMessages) {
+ else if (hasMessages)
  source = 'messages';
- }
- else if (hasPrompt) {
+ else if (hasPrompt)
  source = 'prompt';
- }
  return {
  content: content || undefined,
  hasSystem,
@@ -258,120 +327,234 @@ function resolveCacheContent(input) {
  source,
  };
  }
- const messageSchema = zod_1.z.object({
- role: zod_1.z.enum(['system', 'user', 'assistant']),
- content: zod_1.z.any(),
- });
- const messagesArraySchema = zod_1.z.array(messageSchema);
- const ajv = new ajv_1.default({
- allErrors: true,
- verbose: true,
- strict: false,
- });
- const isUrl = (str) => {
- if (typeof str !== 'string')
- return false;
- return str.startsWith('http://') ||
- str.startsWith('https://') ||
- str.startsWith('data:');
- };
- const isLikelyBase64 = (str) => {
- if (str.length % 4 !== 0)
- return false;
- if (!/^[A-Za-z0-9+/]*={0,2}$/.test(str))
- return false;
- if (str.length > 10000)
- return true;
- return true;
- };
+ async function createGoogleCache(exec, index, apiKey, cacheContent, tools) {
+ try {
+ const useGoogleCache = exec.getNodeParameter('useGoogleCache', index, false);
+ if (!useGoogleCache || !(cacheContent === null || cacheContent === void 0 ? void 0 : cacheContent.trim())) {
+ return null;
+ }
+ const googleCacheManager = await getGoogleCacheManager(apiKey);
+ const normalizedCacheContent = cacheContent.trim();
+ const cacheKeyData = {
+ content: normalizedCacheContent,
+ tools: tools ? Object.keys(tools).sort() : [],
+ model: 'gemini-2.0-flash-001',
+ };
+ const cacheKey = generateCacheKey(cacheKeyData, 'google_cache:');
+ const existingCache = googleCachedContexts.get(cacheKey);
+ if (existingCache) {
+ return existingCache.name;
+ }
+ const ttlSeconds = 3600;
+ const displayName = `n8n_cache_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+ const cacheConfig = {
+ model: 'gemini-2.0-flash-001',
+ config: {
+ displayName,
+ ttl: `${ttlSeconds}s`,
+ contents: [{
+ role: 'user',
+ parts: [{ text: normalizedCacheContent }],
+ }],
+ },
+ };
+ if (tools && Object.keys(tools).length > 0) {
+ cacheConfig.config.tools = Object.values(tools);
+ }
+ const result = await googleCacheManager.caches.create(cacheConfig);
+ const cachedContentName = result === null || result === void 0 ? void 0 : result.name;
+ if (!cachedContentName) {
+ throw new CacheError('Failed to create cache: No name in response', { displayName });
+ }
+ googleCachedContexts.set(cacheKey, {
+ name: cachedContentName,
+ apiKey
+ }, ttlSeconds * 1000);
+ return cachedContentName;
+ }
+ catch (error) {
+ if (error instanceof CacheError)
+ throw error;
+ throw new CacheError(`Google cache creation failed: ${error.message}`, {
+ cacheContentLength: cacheContent === null || cacheContent === void 0 ? void 0 : cacheContent.length,
+ hasTools: !!tools && Object.keys(tools).length > 0
+ });
+ }
+ }
  async function buildInput(exec, itemIndex) {
  const inputType = exec.getNodeParameter('inputType', itemIndex);
  if (inputType === 'prompt') {
- const promptValue = exec.getNodeParameter('prompt', itemIndex, '');
- const systemValue = exec.getNodeParameter('system', itemIndex, '');
- const result = {};
- const trimmedPrompt = typeof promptValue === 'string' ? promptValue.trim() : '';
- if (trimmedPrompt) {
- result.prompt = trimmedPrompt;
- }
- const trimmedSystem = typeof systemValue === 'string' ? systemValue.trim() : '';
- if (trimmedSystem) {
- result.system = trimmedSystem;
- }
- return result;
+ return buildPromptInput(exec, itemIndex);
  }
  const messageAsJson = exec.getNodeParameter('messageAsJson', itemIndex, false);
  return messageAsJson
  ? buildMessagesFromJson(exec, itemIndex)
  : buildMessagesFromUI(exec, itemIndex);
  }
+ function buildPromptInput(exec, itemIndex) {
+ const result = {};
+ const promptValue = exec.getNodeParameter('prompt', itemIndex, '').trim();
+ if (promptValue) {
+ if (promptValue.length > 100000) {
+ throw new ValidationError('Prompt is too long (max 100,000 characters)');
+ }
+ result.prompt = promptValue;
+ }
+ const systemValue = exec.getNodeParameter('system', itemIndex, '').trim();
+ if (systemValue) {
+ if (systemValue.length > 50000) {
+ throw new ValidationError('System instruction is too long (max 50,000 characters)');
+ }
+ result.system = systemValue;
+ }
+ return result;
+ }
+ const messageSchema = zod_1.z.object({
+ role: zod_1.z.enum(['system', 'user', 'assistant', 'tool']),
+ content: zod_1.z.any(),
+ });
+ const messagesArraySchema = zod_1.z.array(messageSchema);
+ const ajv = new ajv_1.default({
+ allErrors: true,
+ verbose: true,
+ strict: false,
+ useDefaults: true,
+ removeAdditional: true,
+ });
  async function buildMessagesFromJson(exec, itemIndex) {
  const rawJson = exec.getNodeParameter('messagesJson', itemIndex);
+ if (!rawJson.trim()) {
+ throw new ValidationError('Messages JSON field is empty');
+ }
+ if (rawJson.length > 200000) {
+ throw new ValidationError('Messages JSON is too large (max 200,000 characters)');
+ }
  try {
  const parsed = JSON.parse(rawJson);
  const result = messagesArraySchema.safeParse(parsed);
  if (!result.success) {
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Messages must be an array of objects with role and content.');
+ const errorDetails = result.error.issues
+ .map((issue) => {
+ const path = issue.path.length > 0 ? issue.path.join('.') : '(root)';
+ return `${path}: ${issue.message}`;
+ })
+ .join('; ');
+ throw new ValidationError(`Invalid messages format: ${errorDetails}`);
+ }
+ if (result.data.length > 100) {
+ throw new ValidationError('Too many messages (max 100)');
  }
  return { messages: result.data };
  }
  catch (error) {
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON in "Messages (JSON)" field: ${error.message}`);
+ if (error instanceof ValidationError)
+ throw error;
+ throw new ValidationError(`Invalid JSON in messages field: ${error.message}`);
  }
  }
  async function buildMessagesFromUI(exec, itemIndex) {
- var _a;
+ var _a, _b, _c;
  const items = exec.getInputData();
  const messagesUi = exec.getNodeParameter('messages.messagesUi', itemIndex, []);
+ if (messagesUi.length > 100) {
+ throw new ValidationError('Too many messages (max 100)');
+ }
  const builtMessages = [];
  const itemBinary = items[itemIndex].binary;
  for (const msg of messagesUi) {
  const role = msg.role;
  if (role === 'system') {
- builtMessages.push({ role, content: msg.systemContent || '' });
+ if ((_a = msg.systemContent) === null || _a === void 0 ? void 0 : _a.trim()) {
+ builtMessages.push({ role, content: msg.systemContent.trim() });
+ }
  continue;
  }
- const attachments = ((_a = msg.attachments) === null || _a === void 0 ? void 0 : _a.attachment) || [];
+ const attachments = ((_b = msg.attachments) === null || _b === void 0 ? void 0 : _b.attachment) || [];
+ const content = ((_c = msg.content) === null || _c === void 0 ? void 0 : _c.trim()) || '';
  if (attachments.length === 0) {
- builtMessages.push({ role, content: msg.content || '' });
+ if (content) {
+ builtMessages.push({ role, content });
+ }
  }
  else {
- const messageWithAttachments = await buildMessageWithAttachments(role, msg.content, attachments, itemBinary, exec, itemIndex);
+ const messageWithAttachments = await buildMessageWithAttachments(role, content, attachments, itemBinary, exec, itemIndex);
  if (messageWithAttachments) {
  builtMessages.push(messageWithAttachments);
  }
  }
  }
  const convertMessagesToModel = exec.getNodeParameter('convertMessagesToModel', itemIndex, false);
- if (convertMessagesToModel) {
- return { messages: (0, ai_1.convertToModelMessages)(builtMessages) };
- }
- return { messages: builtMessages };
+ return {
+ messages: convertMessagesToModel ? (0, ai_1.convertToModelMessages)(builtMessages) : builtMessages
+ };
  }
+ const MAX_ATTACHMENT_SIZE = 50 * 1024 * 1024;
+ const MAX_TOTAL_ATTACHMENTS_SIZE = 100 * 1024 * 1024;
  async function buildMessageWithAttachments(role, content, attachments, itemBinary, exec, itemIndex) {
  const parts = [];
  if (content) {
  parts.push({ type: 'text', text: content });
  }
+ let totalSize = 0;
  const MAX_CONCURRENT_ATTACHMENTS = 3;
- const processedAttachments = [];
  for (let i = 0; i < attachments.length; i += MAX_CONCURRENT_ATTACHMENTS) {
  const batch = attachments.slice(i, i + MAX_CONCURRENT_ATTACHMENTS);
  const batchPromises = batch.map(attachment => processAttachment(attachment, itemBinary, exec, itemIndex));
- const batchResults = await Promise.all(batchPromises);
- processedAttachments.push(...batchResults);
- }
- for (const attachment of processedAttachments) {
- if (attachment) {
- parts.push(attachment);
+ const processedAttachments = await Promise.all(batchPromises);
+ for (const attachment of processedAttachments) {
+ if (attachment) {
+ if (attachment.data instanceof Buffer) {
+ totalSize += attachment.data.length;
+ if (totalSize > MAX_TOTAL_ATTACHMENTS_SIZE) {
+ throw new ValidationError(`Total attachments size exceeds limit of ${MAX_TOTAL_ATTACHMENTS_SIZE / 1024 / 1024}MB`);
+ }
+ }
+ parts.push(attachment);
+ }
  }
  }
  return parts.length > 0 ? { role, content: parts } : null;
  }
+ function getMimeType(attachment) {
+ return attachment.mimeType === 'other' ? attachment.mimeTypeOther : attachment.mimeType;
+ }
+ async function getBinaryData(fileContentInput, itemBinary, exec, itemIndex) {
+ if (itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput]) {
+ const binaryData = itemBinary[fileContentInput];
+ if (binaryData.data) {
+ const buffer = Buffer.from(binaryData.data, 'base64');
+ if (buffer.length > MAX_ATTACHMENT_SIZE) {
+ throw new ValidationError(`Attachment too large: ${buffer.length / 1024 / 1024}MB (max ${MAX_ATTACHMENT_SIZE / 1024 / 1024}MB)`);
+ }
+ return {
+ data: buffer,
+ mimeType: binaryData.mimeType
+ };
+ }
+ }
+ try {
+ if (isLikelyBase64(fileContentInput)) {
+ const buffer = Buffer.from(fileContentInput, 'base64');
+ if (buffer.length > MAX_ATTACHMENT_SIZE) {
+ throw new ValidationError(`Attachment too large: ${buffer.length / 1024 / 1024}MB (max ${MAX_ATTACHMENT_SIZE / 1024 / 1024}MB)`);
+ }
+ if (buffer.length > 0) {
+ return { data: buffer, mimeType: undefined };
+ }
+ }
+ }
+ catch (error) {
+ if (error instanceof ValidationError)
+ throw error;
+ throw new ValidationError(`Invalid file content for attachment: ${error.message}`);
+ }
+ return { data: null, mimeType: undefined };
+ }
  async function processAttachment(attachment, itemBinary, exec, itemIndex) {
  const fileContentInput = attachment.fileContent;
- if (!fileContentInput || typeof fileContentInput !== 'string')
+ if (!fileContentInput || typeof fileContentInput !== 'string') {
  return null;
+ }
  let mimeType = getMimeType(attachment);
  let fileData;
  if (isUrl(fileContentInput)) {
@@ -379,46 +562,19 @@ async function processAttachment(attachment, itemBinary, exec, itemIndex) {
  }
  else {
  const result = await getBinaryData(fileContentInput, itemBinary, exec, itemIndex);
+ if (!result.data)
+ return null;
  fileData = result.data;
  if (!mimeType && result.mimeType) {
  mimeType = result.mimeType;
  }
  }
- if (!fileData || (Buffer.isBuffer(fileData) && fileData.length === 0)) {
- return null;
- }
  return {
  type: 'file',
  data: fileData,
- mediaType: mimeType || 'application/octet-stream',
+ mediaType: mimeType || 'application/octet-stream'
  };
  }
- function getMimeType(attachment) {
- return attachment.mimeType === 'other'
- ? attachment.mimeTypeOther
- : attachment.mimeType;
- }
- async function getBinaryData(fileContentInput, itemBinary, exec, itemIndex) {
- if (itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput]) {
- const binaryData = itemBinary[fileContentInput];
- return {
- data: Buffer.from(binaryData.data, 'base64'),
- mimeType: binaryData.mimeType,
- };
- }
- try {
- if (isLikelyBase64(fileContentInput)) {
- const buffer = Buffer.from(fileContentInput, 'base64');
- if (buffer.length > 0 && buffer.length < 50 * 1024 * 1024) {
- return { data: buffer, mimeType: undefined };
- }
- }
- }
- catch (error) {
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid file content for attachment: ${error.message}`);
- }
- return { data: null, mimeType: undefined };
- }
  function formatTextResult(result, includeRequestBody, provider) {
  var _a, _b, _c, _d, _e;
  let { text, reasoning } = result;
@@ -532,163 +688,159 @@ function getCacheMetrics(result, provider, metadata) {
  }
  function formatResponse(result) {
  var _a, _b, _c, _d;
- const response = {
+ return {
  id: (_a = result.response) === null || _a === void 0 ? void 0 : _a.id,
  modelId: (_b = result.response) === null || _b === void 0 ? void 0 : _b.modelId,
  timestamp: (_c = result.response) === null || _c === void 0 ? void 0 : _c.timestamp,
  headers: (_d = result.response) === null || _d === void 0 ? void 0 : _d.headers,
  };
- return response;
  }
- async function getProvider(provider, apiKey, baseURL, customHeaders) {
- const headersKey = customHeaders
- ? JSON.stringify(Object.keys(customHeaders)
+ async function getProvider(provider, config) {
+ const headersKey = config.customHeaders
+ ? generateCacheKey(Object.keys(config.customHeaders)
  .sort()
- .map((key) => [key, customHeaders[key]]))
+ .map((key) => [key, config.customHeaders[key]]), 'headers:')
  : '';
- const cacheKey = `${provider}:${apiKey}:${baseURL || ''}:${headersKey}`;
+ const cacheKey = generateCacheKey(`${provider}:${config.apiKey}:${config.baseURL || ''}:${headersKey}`, 'provider:');
  const cached = providerCache.get(cacheKey);
  if (cached)
  return cached;
- let providerInstance;
  try {
+ let providerInstance;
  switch (provider) {
  case 'google':
  const { createGoogleGenerativeAI } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/google')));
  providerInstance = createGoogleGenerativeAI({
- apiKey,
- ...(baseURL && { baseURL }),
- ...(customHeaders && Object.keys(customHeaders).length > 0 && { headers: customHeaders }),
+ apiKey: config.apiKey,
+ ...(config.baseURL && { baseURL: config.baseURL }),
+ ...(config.customHeaders && Object.keys(config.customHeaders).length > 0 && {
+ headers: config.customHeaders
+ }),
  });
  break;
  case 'deepseek':
  const { createDeepSeek } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/deepseek')));
- providerInstance = createDeepSeek({ apiKey, ...(baseURL && { baseURL }) });
+ providerInstance = createDeepSeek({
+ apiKey: config.apiKey,
+ ...(config.baseURL && { baseURL: config.baseURL })
+ });
  break;
  case 'groq':
  const { createGroq } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/groq')));
- providerInstance = createGroq({ apiKey, ...(baseURL && { baseURL }) });
+ providerInstance = createGroq({
+ apiKey: config.apiKey,
+ ...(config.baseURL && { baseURL: config.baseURL })
+ });
  break;
  case 'openrouter':
  const { createOpenRouter } = await Promise.resolve().then(() => __importStar(require('@openrouter/ai-sdk-provider')));
- providerInstance = createOpenRouter({ apiKey, ...(baseURL && { baseURL }) });
+ providerInstance = createOpenRouter({
+ apiKey: config.apiKey,
+ ...(config.baseURL && { baseURL: config.baseURL })
+ });
  break;
  default:
- throw new Error(`Unsupported provider: ${provider}`);
+ throw new ProviderError(`Unsupported provider: ${provider}`);
  }
  providerCache.set(cacheKey, providerInstance);
  return providerInstance;
  }
  catch (error) {
- throw new Error(`Failed to initialize ${provider} provider: ${error.message}`);
+ throw new ProviderError(`Failed to initialize ${provider} provider: ${error.message}`, { provider, baseURL: config.baseURL });
  }
  }
  function parseAndValidateSchema(rawSchema, exec) {
- const cacheKey = `schema:${Buffer.from(rawSchema).toString('base64').substring(0, 50)}`;
+ if (!rawSchema.trim()) {
+ throw new ValidationError('Schema field is empty');
+ }
+ if (rawSchema.length > 100000) {
+ throw new ValidationError('Schema is too large (max 100,000 characters)');
+ }
+ const cacheKey = generateCacheKey(rawSchema, 'schema:');
  const cached = schemaCache.get(cacheKey);
  if (cached)
  return cached;
- let parsedSchema;
  try {
- parsedSchema = JSON.parse(rawSchema);
+ const parsedSchema = JSON.parse(rawSchema);
+ if (!ajv.validateSchema(parsedSchema)) {
+ throw new ValidationError(`Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
+ }
+ schemaCache.set(cacheKey, parsedSchema);
+ return parsedSchema;
  }
  catch (err) {
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Schema is not valid JSON: ' + err.message);
- }
- if (!ajv.validateSchema(parsedSchema)) {
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
+ throw new ValidationError(`Schema is not valid JSON: ${err.message}`);
  }
- schemaCache.set(cacheKey, parsedSchema);
- return parsedSchema;
  }
  function parseStopSequences(stopSequencesStr) {
  if (!stopSequencesStr)
  return undefined;
- return stopSequencesStr.split(',').map(s => s.trim()).filter(s => s.length > 0);
+ const sequences = stopSequencesStr
+ .split(',')
+ .map(s => s.trim())
+ .filter(Boolean);
+ return sequences.length > 0 ? sequences : undefined;
  }
  function applyNumericOptions(params, options, keys) {
  for (const key of keys) {
  const value = options[key];
  if (value !== undefined && value !== null && value !== '') {
- params[key] = value;
+ const numValue = Number(value);
+ if (!isNaN(numValue)) {
+ params[key] = numValue;
+ }
  }
  }
  }
- class UniversalAI {
- constructor() {
- this.description = descriptions_1.UNIVERSAL_AI_DESCRIPTION;
- this.methods = {
- loadOptions: {
- async getModels() {
- const provider = this.getCurrentNodeParameter('provider');
- const cacheKey = `models:${provider}`;
- const cached = modelCache.get(cacheKey);
- if (cached)
- return cached;
- const { OPENROUTER_MODELS, GOOGLE_GEMINI_MODELS, DEEPSEEK_MODELS, GROQ_MODELS } = await Promise.resolve().then(() => __importStar(require('./model-lists')));
- const models = {
- google: GOOGLE_GEMINI_MODELS,
- deepseek: DEEPSEEK_MODELS,
- groq: GROQ_MODELS,
- openrouter: OPENROUTER_MODELS,
- }[provider] || [];
- modelCache.set(cacheKey, models);
- return models;
- },
- },
- };
+ function resolveFinalContext(input, cachedContentName, cacheContentInfo) {
+ const finalContext = {};
+ if (!cachedContentName) {
+ return input;
  }
- async execute() {
- const items = this.getInputData();
- const returnData = [];
- const provider = this.getNodeParameter('provider', 0);
- const credentialType = {
- google: 'googleGenerativeAIApi',
- deepseek: 'deepSeekApi',
- groq: 'groqApi',
- openrouter: 'openRouterApi',
- }[provider];
- if (!credentialType) {
- throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Unsupported provider: ${provider}`);
+ if (input.prompt) {
+ finalContext.prompt = input.prompt;
+ }
+ else if (input.messages) {
+ const filteredMessages = input.messages.filter(msg => msg.role !== 'system');
+ if (filteredMessages.length > 0) {
+ finalContext.messages = filteredMessages;
  }
- const credentials = await this.getCredentials(credentialType);
- if (!(credentials === null || credentials === void 0 ? void 0 : credentials.apiKey)) {
- throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'No API key provided in credentials');
+ }
+ return finalContext;
+ }
+ function withErrorHandling(fn, context, exec, itemIndex) {
+ return fn().catch((error) => {
+ if (error instanceof UniversalAIError) {
+ throw error;
  }
- const customHeaders = provider === 'google' ? getGoogleCustomHeaders(this, 0) : undefined;
- const aiProvider = await getProvider(provider, credentials.apiKey, credentials.baseUrl, customHeaders);
- for (let i = 0; i < items.length; i++) {
- if (this.continueOnFail()) {
- try {
- const result = await processItem(this, i, provider, aiProvider, credentials.apiKey);
- returnData.push(...result);
- }
- catch (error) {
- const errorMessage = error instanceof Error ? error.message : 'Unknown error';
- returnData.push({
- json: { error: errorMessage },
- pairedItem: { item: i },
- });
+ if (error instanceof n8n_workflow_1.NodeOperationError) {
+ throw error;
+ }
+ const errorMessage = error instanceof Error ? error.message : String(error);
+ const enhancedError = new UniversalAIError(`${context}: ${errorMessage}`, 'UNKNOWN_ERROR', { originalError: error });
+ if (exec && itemIndex !== undefined && exec.continueOnFail()) {
+ return {
+ json: {
+ error: enhancedError.message,
+ errorCode: enhancedError.code,
+ success: false
  }
- }
- else {
- const result = await processItem(this, i, provider, aiProvider, credentials.apiKey);
- returnData.push(...result);
- }
+ };
  }
- return [returnData];
- }
+ throw enhancedError;
+ });
  }
- exports.UniversalAI = UniversalAI;
  async function processItem(exec, index, provider, aiProvider, apiKey) {
- const operation = exec.getNodeParameter('operation', index);
- const model = exec.getNodeParameter('model', index);
- const options = exec.getNodeParameter('options', index, {});
- const input = await buildInput(exec, index);
- const modelSettings = getModelSettings(exec, index, provider, operation, options);
- return operation === 'generateText'
- ? await generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey)
- : await generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey);
+ return withErrorHandling(async () => {
+ const operation = exec.getNodeParameter('operation', index);
+ const model = exec.getNodeParameter('model', index);
+ const options = exec.getNodeParameter('options', index, {});
+ const input = await buildInput(exec, index);
+ const modelSettings = getModelSettings(exec, index, provider, operation, options);
+ return operation === 'generateText'
+ ? await generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey)
+ : await generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey);
+ }, 'processItem', exec, index);
  }
  function getModelSettings(exec, index, provider, operation, options) {
  const settings = {};
@@ -699,12 +851,14 @@ function getModelSettings(exec, index, provider, operation, options) {
  if (provider === 'google') {
  const safetySettingsRaw = exec.getNodeParameter('safetySettings.settings', index, []);
  if (safetySettingsRaw.length > 0) {
- settings.safetySettings = safetySettingsRaw.map((s) => ({
+ settings.safetySettings = safetySettingsRaw.map(s => ({
  category: s.category,
  threshold: s.threshold,
  }));
  }
- settings.structuredOutputs = operation === 'generateObject';
+ if (operation === 'generateObject') {
+ settings.structuredOutputs = true;
+ }
  const responseModalities = exec.getNodeParameter('responseModalities', index, []);
  if (responseModalities.length > 0) {
  settings.responseModalities = responseModalities;
@@ -719,7 +873,7 @@ function buildGoogleProviderOptions(exec, index, cachedContentName) {
  if (!Number.isNaN(thinkingBudgetValue) && thinkingBudgetValue > -1) {
  options.thinkingConfig = {
  thinkingBudget: Math.max(0, thinkingBudgetValue),
- includeThoughts,
+ includeThoughts
  };
  }
  if (cachedContentName) {
@@ -728,20 +882,16 @@ return Object.keys(options).length > 0 ? options : undefined;
  return Object.keys(options).length > 0 ? options : undefined;
  }
  function getGoogleCustomHeaders(exec, index) {
- var _a, _b;
+ var _a, _b, _c, _d;
  const headersCollection = exec.getNodeParameter('customHeaders', index, {});
  const entries = (_a = headersCollection === null || headersCollection === void 0 ? void 0 : headersCollection.headers) !== null && _a !== void 0 ? _a : [];
- if (!entries || entries.length === 0) {
+ if (!entries.length)
  return undefined;
- }
  const headers = {};
  for (const entry of entries) {
- if (!entry)
- continue;
- const name = (entry.name || '').trim();
- if (!name)
- continue;
- headers[name] = (_b = entry.value) !== null && _b !== void 0 ? _b : '';
+ if ((_b = entry === null || entry === void 0 ? void 0 : entry.name) === null || _b === void 0 ? void 0 : _b.trim()) {
+ headers[entry.name.trim()] = (_d = (_c = entry.value) === null || _c === void 0 ? void 0 : _c.trim()) !== null && _d !== void 0 ? _d : '';
+ }
  }
  return Object.keys(headers).length > 0 ? headers : undefined;
  }
@@ -754,82 +904,101 @@ async function prepareGoogleCache(exec, index, apiKey, input, tools, context) {
  cachedContentName = await createGoogleCache(exec, index, apiKey, cacheContentInfo.content, tools);
  }
  catch (error) {
- console.warn(`UniversalAI: Cache creation for ${context} generation failed, continuing without cache:`, error);
+ if (error instanceof CacheError) {
+ console.warn(`Cache creation failed for ${context}:`, error.message);
+ }
+ else {
+ console.warn(`Unexpected cache error for ${context}:`, error);
+ }
  }
  }
  const googleProviderOptions = buildGoogleProviderOptions(exec, index, cachedContentName || undefined);
  return {
  cachedContentName,
  cacheContentInfo,
- googleProviderOptions,
+ googleProviderOptions
  };
  }
- function resolveFinalContext(input, cachedContentName, cacheContentInfo) {
- const finalContext = {};
- if (!cachedContentName) {
- return input;
- }
- if (input.system) {
- }
- if (input.prompt) {
- finalContext.prompt = input.prompt;
- }
- else if (input.messages) {
- const filteredMessages = input.messages.filter(msg => msg.role !== 'system');
- if (filteredMessages.length > 0) {
- finalContext.messages = filteredMessages;
+ async function generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey) {
+ return withErrorHandling(async () => {
+ const enableStreaming = exec.getNodeParameter('enableStreaming', index, false);
+ const includeRequestBody = options.includeRequestBody;
+ const tools = provider === 'google' ? await buildGoogleTools(exec, index) : undefined;
+ let cachedContentName = null;
+ let googleProviderOptions;
+ let cacheContentInfo;
+ if (provider === 'google') {
+ const cacheSetup = await prepareGoogleCache(exec, index, apiKey, input, tools, 'text');
+ cachedContentName = cacheSetup.cachedContentName;
+ cacheContentInfo = cacheSetup.cacheContentInfo;
+ googleProviderOptions = cacheSetup.googleProviderOptions;
  }
- }
- return finalContext;
+ const finalContext = resolveFinalContext(input, cachedContentName, cacheContentInfo);
+ const params = {
+ model: aiProvider(model, modelSettings),
+ ...finalContext,
+ };
+ if (tools && !cachedContentName) {
+ params.tools = tools;
+ }
+ if (provider === 'google' && googleProviderOptions) {
+ params.providerOptions = {
+ google: googleProviderOptions,
+ };
+ }
+ applyNumericOptions(params, options, [
+ 'maxTokens', 'temperature', 'topP', 'topK',
+ 'frequencyPenalty', 'presencePenalty', 'seed'
+ ]);
+ if (enableStreaming) {
+ return await handleStreaming(params, provider, includeRequestBody);
+ }
+ const result = await (0, ai_1.generateText)(params);
+ const formattedResult = formatTextResult(result, includeRequestBody, provider);
+ return [{ json: formattedResult }];
+ }, 'generateTextOperation', exec, index);
  }
- async function generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey) {
- const enableStreaming = exec.getNodeParameter('enableStreaming', index, false);
- const includeRequestBody = options.includeRequestBody;
- const tools = provider === 'google' ? await buildGoogleTools(exec, index) : undefined;
- let cachedContentName = null;
- let googleProviderOptions;
- let cacheContentInfo;
- if (provider === 'google') {
- const cacheSetup = await prepareGoogleCache(exec, index, apiKey, input, tools, 'text');
- cachedContentName = cacheSetup.cachedContentName;
- cacheContentInfo = cacheSetup.cacheContentInfo;
- googleProviderOptions = cacheSetup.googleProviderOptions;
- }
- const finalContext = resolveFinalContext(input, cachedContentName, cacheContentInfo);
- const params = {
- model: aiProvider(model, modelSettings),
- ...finalContext,
- };
- if (tools && !cachedContentName) {
- params.tools = tools;
- }
- if (provider === 'google' && googleProviderOptions) {
- params.providerOptions = {
- google: googleProviderOptions,
+ async function generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey) {
+ return withErrorHandling(async () => {
+ const schemaName = exec.getNodeParameter('schemaName', index, '').trim();
+ const schemaDescription = exec.getNodeParameter('schemaDescription', index, '').trim();
+ const rawSchema = exec.getNodeParameter('schema', index);
+ const parsedSchema = parseAndValidateSchema(rawSchema, exec);
+ let cachedContentName = null;
+ let googleProviderOptions;
+ let cacheContentInfo;
+ if (provider === 'google') {
+ const cacheSetup = await prepareGoogleCache(exec, index, apiKey, input, undefined, 'object');
+ cachedContentName = cacheSetup.cachedContentName;
+ cacheContentInfo = cacheSetup.cacheContentInfo;
+ googleProviderOptions = cacheSetup.googleProviderOptions;
+ }
+ const finalContext = resolveFinalContext(input, cachedContentName, cacheContentInfo);
+ const params = {
+ model: aiProvider(model, modelSettings),
+ schema: (0, ai_1.jsonSchema)(parsedSchema),
+ schemaName,
+ schemaDescription,
+ ...finalContext,
  };
- }
- const textNumericKeys = [
- 'maxTokens',
- 'temperature',
- 'topP',
- 'topK',
- 'frequencyPenalty',
- 'presencePenalty',
- 'seed',
- ];
- applyNumericOptions(params, options, textNumericKeys);
- if (enableStreaming) {
- return await handleStreaming(params, provider, includeRequestBody);
- }
- const result = await (0, ai_1.generateText)(params);
- const formattedResult = formatTextResult(result, includeRequestBody, provider);
- return [{ json: formattedResult }];
+ if (provider === 'google' && googleProviderOptions) {
+ params.providerOptions = {
+ google: googleProviderOptions,
+ };
+ }
+ applyNumericOptions(params, options, [
+ 'temperature', 'topP', 'topK',
+ 'frequencyPenalty', 'presencePenalty', 'seed'
+ ]);
+ const result = await (0, ai_1.generateObject)(params);
+ const formattedResult = formatObjectResult(result, options.includeRequestBody, provider);
+ return [{ json: formattedResult }];
+ }, 'generateObjectOperation', exec, index);
  }
  async function buildGoogleTools(exec, index) {
  const googleTools = exec.getNodeParameter('googleTools', index, []);
- if (!googleTools || googleTools.length === 0) {
+ if (!(googleTools === null || googleTools === void 0 ? void 0 : googleTools.length))
  return undefined;
- }
  const tools = {};
  const { google } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/google')));
  const toolSet = new Set(googleTools);
@@ -845,82 +1014,127 @@ async function buildGoogleTools(exec, index) {
  return tools;
  }
  async function handleStreaming(params, provider, includeRequestBody) {
- const stream = await (0, ai_1.streamText)(params);
- const chunks = [];
- let fullText = '';
- for await (const textPart of stream.textStream) {
- fullText += textPart;
- chunks.push({ json: { chunk: textPart, isStreaming: true } });
- }
- let finalUsage;
- try {
- finalUsage = await stream.usage;
- }
- catch (error) {
- console.warn('UniversalAI: Failed to get usage from stream:', error);
- finalUsage = undefined;
- }
- const finalJson = {
- text: fullText,
- toolCalls: stream.toolCalls || [],
- toolResults: stream.toolResults || [],
- finishReason: stream.finishReason,
- usage: finalUsage ? formatUsage({ usage: finalUsage }, provider) : undefined,
- isStreaming: false,
- isFinal: true,
- };
- if (includeRequestBody) {
+ return withErrorHandling(async () => {
+ const stream = await (0, ai_1.streamText)(params);
+ const chunks = [];
+ let fullText = '';
  try {
- const requestMetadata = stream.request ? await stream.request : undefined;
- if ((requestMetadata === null || requestMetadata === void 0 ? void 0 : requestMetadata.body) !== undefined) {
- finalJson.request = { body: requestMetadata.body };
+ for await (const textPart of stream.textStream) {
+ fullText += textPart;
+ chunks.push({
+ json: {
+ chunk: textPart,
+ isStreaming: true
+ }
+ });
  }
  }
  catch (error) {
- console.warn('UniversalAI: Failed to get request metadata from stream:', error);
+ throw new UniversalAIError(`Stream processing failed: ${error.message}`, 'STREAM_ERROR', { provider });
  }
- }
- chunks.push({ json: finalJson });
- return chunks;
+ let finalUsage;
+ let requestMetadata;
+ try {
+ finalUsage = await stream.usage;
+ }
+ catch (error) {
+ console.warn('Could not get usage from stream:', error);
+ }
+ try {
+ requestMetadata = stream.request ? await stream.request : undefined;
+ }
+ catch (error) {
+ console.warn('Could not get request metadata from stream:', error);
+ }
+ const finalJson = {
+ text: fullText,
+ toolCalls: stream.toolCalls || [],
+ toolResults: stream.toolResults || [],
+ finishReason: stream.finishReason,
+ usage: finalUsage ? formatUsage({ usage: finalUsage }, provider) : undefined,
+ isStreaming: false,
+ isFinal: true,
+ };
+ if (includeRequestBody && (requestMetadata === null || requestMetadata === void 0 ? void 0 : requestMetadata.body) !== undefined) {
+ finalJson.request = { body: requestMetadata.body };
+ }
+ chunks.push({ json: finalJson });
+ return chunks;
+ }, 'handleStreaming');
  }
- async function generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey) {
- const schemaName = exec.getNodeParameter('schemaName', index, '');
- const schemaDescription = exec.getNodeParameter('schemaDescription', index, '');
- const rawSchema = exec.getNodeParameter('schema', index);
- const parsedSchema = parseAndValidateSchema(rawSchema, exec);
- let cachedContentName = null;
- let googleProviderOptions;
- let cacheContentInfo;
- if (provider === 'google') {
- const cacheSetup = await prepareGoogleCache(exec, index, apiKey, input, undefined, 'object');
- cachedContentName = cacheSetup.cachedContentName;
- cacheContentInfo = cacheSetup.cacheContentInfo;
- googleProviderOptions = cacheSetup.googleProviderOptions;
- }
- const finalContext = resolveFinalContext(input, cachedContentName, cacheContentInfo);
- const params = {
- model: aiProvider(model, modelSettings),
- schema: (0, ai_1.jsonSchema)(parsedSchema),
- schemaName,
- schemaDescription,
- ...finalContext,
- };
- if (provider === 'google' && googleProviderOptions) {
- params.providerOptions = {
- google: googleProviderOptions,
+ class UniversalAI {
+ constructor() {
+ this.description = descriptions_1.UNIVERSAL_AI_DESCRIPTION;
+ this.methods = {
+ loadOptions: {
+ async getModels() {
+ return withErrorHandling(async () => {
+ const provider = this.getCurrentNodeParameter('provider');
+ const cacheKey = generateCacheKey(provider, 'models:');
+ const cached = modelCache.get(cacheKey);
+ if (cached)
+ return cached;
+ const { OPENROUTER_MODELS, GOOGLE_GEMINI_MODELS, DEEPSEEK_MODELS, GROQ_MODELS } = await Promise.resolve().then(() => __importStar(require('./model-lists')));
+ const models = {
+ google: GOOGLE_GEMINI_MODELS,
+ deepseek: DEEPSEEK_MODELS,
+ groq: GROQ_MODELS,
+ openrouter: OPENROUTER_MODELS
+ }[provider] || [];
+ modelCache.set(cacheKey, models);
+ return models;
+ }, 'getModels');
+ },
+ },
+ };
+ }
+ async execute() {
+ const items = this.getInputData();
+ const returnData = [];
+ const provider = this.getNodeParameter('provider', 0);
+ const credentialType = {
+ google: 'googleGenerativeAIApi',
+ deepseek: 'deepSeekApi',
+ groq: 'groqApi',
+ openrouter: 'openRouterApi'
+ }[provider];
+ if (!credentialType) {
+ throw new ValidationError(`Unsupported provider: ${provider}`);
+ }
+ const credentials = await this.getCredentials(credentialType);
+ if (!(credentials === null || credentials === void 0 ? void 0 : credentials.apiKey)) {
+ throw new ValidationError('No API key provided in credentials');
+ }
+ const customHeaders = provider === 'google' ? getGoogleCustomHeaders(this, 0) : undefined;
+ const providerConfig = {
+ apiKey: credentials.apiKey,
+ customHeaders,
+ ...(credentials.baseUrl ? { baseURL: credentials.baseUrl } : {}),
  };
+ const aiProvider = await getProvider(provider, providerConfig);
+ for (let i = 0; i < items.length; i++) {
+ try {
+ const result = await processItem(this, i, provider, aiProvider, credentials.apiKey);
+ returnData.push(...result);
+ }
+ catch (error) {
+ if (this.continueOnFail()) {
+ returnData.push({
+ json: {
+ error: error instanceof Error ? error.message : 'Unknown error',
+ errorCode: error instanceof UniversalAIError ? error.code : 'UNKNOWN_ERROR',
+ itemIndex: i,
+ success: false
+ },
+ pairedItem: { item: i }
+ });
+ continue;
+ }
+ throw error;
+ }
+ }
+ return [returnData];
  }
- const objectNumericKeys = [
- 'temperature',
- 'topP',
- 'topK',
- 'frequencyPenalty',
- 'presencePenalty',
- 'seed',
- ];
- applyNumericOptions(params, options, objectNumericKeys);
- const result = await (0, ai_1.generateObject)(params);
- const formattedResult = formatObjectResult(result, options.includeRequestBody, provider);
- return [{ json: formattedResult }];
  }
+ exports.UniversalAI = UniversalAI;
  //# sourceMappingURL=UniversalAI.node.js.map