n8n-nodes-vercel-ai-sdk-universal-temp 0.1.50 → 0.1.52

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -41,15 +41,42 @@ const n8n_workflow_1 = require("n8n-workflow");
41
41
  const ai_1 = require("ai");
42
42
  const zod_1 = require("zod");
43
43
  const ajv_1 = __importDefault(require("ajv"));
44
+ const crypto_1 = require("crypto");
44
45
  const descriptions_1 = require("../shared/descriptions");
46
+ class UniversalAIError extends Error {
47
+ constructor(message, code, context) {
48
+ super(message);
49
+ this.code = code;
50
+ this.context = context;
51
+ this.name = 'UniversalAIError';
52
+ }
53
+ }
54
+ class CacheError extends UniversalAIError {
55
+ constructor(message, context) {
56
+ super(message, 'CACHE_ERROR', context);
57
+ }
58
+ }
59
+ class ProviderError extends UniversalAIError {
60
+ constructor(message, context) {
61
+ super(message, 'PROVIDER_ERROR', context);
62
+ }
63
+ }
64
+ class ValidationError extends UniversalAIError {
65
+ constructor(message, context) {
66
+ super(message, 'VALIDATION_ERROR', context);
67
+ }
68
+ }
45
69
  class Cache {
46
- constructor(maxSize = 100, ttl = 5 * 60 * 1000) {
70
+ constructor(_name, maxSize = 100, ttl = 5 * 60 * 1000) {
47
71
  this.cache = new Map();
48
72
  this.totalHits = 0;
49
73
  this.totalMisses = 0;
50
74
  this.totalEvictions = 0;
51
75
  this.maxSize = maxSize;
52
76
  this.ttl = ttl;
77
+ if (ttl > 0) {
78
+ setInterval(() => this.cleanupExpired(), Math.min(ttl, 60000));
79
+ }
53
80
  }
54
81
  get(key) {
55
82
  const item = this.cache.get(key);
@@ -65,6 +92,7 @@ class Cache {
65
92
  return undefined;
66
93
  }
67
94
  item.hits++;
95
+ item.lastAccessed = now;
68
96
  this.totalHits++;
69
97
  return item.value;
70
98
  }
@@ -72,20 +100,42 @@ class Cache {
72
100
  const now = Date.now();
73
101
  const expiresAt = customTTL ? now + customTTL : (this.ttl > 0 ? now + this.ttl : undefined);
74
102
  if (this.cache.size >= this.maxSize) {
75
- let oldestKey;
76
- let oldestTime = now;
77
- for (const [k, v] of this.cache.entries()) {
78
- if (v.timestamp < oldestTime) {
79
- oldestTime = v.timestamp;
80
- oldestKey = k;
81
- }
103
+ this.evictLRU();
104
+ }
105
+ this.cache.set(key, {
106
+ value,
107
+ timestamp: now,
108
+ hits: 0,
109
+ expiresAt,
110
+ lastAccessed: now
111
+ });
112
+ }
113
+ evictLRU() {
114
+ let lruKey;
115
+ let oldestAccess = Date.now();
116
+ for (const [key, item] of this.cache.entries()) {
117
+ if (item.lastAccessed < oldestAccess) {
118
+ oldestAccess = item.lastAccessed;
119
+ lruKey = key;
82
120
  }
83
- if (oldestKey) {
84
- this.cache.delete(oldestKey);
85
- this.totalEvictions++;
121
+ }
122
+ if (lruKey) {
123
+ this.cache.delete(lruKey);
124
+ this.totalEvictions++;
125
+ }
126
+ }
127
+ cleanupExpired() {
128
+ const now = Date.now();
129
+ let cleaned = 0;
130
+ for (const [key, item] of this.cache.entries()) {
131
+ if (item.expiresAt && now > item.expiresAt) {
132
+ this.cache.delete(key);
133
+ cleaned++;
86
134
  }
87
135
  }
88
- this.cache.set(key, { value, timestamp: now, hits: 0, expiresAt });
136
+ if (cleaned > 0) {
137
+ this.totalEvictions += cleaned;
138
+ }
89
139
  }
90
140
  delete(key) {
91
141
  return this.cache.delete(key);
@@ -96,93 +146,112 @@ class Cache {
96
146
  this.totalMisses = 0;
97
147
  this.totalEvictions = 0;
98
148
  }
149
+ entries() {
150
+ return this.cache.entries();
151
+ }
99
152
  getStats() {
153
+ const totalRequests = this.totalHits + this.totalMisses;
154
+ let totalSize = 0;
155
+ for (const item of this.cache.values()) {
156
+ totalSize += this.estimateSize(item.value);
157
+ }
100
158
  return {
101
159
  size: this.cache.size,
102
160
  maxSize: this.maxSize,
103
- hitRate: this.totalHits / (this.totalHits + this.totalMisses) || 0,
161
+ hitRate: totalRequests > 0 ? this.totalHits / totalRequests : 0,
104
162
  totalHits: this.totalHits,
105
163
  totalMisses: this.totalMisses,
106
164
  totalEvictions: this.totalEvictions,
107
165
  ttl: this.ttl,
166
+ averageItemSize: this.cache.size > 0 ? totalSize / this.cache.size : 0,
108
167
  };
109
168
  }
110
- }
111
- const modelCache = new Cache(50);
112
- const providerCache = new Cache(20);
113
- const schemaCache = new Cache(30);
114
- const googleCacheClients = new Cache(10, 60 * 60 * 1000);
115
- const googleCachedContexts = new Cache(50, 55 * 60 * 1000);
116
- async function getGoogleCacheManager(apiKey) {
117
- let client = googleCacheClients.get(apiKey);
118
- if (!client) {
119
- const { GoogleGenAI } = await Promise.resolve().then(() => __importStar(require('@google/genai')));
120
- client = new GoogleGenAI({ apiKey });
121
- googleCacheClients.set(apiKey, client);
169
+ estimateSize(value) {
170
+ try {
171
+ return JSON.stringify(value).length;
172
+ }
173
+ catch {
174
+ return 1024;
175
+ }
122
176
  }
123
- return client;
124
177
  }
125
- async function createGoogleCache(exec, index, apiKey, cacheContent, tools) {
126
- var _a;
127
- try {
128
- const useGoogleCache = exec.getNodeParameter('useGoogleCache', index, false);
129
- if (!useGoogleCache) {
130
- return null;
131
- }
132
- const googleCacheManager = await getGoogleCacheManager(apiKey);
133
- const normalizedCacheContent = (_a = cacheContent === null || cacheContent === void 0 ? void 0 : cacheContent.trim()) !== null && _a !== void 0 ? _a : '';
134
- if (!normalizedCacheContent) {
135
- return null;
178
+ function generateCacheKey(data, prefix = '') {
179
+ const dataStr = typeof data === 'string' ? data : JSON.stringify(data);
180
+ const hash = (0, crypto_1.createHash)('sha256').update(dataStr).digest('hex').substring(0, 16);
181
+ return `${prefix}${hash}`;
182
+ }
183
+ const modelCache = new Cache('models', 50, 10 * 60 * 1000);
184
+ const providerCache = new Cache('providers', 20, 30 * 60 * 1000);
185
+ const schemaCache = new Cache('schemas', 30, 60 * 60 * 1000);
186
+ const googleCacheClients = new Cache('google_clients', 10, 60 * 60 * 1000);
187
+ const googleCachedContexts = new Cache('google_contexts', 50, 55 * 60 * 1000);
188
+ async function cleanupExpiredGoogleCaches() {
189
+ const now = Date.now();
190
+ const expiredEntries = [];
191
+ for (const [key, item] of googleCachedContexts.entries()) {
192
+ if (item.expiresAt && now > item.expiresAt) {
193
+ expiredEntries.push({ key, context: item.value });
136
194
  }
137
- const cacheKeyData = {
138
- content: normalizedCacheContent,
139
- tools: tools ? Object.keys(tools).sort() : [],
140
- model: 'gemini-2.0-flash-001',
141
- };
142
- const cacheKey = JSON.stringify(cacheKeyData);
143
- const existingCache = googleCachedContexts.get(cacheKey);
144
- if (existingCache) {
145
- return existingCache.name;
195
+ }
196
+ for (const { key, context } of expiredEntries) {
197
+ try {
198
+ const client = await getGoogleCacheManager(context.apiKey);
199
+ await client.caches.delete(context.name);
146
200
  }
147
- const ttlSeconds = 3600;
148
- const displayName = `universal_ai_cache_${Date.now()}`;
149
- const cacheConfig = {
150
- model: 'gemini-2.0-flash-001',
151
- config: {
152
- displayName,
153
- ttl: `${ttlSeconds}s`,
154
- contents: [{
155
- role: 'user',
156
- parts: [{ text: normalizedCacheContent }],
157
- }],
158
- },
159
- };
160
- if (tools && Object.keys(tools).length > 0) {
161
- cacheConfig.config.tools = Object.values(tools);
201
+ catch (error) {
202
+ console.warn(`Failed to cleanup Google cache ${context.name}:`, error);
162
203
  }
163
- const result = await googleCacheManager.caches.create(cacheConfig);
164
- const cachedContentName = result === null || result === void 0 ? void 0 : result.name;
165
- if (!cachedContentName) {
166
- throw new Error('Failed to get cached content name from creation response');
204
+ finally {
205
+ googleCachedContexts.delete(key);
167
206
  }
168
- googleCachedContexts.set(cacheKey, { name: cachedContentName }, ttlSeconds * 1000);
169
- return cachedContentName;
207
+ }
208
+ }
209
+ setInterval(() => {
210
+ cleanupExpiredGoogleCaches().catch(console.error);
211
+ }, 5 * 60 * 1000);
212
+ async function getGoogleCacheManager(apiKey) {
213
+ const cacheKey = generateCacheKey(apiKey, 'google_client:');
214
+ const cachedClient = googleCacheClients.get(cacheKey);
215
+ if (cachedClient) {
216
+ return cachedClient;
217
+ }
218
+ try {
219
+ const { GoogleGenAI } = await Promise.resolve().then(() => __importStar(require('@google/genai')));
220
+ const client = new GoogleGenAI({ apiKey });
221
+ googleCacheClients.set(cacheKey, client);
222
+ return client;
170
223
  }
171
224
  catch (error) {
172
- console.error('UniversalAI: Failed to create Google cache. Falling back to non-cached execution:', error);
173
- return null;
225
+ throw new CacheError(`Failed to initialize Google cache client: ${error.message}`, { apiKey: apiKey.substring(0, 8) + '...' });
174
226
  }
175
227
  }
176
- function canUseCache(cacheContent) {
177
- return Boolean(cacheContent && cacheContent.trim().length > 0);
228
+ function isUrl(str) {
229
+ if (typeof str !== 'string')
230
+ return false;
231
+ try {
232
+ const url = new URL(str);
233
+ return url.protocol === 'http:' || url.protocol === 'https:';
234
+ }
235
+ catch {
236
+ return str.startsWith('data:');
237
+ }
238
+ }
239
+ function isLikelyBase64(str) {
240
+ if (typeof str !== 'string')
241
+ return false;
242
+ if (str.length % 4 !== 0)
243
+ return false;
244
+ if (!/^[A-Za-z0-9+/]*={0,2}$/.test(str))
245
+ return false;
246
+ if (str.length > 10000)
247
+ return true;
248
+ return true;
178
249
  }
179
250
  function extractTextFromMessageContent(content) {
180
- if (!content) {
251
+ if (!content)
181
252
  return '';
182
- }
183
- if (typeof content === 'string') {
253
+ if (typeof content === 'string')
184
254
  return content;
185
- }
186
255
  if (Array.isArray(content)) {
187
256
  return content
188
257
  .map((part) => {
@@ -202,16 +271,20 @@ function extractTextFromMessageContent(content) {
202
271
  }
203
272
  return '';
204
273
  }
274
+ function canUseCache(cacheContent) {
275
+ return Boolean(cacheContent && cacheContent.trim().length > 0);
276
+ }
205
277
  function resolveCacheContent(input) {
278
+ var _a, _b, _c;
206
279
  const sections = [];
207
280
  let hasSystem = false;
208
281
  let hasMessages = false;
209
282
  let hasPrompt = false;
210
- if (input.system && input.system.trim()) {
283
+ if ((_a = input.system) === null || _a === void 0 ? void 0 : _a.trim()) {
211
284
  sections.push(`System Instruction:\n${input.system.trim()}`);
212
285
  hasSystem = true;
213
286
  }
214
- if (input.messages && input.messages.length > 0) {
287
+ if ((_b = input.messages) === null || _b === void 0 ? void 0 : _b.length) {
215
288
  const messageSections = [];
216
289
  for (const message of input.messages) {
217
290
  const text = extractTextFromMessageContent(message.content);
@@ -222,34 +295,31 @@ function resolveCacheContent(input) {
222
295
  sections.push(`System Instruction (from messages):\n${text.trim()}`);
223
296
  hasSystem = true;
224
297
  }
225
- continue;
226
298
  }
227
- messageSections.push(`${message.role.toUpperCase()}:\n${text.trim()}`);
299
+ else {
300
+ messageSections.push(`${message.role.toUpperCase()}:\n${text.trim()}`);
301
+ }
228
302
  }
229
303
  if (messageSections.length > 0) {
230
304
  sections.push(`Messages:\n${messageSections.join('\n\n')}`);
231
305
  hasMessages = true;
232
306
  }
233
307
  }
234
- if (input.prompt && input.prompt.trim()) {
308
+ if ((_c = input.prompt) === null || _c === void 0 ? void 0 : _c.trim()) {
235
309
  sections.push(`Prompt Template:\n${input.prompt.trim()}`);
236
310
  hasPrompt = true;
237
311
  }
238
312
  const content = sections.join('\n\n').trim();
239
313
  let source;
240
314
  const sourceCount = [hasSystem, hasMessages, hasPrompt].filter(Boolean).length;
241
- if (sourceCount > 1) {
315
+ if (sourceCount > 1)
242
316
  source = 'combined';
243
- }
244
- else if (hasSystem) {
317
+ else if (hasSystem)
245
318
  source = 'system';
246
- }
247
- else if (hasMessages) {
319
+ else if (hasMessages)
248
320
  source = 'messages';
249
- }
250
- else if (hasPrompt) {
321
+ else if (hasPrompt)
251
322
  source = 'prompt';
252
- }
253
323
  return {
254
324
  content: content || undefined,
255
325
  hasSystem,
@@ -258,120 +328,234 @@ function resolveCacheContent(input) {
258
328
  source,
259
329
  };
260
330
  }
261
- const messageSchema = zod_1.z.object({
262
- role: zod_1.z.enum(['system', 'user', 'assistant']),
263
- content: zod_1.z.any(),
264
- });
265
- const messagesArraySchema = zod_1.z.array(messageSchema);
266
- const ajv = new ajv_1.default({
267
- allErrors: true,
268
- verbose: true,
269
- strict: false,
270
- });
271
- const isUrl = (str) => {
272
- if (typeof str !== 'string')
273
- return false;
274
- return str.startsWith('http://') ||
275
- str.startsWith('https://') ||
276
- str.startsWith('data:');
277
- };
278
- const isLikelyBase64 = (str) => {
279
- if (str.length % 4 !== 0)
280
- return false;
281
- if (!/^[A-Za-z0-9+/]*={0,2}$/.test(str))
282
- return false;
283
- if (str.length > 10000)
284
- return true;
285
- return true;
286
- };
331
+ async function createGoogleCache(exec, index, apiKey, cacheContent, tools) {
332
+ try {
333
+ const useGoogleCache = exec.getNodeParameter('useGoogleCache', index, false);
334
+ if (!useGoogleCache || !(cacheContent === null || cacheContent === void 0 ? void 0 : cacheContent.trim())) {
335
+ return null;
336
+ }
337
+ const googleCacheManager = await getGoogleCacheManager(apiKey);
338
+ const normalizedCacheContent = cacheContent.trim();
339
+ const cacheKeyData = {
340
+ content: normalizedCacheContent,
341
+ tools: tools ? Object.keys(tools).sort() : [],
342
+ model: 'gemini-2.0-flash-001',
343
+ };
344
+ const cacheKey = generateCacheKey(cacheKeyData, 'google_cache:');
345
+ const existingCache = googleCachedContexts.get(cacheKey);
346
+ if (existingCache) {
347
+ return existingCache.name;
348
+ }
349
+ const ttlSeconds = 3600;
350
+ const displayName = `n8n_cache_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
351
+ const cacheConfig = {
352
+ model: 'gemini-2.0-flash-001',
353
+ config: {
354
+ displayName,
355
+ ttl: `${ttlSeconds}s`,
356
+ contents: [{
357
+ role: 'user',
358
+ parts: [{ text: normalizedCacheContent }],
359
+ }],
360
+ },
361
+ };
362
+ if (tools && Object.keys(tools).length > 0) {
363
+ cacheConfig.config.tools = Object.values(tools);
364
+ }
365
+ const result = await googleCacheManager.caches.create(cacheConfig);
366
+ const cachedContentName = result === null || result === void 0 ? void 0 : result.name;
367
+ if (!cachedContentName) {
368
+ throw new CacheError('Failed to create cache: No name in response', { displayName });
369
+ }
370
+ googleCachedContexts.set(cacheKey, {
371
+ name: cachedContentName,
372
+ apiKey
373
+ }, ttlSeconds * 1000);
374
+ return cachedContentName;
375
+ }
376
+ catch (error) {
377
+ if (error instanceof CacheError)
378
+ throw error;
379
+ throw new CacheError(`Google cache creation failed: ${error.message}`, {
380
+ cacheContentLength: cacheContent === null || cacheContent === void 0 ? void 0 : cacheContent.length,
381
+ hasTools: !!tools && Object.keys(tools).length > 0
382
+ });
383
+ }
384
+ }
287
385
  async function buildInput(exec, itemIndex) {
288
386
  const inputType = exec.getNodeParameter('inputType', itemIndex);
289
387
  if (inputType === 'prompt') {
290
- const promptValue = exec.getNodeParameter('prompt', itemIndex, '');
291
- const systemValue = exec.getNodeParameter('system', itemIndex, '');
292
- const result = {};
293
- const trimmedPrompt = typeof promptValue === 'string' ? promptValue.trim() : '';
294
- if (trimmedPrompt) {
295
- result.prompt = trimmedPrompt;
296
- }
297
- const trimmedSystem = typeof systemValue === 'string' ? systemValue.trim() : '';
298
- if (trimmedSystem) {
299
- result.system = trimmedSystem;
300
- }
301
- return result;
388
+ return buildPromptInput(exec, itemIndex);
302
389
  }
303
390
  const messageAsJson = exec.getNodeParameter('messageAsJson', itemIndex, false);
304
391
  return messageAsJson
305
392
  ? buildMessagesFromJson(exec, itemIndex)
306
393
  : buildMessagesFromUI(exec, itemIndex);
307
394
  }
395
+ function buildPromptInput(exec, itemIndex) {
396
+ const result = {};
397
+ const promptValue = exec.getNodeParameter('prompt', itemIndex, '').trim();
398
+ if (promptValue) {
399
+ if (promptValue.length > 100000) {
400
+ throw new ValidationError('Prompt is too long (max 100,000 characters)');
401
+ }
402
+ result.prompt = promptValue;
403
+ }
404
+ const systemValue = exec.getNodeParameter('system', itemIndex, '').trim();
405
+ if (systemValue) {
406
+ if (systemValue.length > 50000) {
407
+ throw new ValidationError('System instruction is too long (max 50,000 characters)');
408
+ }
409
+ result.system = systemValue;
410
+ }
411
+ return result;
412
+ }
413
+ const messageSchema = zod_1.z.object({
414
+ role: zod_1.z.enum(['system', 'user', 'assistant', 'tool']),
415
+ content: zod_1.z.any(),
416
+ });
417
+ const messagesArraySchema = zod_1.z.array(messageSchema);
418
+ const ajv = new ajv_1.default({
419
+ allErrors: true,
420
+ verbose: true,
421
+ strict: false,
422
+ useDefaults: true,
423
+ removeAdditional: true,
424
+ });
308
425
  async function buildMessagesFromJson(exec, itemIndex) {
309
426
  const rawJson = exec.getNodeParameter('messagesJson', itemIndex);
427
+ if (!rawJson.trim()) {
428
+ throw new ValidationError('Messages JSON field is empty');
429
+ }
430
+ if (rawJson.length > 200000) {
431
+ throw new ValidationError('Messages JSON is too large (max 200,000 characters)');
432
+ }
310
433
  try {
311
434
  const parsed = JSON.parse(rawJson);
312
435
  const result = messagesArraySchema.safeParse(parsed);
313
436
  if (!result.success) {
314
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Messages must be an array of objects with role and content.');
437
+ const errorDetails = result.error.issues
438
+ .map((issue) => {
439
+ const path = issue.path.length > 0 ? issue.path.join('.') : '(root)';
440
+ return `${path}: ${issue.message}`;
441
+ })
442
+ .join('; ');
443
+ throw new ValidationError(`Invalid messages format: ${errorDetails}`);
444
+ }
445
+ if (result.data.length > 100) {
446
+ throw new ValidationError('Too many messages (max 100)');
315
447
  }
316
448
  return { messages: result.data };
317
449
  }
318
450
  catch (error) {
319
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON in "Messages (JSON)" field: ${error.message}`);
451
+ if (error instanceof ValidationError)
452
+ throw error;
453
+ throw new ValidationError(`Invalid JSON in messages field: ${error.message}`);
320
454
  }
321
455
  }
322
456
  async function buildMessagesFromUI(exec, itemIndex) {
323
- var _a;
457
+ var _a, _b, _c;
324
458
  const items = exec.getInputData();
325
459
  const messagesUi = exec.getNodeParameter('messages.messagesUi', itemIndex, []);
460
+ if (messagesUi.length > 100) {
461
+ throw new ValidationError('Too many messages (max 100)');
462
+ }
326
463
  const builtMessages = [];
327
464
  const itemBinary = items[itemIndex].binary;
328
465
  for (const msg of messagesUi) {
329
466
  const role = msg.role;
330
467
  if (role === 'system') {
331
- builtMessages.push({ role, content: msg.systemContent || '' });
468
+ if ((_a = msg.systemContent) === null || _a === void 0 ? void 0 : _a.trim()) {
469
+ builtMessages.push({ role, content: msg.systemContent.trim() });
470
+ }
332
471
  continue;
333
472
  }
334
- const attachments = ((_a = msg.attachments) === null || _a === void 0 ? void 0 : _a.attachment) || [];
473
+ const attachments = ((_b = msg.attachments) === null || _b === void 0 ? void 0 : _b.attachment) || [];
474
+ const content = ((_c = msg.content) === null || _c === void 0 ? void 0 : _c.trim()) || '';
335
475
  if (attachments.length === 0) {
336
- builtMessages.push({ role, content: msg.content || '' });
476
+ if (content) {
477
+ builtMessages.push({ role, content });
478
+ }
337
479
  }
338
480
  else {
339
- const messageWithAttachments = await buildMessageWithAttachments(role, msg.content, attachments, itemBinary, exec, itemIndex);
481
+ const messageWithAttachments = await buildMessageWithAttachments(role, content, attachments, itemBinary, exec, itemIndex);
340
482
  if (messageWithAttachments) {
341
483
  builtMessages.push(messageWithAttachments);
342
484
  }
343
485
  }
344
486
  }
345
487
  const convertMessagesToModel = exec.getNodeParameter('convertMessagesToModel', itemIndex, false);
346
- if (convertMessagesToModel) {
347
- return { messages: (0, ai_1.convertToModelMessages)(builtMessages) };
348
- }
349
- return { messages: builtMessages };
488
+ return {
489
+ messages: convertMessagesToModel ? (0, ai_1.convertToModelMessages)(builtMessages) : builtMessages
490
+ };
350
491
  }
492
+ const MAX_ATTACHMENT_SIZE = 50 * 1024 * 1024;
493
+ const MAX_TOTAL_ATTACHMENTS_SIZE = 100 * 1024 * 1024;
351
494
  async function buildMessageWithAttachments(role, content, attachments, itemBinary, exec, itemIndex) {
352
495
  const parts = [];
353
496
  if (content) {
354
497
  parts.push({ type: 'text', text: content });
355
498
  }
499
+ let totalSize = 0;
356
500
  const MAX_CONCURRENT_ATTACHMENTS = 3;
357
- const processedAttachments = [];
358
501
  for (let i = 0; i < attachments.length; i += MAX_CONCURRENT_ATTACHMENTS) {
359
502
  const batch = attachments.slice(i, i + MAX_CONCURRENT_ATTACHMENTS);
360
503
  const batchPromises = batch.map(attachment => processAttachment(attachment, itemBinary, exec, itemIndex));
361
- const batchResults = await Promise.all(batchPromises);
362
- processedAttachments.push(...batchResults);
363
- }
364
- for (const attachment of processedAttachments) {
365
- if (attachment) {
366
- parts.push(attachment);
504
+ const processedAttachments = await Promise.all(batchPromises);
505
+ for (const attachment of processedAttachments) {
506
+ if (attachment) {
507
+ if (attachment.data instanceof Buffer) {
508
+ totalSize += attachment.data.length;
509
+ if (totalSize > MAX_TOTAL_ATTACHMENTS_SIZE) {
510
+ throw new ValidationError(`Total attachments size exceeds limit of ${MAX_TOTAL_ATTACHMENTS_SIZE / 1024 / 1024}MB`);
511
+ }
512
+ }
513
+ parts.push(attachment);
514
+ }
367
515
  }
368
516
  }
369
517
  return parts.length > 0 ? { role, content: parts } : null;
370
518
  }
519
+ function getMimeType(attachment) {
520
+ return attachment.mimeType === 'other' ? attachment.mimeTypeOther : attachment.mimeType;
521
+ }
522
+ async function getBinaryData(fileContentInput, itemBinary, exec, itemIndex) {
523
+ if (itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput]) {
524
+ const binaryData = itemBinary[fileContentInput];
525
+ if (binaryData.data) {
526
+ const buffer = Buffer.from(binaryData.data, 'base64');
527
+ if (buffer.length > MAX_ATTACHMENT_SIZE) {
528
+ throw new ValidationError(`Attachment too large: ${buffer.length / 1024 / 1024}MB (max ${MAX_ATTACHMENT_SIZE / 1024 / 1024}MB)`);
529
+ }
530
+ return {
531
+ data: buffer,
532
+ mimeType: binaryData.mimeType
533
+ };
534
+ }
535
+ }
536
+ try {
537
+ if (isLikelyBase64(fileContentInput)) {
538
+ const buffer = Buffer.from(fileContentInput, 'base64');
539
+ if (buffer.length > MAX_ATTACHMENT_SIZE) {
540
+ throw new ValidationError(`Attachment too large: ${buffer.length / 1024 / 1024}MB (max ${MAX_ATTACHMENT_SIZE / 1024 / 1024}MB)`);
541
+ }
542
+ if (buffer.length > 0) {
543
+ return { data: buffer, mimeType: undefined };
544
+ }
545
+ }
546
+ }
547
+ catch (error) {
548
+ if (error instanceof ValidationError)
549
+ throw error;
550
+ throw new ValidationError(`Invalid file content for attachment: ${error.message}`);
551
+ }
552
+ return { data: null, mimeType: undefined };
553
+ }
371
554
  async function processAttachment(attachment, itemBinary, exec, itemIndex) {
372
555
  const fileContentInput = attachment.fileContent;
373
- if (!fileContentInput || typeof fileContentInput !== 'string')
556
+ if (!fileContentInput || typeof fileContentInput !== 'string') {
374
557
  return null;
558
+ }
375
559
  let mimeType = getMimeType(attachment);
376
560
  let fileData;
377
561
  if (isUrl(fileContentInput)) {
@@ -379,46 +563,19 @@ async function processAttachment(attachment, itemBinary, exec, itemIndex) {
379
563
  }
380
564
  else {
381
565
  const result = await getBinaryData(fileContentInput, itemBinary, exec, itemIndex);
566
+ if (!result.data)
567
+ return null;
382
568
  fileData = result.data;
383
569
  if (!mimeType && result.mimeType) {
384
570
  mimeType = result.mimeType;
385
571
  }
386
572
  }
387
- if (!fileData || (Buffer.isBuffer(fileData) && fileData.length === 0)) {
388
- return null;
389
- }
390
573
  return {
391
574
  type: 'file',
392
575
  data: fileData,
393
- mediaType: mimeType || 'application/octet-stream',
576
+ mediaType: mimeType || 'application/octet-stream'
394
577
  };
395
578
  }
396
- function getMimeType(attachment) {
397
- return attachment.mimeType === 'other'
398
- ? attachment.mimeTypeOther
399
- : attachment.mimeType;
400
- }
401
- async function getBinaryData(fileContentInput, itemBinary, exec, itemIndex) {
402
- if (itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput]) {
403
- const binaryData = itemBinary[fileContentInput];
404
- return {
405
- data: Buffer.from(binaryData.data, 'base64'),
406
- mimeType: binaryData.mimeType,
407
- };
408
- }
409
- try {
410
- if (isLikelyBase64(fileContentInput)) {
411
- const buffer = Buffer.from(fileContentInput, 'base64');
412
- if (buffer.length > 0 && buffer.length < 50 * 1024 * 1024) {
413
- return { data: buffer, mimeType: undefined };
414
- }
415
- }
416
- }
417
- catch (error) {
418
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid file content for attachment: ${error.message}`);
419
- }
420
- return { data: null, mimeType: undefined };
421
- }
422
579
  function formatTextResult(result, includeRequestBody, provider) {
423
580
  var _a, _b, _c, _d, _e;
424
581
  let { text, reasoning } = result;
@@ -532,163 +689,159 @@ function getCacheMetrics(result, provider, metadata) {
532
689
  }
533
690
  function formatResponse(result) {
534
691
  var _a, _b, _c, _d;
535
- const response = {
692
+ return {
536
693
  id: (_a = result.response) === null || _a === void 0 ? void 0 : _a.id,
537
694
  modelId: (_b = result.response) === null || _b === void 0 ? void 0 : _b.modelId,
538
695
  timestamp: (_c = result.response) === null || _c === void 0 ? void 0 : _c.timestamp,
539
696
  headers: (_d = result.response) === null || _d === void 0 ? void 0 : _d.headers,
540
697
  };
541
- return response;
542
698
  }
543
- async function getProvider(provider, apiKey, baseURL, customHeaders) {
544
- const headersKey = customHeaders
545
- ? JSON.stringify(Object.keys(customHeaders)
699
+ async function getProvider(provider, config) {
700
+ const headersKey = config.customHeaders
701
+ ? generateCacheKey(Object.keys(config.customHeaders)
546
702
  .sort()
547
- .map((key) => [key, customHeaders[key]]))
703
+ .map((key) => [key, config.customHeaders[key]]), 'headers:')
548
704
  : '';
549
- const cacheKey = `${provider}:${apiKey}:${baseURL || ''}:${headersKey}`;
705
+ const cacheKey = generateCacheKey(`${provider}:${config.apiKey}:${config.baseURL || ''}:${headersKey}`, 'provider:');
550
706
  const cached = providerCache.get(cacheKey);
551
707
  if (cached)
552
708
  return cached;
553
- let providerInstance;
554
709
  try {
710
+ let providerInstance;
555
711
  switch (provider) {
556
712
  case 'google':
557
713
  const { createGoogleGenerativeAI } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/google')));
558
714
  providerInstance = createGoogleGenerativeAI({
559
- apiKey,
560
- ...(baseURL && { baseURL }),
561
- ...(customHeaders && Object.keys(customHeaders).length > 0 && { headers: customHeaders }),
715
+ apiKey: config.apiKey,
716
+ ...(config.baseURL && { baseURL: config.baseURL }),
717
+ ...(config.customHeaders && Object.keys(config.customHeaders).length > 0 && {
718
+ headers: config.customHeaders
719
+ }),
562
720
  });
563
721
  break;
564
722
  case 'deepseek':
565
723
  const { createDeepSeek } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/deepseek')));
566
- providerInstance = createDeepSeek({ apiKey, ...(baseURL && { baseURL }) });
724
+ providerInstance = createDeepSeek({
725
+ apiKey: config.apiKey,
726
+ ...(config.baseURL && { baseURL: config.baseURL })
727
+ });
567
728
  break;
568
729
  case 'groq':
569
730
  const { createGroq } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/groq')));
570
- providerInstance = createGroq({ apiKey, ...(baseURL && { baseURL }) });
731
+ providerInstance = createGroq({
732
+ apiKey: config.apiKey,
733
+ ...(config.baseURL && { baseURL: config.baseURL })
734
+ });
571
735
  break;
572
736
  case 'openrouter':
573
737
  const { createOpenRouter } = await Promise.resolve().then(() => __importStar(require('@openrouter/ai-sdk-provider')));
574
- providerInstance = createOpenRouter({ apiKey, ...(baseURL && { baseURL }) });
738
+ providerInstance = createOpenRouter({
739
+ apiKey: config.apiKey,
740
+ ...(config.baseURL && { baseURL: config.baseURL })
741
+ });
575
742
  break;
576
743
  default:
577
- throw new Error(`Unsupported provider: ${provider}`);
744
+ throw new ProviderError(`Unsupported provider: ${provider}`);
578
745
  }
579
746
  providerCache.set(cacheKey, providerInstance);
580
747
  return providerInstance;
581
748
  }
582
749
  catch (error) {
583
- throw new Error(`Failed to initialize ${provider} provider: ${error.message}`);
750
+ throw new ProviderError(`Failed to initialize ${provider} provider: ${error.message}`, { provider, baseURL: config.baseURL });
584
751
  }
585
752
  }
586
753
  function parseAndValidateSchema(rawSchema, exec) {
587
- const cacheKey = `schema:${Buffer.from(rawSchema).toString('base64').substring(0, 50)}`;
754
+ if (!rawSchema.trim()) {
755
+ throw new ValidationError('Schema field is empty');
756
+ }
757
+ if (rawSchema.length > 100000) {
758
+ throw new ValidationError('Schema is too large (max 100,000 characters)');
759
+ }
760
+ const cacheKey = generateCacheKey(rawSchema, 'schema:');
588
761
  const cached = schemaCache.get(cacheKey);
589
762
  if (cached)
590
763
  return cached;
591
- let parsedSchema;
592
764
  try {
593
- parsedSchema = JSON.parse(rawSchema);
765
+ const parsedSchema = JSON.parse(rawSchema);
766
+ if (!ajv.validateSchema(parsedSchema)) {
767
+ throw new ValidationError(`Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
768
+ }
769
+ schemaCache.set(cacheKey, parsedSchema);
770
+ return parsedSchema;
594
771
  }
595
772
  catch (err) {
596
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), 'Schema is not valid JSON: ' + err.message);
597
- }
598
- if (!ajv.validateSchema(parsedSchema)) {
599
- throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid JSON Schema: ${ajv.errorsText(ajv.errors)}`);
773
+ throw new ValidationError(`Schema is not valid JSON: ${err.message}`);
600
774
  }
601
- schemaCache.set(cacheKey, parsedSchema);
602
- return parsedSchema;
603
775
  }
604
776
  function parseStopSequences(stopSequencesStr) {
605
777
  if (!stopSequencesStr)
606
778
  return undefined;
607
- return stopSequencesStr.split(',').map(s => s.trim()).filter(s => s.length > 0);
779
+ const sequences = stopSequencesStr
780
+ .split(',')
781
+ .map(s => s.trim())
782
+ .filter(Boolean);
783
+ return sequences.length > 0 ? sequences : undefined;
608
784
  }
609
785
/**
 * Copies numeric option values from `options` onto `params`, in place.
 *
 * For each key in `keys`, the option value is coerced with `Number(...)` and
 * assigned only when it is set (not undefined/null/'') and coerces to a real
 * number; non-numeric values are skipped silently so a bad option never
 * reaches the provider API. Returns nothing.
 */
function applyNumericOptions(params, options, keys) {
    for (const key of keys) {
        const value = options[key];
        // '' would coerce to 0, so treat it (like undefined/null) as "not set".
        if (value !== undefined && value !== null && value !== '') {
            const numValue = Number(value);
            // Number.isNaN avoids the type-coercing global isNaN.
            if (!Number.isNaN(numValue)) {
                params[key] = numValue;
            }
        }
    }
}
617
- class UniversalAI {
618
- constructor() {
619
- this.description = descriptions_1.UNIVERSAL_AI_DESCRIPTION;
620
- this.methods = {
621
- loadOptions: {
622
- async getModels() {
623
- const provider = this.getCurrentNodeParameter('provider');
624
- const cacheKey = `models:${provider}`;
625
- const cached = modelCache.get(cacheKey);
626
- if (cached)
627
- return cached;
628
- const { OPENROUTER_MODELS, GOOGLE_GEMINI_MODELS, DEEPSEEK_MODELS, GROQ_MODELS } = await Promise.resolve().then(() => __importStar(require('./model-lists')));
629
- const models = {
630
- google: GOOGLE_GEMINI_MODELS,
631
- deepseek: DEEPSEEK_MODELS,
632
- groq: GROQ_MODELS,
633
- openrouter: OPENROUTER_MODELS,
634
- }[provider] || [];
635
- modelCache.set(cacheKey, models);
636
- return models;
637
- },
638
- },
639
- };
796
/**
 * Decides what prompt/message context accompanies a request.
 *
 * Without an active cached-content handle, the input passes through
 * untouched. With one, only the prompt — or the non-system messages — is
 * forwarded (system messages are dropped; presumably the cache already
 * carries that context — see the cache-preparation call sites).
 *
 * @param cacheContentInfo currently unused; kept for call-site compatibility
 */
function resolveFinalContext(input, cachedContentName, cacheContentInfo) {
    if (!cachedContentName) {
        return input;
    }
    const context = {};
    if (input.prompt) {
        context.prompt = input.prompt;
    }
    else if (input.messages) {
        const nonSystem = input.messages.filter((message) => message.role !== 'system');
        if (nonSystem.length > 0) {
            context.messages = nonSystem;
        }
    }
    return context;
}
812
/**
 * Normalizes errors from an async operation.
 *
 * - UniversalAIError / NodeOperationError instances are rethrown unchanged.
 * - Anything else is wrapped in a UniversalAIError tagged 'UNKNOWN_ERROR',
 *   with the failing `context` label prefixed to the message.
 * - When an execution context with continueOnFail() is supplied, the failure
 *   becomes a regular output item instead of aborting the run. The item is
 *   returned as a ONE-ELEMENT ARRAY because every caller spreads the result
 *   (`returnData.push(...result)`); a bare object there is not iterable.
 *
 * @param fn        async thunk to execute
 * @param context   label identifying the failing operation
 * @param exec      optional n8n execution context (for continueOnFail)
 * @param itemIndex optional input-item index (for pairedItem bookkeeping)
 */
function withErrorHandling(fn, context, exec, itemIndex) {
    return fn().catch((error) => {
        if (error instanceof UniversalAIError) {
            throw error;
        }
        if (error instanceof n8n_workflow_1.NodeOperationError) {
            throw error;
        }
        const errorMessage = error instanceof Error ? error.message : String(error);
        const enhancedError = new UniversalAIError(`${context}: ${errorMessage}`, 'UNKNOWN_ERROR', { originalError: error });
        if (exec && itemIndex !== undefined && exec.continueOnFail()) {
            // Array-shaped so callers can spread it; pairedItem keeps the
            // error linked to the input item that produced it.
            return [{
                    json: {
                        error: enhancedError.message,
                        errorCode: enhancedError.code,
                        success: false
                    },
                    pairedItem: { item: itemIndex }
                }];
        }
        throw enhancedError;
    });
}
682
- exports.UniversalAI = UniversalAI;
683
834
/**
 * Executes one input item: reads its node parameters, builds the model
 * input, then dispatches to the text or object generation pipeline.
 * Errors are normalized via withErrorHandling (tagged 'processItem').
 */
async function processItem(exec, index, provider, aiProvider, apiKey) {
    return withErrorHandling(async () => {
        const operation = exec.getNodeParameter('operation', index);
        const model = exec.getNodeParameter('model', index);
        const options = exec.getNodeParameter('options', index, {});
        const input = await buildInput(exec, index);
        const modelSettings = getModelSettings(exec, index, provider, operation, options);
        const runOperation = operation === 'generateText'
            ? generateTextOperation
            : generateObjectOperation;
        return runOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey);
    }, 'processItem', exec, index);
}
693
846
  function getModelSettings(exec, index, provider, operation, options) {
694
847
  const settings = {};
@@ -699,12 +852,14 @@ function getModelSettings(exec, index, provider, operation, options) {
699
852
  if (provider === 'google') {
700
853
  const safetySettingsRaw = exec.getNodeParameter('safetySettings.settings', index, []);
701
854
  if (safetySettingsRaw.length > 0) {
702
- settings.safetySettings = safetySettingsRaw.map((s) => ({
855
+ settings.safetySettings = safetySettingsRaw.map(s => ({
703
856
  category: s.category,
704
857
  threshold: s.threshold,
705
858
  }));
706
859
  }
707
- settings.structuredOutputs = operation === 'generateObject';
860
+ if (operation === 'generateObject') {
861
+ settings.structuredOutputs = true;
862
+ }
708
863
  const responseModalities = exec.getNodeParameter('responseModalities', index, []);
709
864
  if (responseModalities.length > 0) {
710
865
  settings.responseModalities = responseModalities;
@@ -719,7 +874,7 @@ function buildGoogleProviderOptions(exec, index, cachedContentName) {
719
874
  if (!Number.isNaN(thinkingBudgetValue) && thinkingBudgetValue > -1) {
720
875
  options.thinkingConfig = {
721
876
  thinkingBudget: Math.max(0, thinkingBudgetValue),
722
- includeThoughts,
877
+ includeThoughts
723
878
  };
724
879
  }
725
880
  if (cachedContentName) {
@@ -728,20 +883,16 @@ function buildGoogleProviderOptions(exec, index, cachedContentName) {
728
883
  return Object.keys(options).length > 0 ? options : undefined;
729
884
  }
730
885
/**
 * Collects the user-configured custom HTTP headers (Google provider only).
 * Header names and values are trimmed; entries with a blank name are
 * skipped and a missing value defaults to ''. Returns undefined when no
 * usable headers are configured.
 */
function getGoogleCustomHeaders(exec, index) {
    const headersCollection = exec.getNodeParameter('customHeaders', index, {});
    const rawEntries = headersCollection == null ? undefined : headersCollection.headers;
    const entries = rawEntries == null ? [] : rawEntries;
    if (!entries.length)
        return undefined;
    const headers = {};
    for (const entry of entries) {
        const rawName = entry == null ? undefined : entry.name;
        const trimmedName = rawName == null ? undefined : rawName.trim();
        if (trimmedName) {
            const trimmedValue = entry.value == null ? undefined : entry.value.trim();
            headers[trimmedName] = trimmedValue == null ? '' : trimmedValue;
        }
    }
    return Object.keys(headers).length > 0 ? headers : undefined;
}
@@ -754,69 +905,101 @@ async function prepareGoogleCache(exec, index, apiKey, input, tools, context) {
754
905
  cachedContentName = await createGoogleCache(exec, index, apiKey, cacheContentInfo.content, tools);
755
906
  }
756
907
  catch (error) {
757
- console.warn(`UniversalAI: Cache creation for ${context} generation failed, continuing without cache:`, error);
908
+ if (error instanceof CacheError) {
909
+ console.warn(`Cache creation failed for ${context}:`, error.message);
910
+ }
911
+ else {
912
+ console.warn(`Unexpected cache error for ${context}:`, error);
913
+ }
758
914
  }
759
915
  }
760
916
  const googleProviderOptions = buildGoogleProviderOptions(exec, index, cachedContentName || undefined);
761
917
  return {
762
918
  cachedContentName,
763
919
  cacheContentInfo,
764
- googleProviderOptions,
920
+ googleProviderOptions
765
921
  };
766
922
  }
767
923
// Runs the `generateText` operation for one input item.
// Flow: read per-item parameters -> (Google only) build native tools and try
// to create a cached-content handle -> assemble SDK params -> apply numeric
// options -> either stream the response or make a one-shot generateText call.
// Returns an array of n8n items; errors are normalized by withErrorHandling.
async function generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey) {
    return withErrorHandling(async () => {
        const enableStreaming = exec.getNodeParameter('enableStreaming', index, false);
        const includeRequestBody = options.includeRequestBody;
        // Provider-native tools are a Google-only feature in this node.
        const tools = provider === 'google' ? await buildGoogleTools(exec, index) : undefined;
        let cachedContentName = null;
        let googleProviderOptions;
        let cacheContentInfo;
        if (provider === 'google') {
            // Best-effort explicit-cache setup: prepareGoogleCache logs and
            // swallows cache failures so generation continues without one.
            const cacheSetup = await prepareGoogleCache(exec, index, apiKey, input, tools, 'text');
            cachedContentName = cacheSetup.cachedContentName;
            cacheContentInfo = cacheSetup.cacheContentInfo;
            googleProviderOptions = cacheSetup.googleProviderOptions;
        }
        // With an active cache handle, system context is stripped from the
        // request (resolveFinalContext); otherwise input passes through.
        const finalContext = resolveFinalContext(input, cachedContentName, cacheContentInfo);
        const params = {
            model: aiProvider(model, modelSettings),
            ...finalContext,
        };
        // Tools were handed to prepareGoogleCache above, so when a cache
        // handle is active they are not re-sent with the request.
        if (tools && !cachedContentName) {
            params.tools = tools;
        }
        if (provider === 'google' && googleProviderOptions) {
            params.providerOptions = {
                google: googleProviderOptions,
            };
        }
        applyNumericOptions(params, options, [
            'maxTokens', 'temperature', 'topP', 'topK',
            'frequencyPenalty', 'presencePenalty', 'seed'
        ]);
        if (enableStreaming) {
            return await handleStreaming(params, provider, includeRequestBody);
        }
        const result = await (0, ai_1.generateText)(params);
        const formattedResult = formatTextResult(result, includeRequestBody, provider);
        return [{ json: formattedResult }];
    }, 'generateTextOperation', exec, index);
}
962
// Runs the `generateObject` operation for one input item: validates the
// user-supplied JSON Schema, optionally prepares a Google cached-content
// handle, assembles the SDK params, and calls generateObject.
// Errors are normalized by withErrorHandling (tagged 'generateObjectOperation').
async function generateObjectOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey) {
    return withErrorHandling(async () => {
        const schemaName = exec.getNodeParameter('schemaName', index, '').trim();
        const schemaDescription = exec.getNodeParameter('schemaDescription', index, '').trim();
        const rawSchema = exec.getNodeParameter('schema', index);
        // Throws ValidationError on empty/oversized/malformed schemas.
        const parsedSchema = parseAndValidateSchema(rawSchema, exec);
        let cachedContentName = null;
        let googleProviderOptions;
        let cacheContentInfo;
        if (provider === 'google') {
            // No tools for object generation; cache setup is best-effort
            // (prepareGoogleCache logs and swallows cache failures).
            const cacheSetup = await prepareGoogleCache(exec, index, apiKey, input, undefined, 'object');
            cachedContentName = cacheSetup.cachedContentName;
            cacheContentInfo = cacheSetup.cacheContentInfo;
            googleProviderOptions = cacheSetup.googleProviderOptions;
        }
        // Strips system context when a cache handle is active; passthrough otherwise.
        const finalContext = resolveFinalContext(input, cachedContentName, cacheContentInfo);
        const params = {
            model: aiProvider(model, modelSettings),
            schema: (0, ai_1.jsonSchema)(parsedSchema),
            schemaName,
            schemaDescription,
            ...finalContext,
        };
        if (provider === 'google' && googleProviderOptions) {
            params.providerOptions = {
                google: googleProviderOptions,
            };
        }
        // Note: unlike the text operation, 'maxTokens' is not forwarded here.
        applyNumericOptions(params, options, [
            'temperature', 'topP', 'topK',
            'frequencyPenalty', 'presencePenalty', 'seed'
        ]);
        const result = await (0, ai_1.generateObject)(params);
        const formattedResult = formatObjectResult(result, options.includeRequestBody, provider);
        return [{ json: formattedResult }];
    }, 'generateObjectOperation', exec, index);
}
999
  async function buildGoogleTools(exec, index) {
816
1000
  const googleTools = exec.getNodeParameter('googleTools', index, []);
817
- if (!googleTools || googleTools.length === 0) {
1001
+ if (!(googleTools === null || googleTools === void 0 ? void 0 : googleTools.length))
818
1002
  return undefined;
819
- }
820
1003
  const tools = {};
821
1004
  const { google } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/google')));
822
1005
  const toolSet = new Set(googleTools);
@@ -832,86 +1015,127 @@ async function buildGoogleTools(exec, index) {
832
1015
  return tools;
833
1016
  }
834
1017
// Streams a text generation: emits one n8n item per text chunk
// ({ chunk, isStreaming: true }) followed by a final summary item carrying
// the full text, tool calls/results, finish reason and usage.
async function handleStreaming(params, provider, includeRequestBody) {
    return withErrorHandling(async () => {
        const stream = await (0, ai_1.streamText)(params);
        const chunks = [];
        let fullText = '';
        try {
            for await (const textPart of stream.textStream) {
                fullText += textPart;
                chunks.push({
                    json: {
                        chunk: textPart,
                        isStreaming: true
                    }
                });
            }
        }
        catch (error) {
            // A broken stream is fatal for this item; tag it distinctly.
            throw new UniversalAIError(`Stream processing failed: ${error.message}`, 'STREAM_ERROR', { provider });
        }
        // usage/request metadata are best-effort: failures only log a warning
        // and leave the corresponding field undefined.
        let finalUsage;
        let requestMetadata;
        try {
            finalUsage = await stream.usage;
        }
        catch (error) {
            console.warn('Could not get usage from stream:', error);
        }
        try {
            requestMetadata = stream.request ? await stream.request : undefined;
        }
        catch (error) {
            console.warn('Could not get request metadata from stream:', error);
        }
        // NOTE(review): toolCalls/toolResults/finishReason are read without
        // awaiting, unlike usage/request above — confirm they are plain
        // values (not promises) in this AI SDK version.
        const finalJson = {
            text: fullText,
            toolCalls: stream.toolCalls || [],
            toolResults: stream.toolResults || [],
            finishReason: stream.finishReason,
            usage: finalUsage ? formatUsage({ usage: finalUsage }, provider) : undefined,
            isStreaming: false,
            isFinal: true,
        };
        if (includeRequestBody && (requestMetadata === null || requestMetadata === void 0 ? void 0 : requestMetadata.body) !== undefined) {
            finalJson.request = { body: requestMetadata.body };
        }
        chunks.push({ json: finalJson });
        return chunks;
    }, 'handleStreaming');
}
1066
// n8n node implementation: wires the node description, the model-list
// loadOptions resolver, and the per-item execute() loop.
class UniversalAI {
    constructor() {
        this.description = descriptions_1.UNIVERSAL_AI_DESCRIPTION;
        this.methods = {
            loadOptions: {
                // Returns the static model list for the selected provider,
                // memoized in modelCache. `this` is n8n's loadOptions context;
                // the inner arrow function keeps it lexically.
                async getModels() {
                    return withErrorHandling(async () => {
                        const provider = this.getCurrentNodeParameter('provider');
                        const cacheKey = generateCacheKey(provider, 'models:');
                        const cached = modelCache.get(cacheKey);
                        if (cached)
                            return cached;
                        // Lazy-load the static model lists only when needed.
                        const { OPENROUTER_MODELS, GOOGLE_GEMINI_MODELS, DEEPSEEK_MODELS, GROQ_MODELS } = await Promise.resolve().then(() => __importStar(require('./model-lists')));
                        const models = {
                            google: GOOGLE_GEMINI_MODELS,
                            deepseek: DEEPSEEK_MODELS,
                            groq: GROQ_MODELS,
                            openrouter: OPENROUTER_MODELS
                        }[provider] || [];
                        modelCache.set(cacheKey, models);
                        return models;
                    }, 'getModels');
                },
            },
        };
    }
    // Main entry point: resolves credentials for the selected provider,
    // builds the provider client once, then processes every input item.
    // With continueOnFail enabled, a failing item becomes an error item
    // (error/errorCode/itemIndex/success:false) instead of aborting the run.
    async execute() {
        const items = this.getInputData();
        const returnData = [];
        const provider = this.getNodeParameter('provider', 0);
        const credentialType = {
            google: 'googleGenerativeAIApi',
            deepseek: 'deepSeekApi',
            groq: 'groqApi',
            openrouter: 'openRouterApi'
        }[provider];
        if (!credentialType) {
            throw new ValidationError(`Unsupported provider: ${provider}`);
        }
        const credentials = await this.getCredentials(credentialType);
        if (!(credentials === null || credentials === void 0 ? void 0 : credentials.apiKey)) {
            throw new ValidationError('No API key provided in credentials');
        }
        // Custom request headers are a Google-only feature in this node.
        const customHeaders = provider === 'google' ? getGoogleCustomHeaders(this, 0) : undefined;
        const providerConfig = {
            apiKey: credentials.apiKey,
            customHeaders,
            // Only forward baseURL when the credential actually sets one.
            ...(credentials.baseUrl ? { baseURL: credentials.baseUrl } : {}),
        };
        const aiProvider = await getProvider(provider, providerConfig);
        for (let i = 0; i < items.length; i++) {
            try {
                const result = await processItem(this, i, provider, aiProvider, credentials.apiKey);
                returnData.push(...result);
            }
            catch (error) {
                if (this.continueOnFail()) {
                    // pairedItem links the error item back to its input item.
                    returnData.push({
                        json: {
                            error: error instanceof Error ? error.message : 'Unknown error',
                            errorCode: error instanceof UniversalAIError ? error.code : 'UNKNOWN_ERROR',
                            itemIndex: i,
                            success: false
                        },
                        pairedItem: { item: i }
                    });
                    continue;
                }
                throw error;
            }
        }
        return [returnData];
    }
}
exports.UniversalAI = UniversalAI;
917
1141
  //# sourceMappingURL=UniversalAI.node.js.map