n8n-nodes-vercel-ai-sdk-universal-temp 0.1.47 → 0.1.49
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/nodes/UniversalAI/UniversalAI.node.js +108 -168
- package/dist/nodes/UniversalAI/UniversalAI.node.js.map +1 -1
- package/dist/nodes/shared/descriptions.js +38 -1
- package/dist/nodes/shared/descriptions.js.map +1 -1
- package/dist/package.json +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +1 -1
@@ -38,10 +38,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.UniversalAI = void 0;
 const n8n_workflow_1 = require("n8n-workflow");
-let googleProvider;
-let deepseekProvider;
-let groqProvider;
-let openrouterProvider;
 const ai_1 = require("ai");
 const zod_1 = require("zod");
 const ajv_1 = __importDefault(require("ajv"));
@@ -115,8 +111,17 @@ class Cache {
 const modelCache = new Cache(50);
 const providerCache = new Cache(20);
 const schemaCache = new Cache(30);
-
+const googleCacheClients = new Cache(10, 60 * 60 * 1000);
 const googleCachedContexts = new Cache(50, 55 * 60 * 1000);
+async function getGoogleCacheManager(apiKey) {
+    let client = googleCacheClients.get(apiKey);
+    if (!client) {
+        const { GoogleGenAI } = await Promise.resolve().then(() => __importStar(require('@google/genai')));
+        client = new GoogleGenAI({ apiKey });
+        googleCacheClients.set(apiKey, client);
+    }
+    return client;
+}
 async function createGoogleCache(exec, index, apiKey, cacheContent, tools) {
     var _a;
     try {
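The new `googleCacheClients` cache and `getGoogleCacheManager` helper memoize one `@google/genai` client per API key (up to 10 clients, kept for an hour), so repeated cache operations no longer re-import the SDK and rebuild the client. A rough TypeScript-source sketch of the pattern; the plain `Map` used here and the `await import(...)` form are assumptions, since only the compiled output appears in this diff:

    // Sketch only: a Map stands in for the node's bounded, TTL-aware Cache class.
    const googleCacheClients = new Map<string, import('@google/genai').GoogleGenAI>();

    async function getGoogleCacheManager(apiKey: string) {
        let client = googleCacheClients.get(apiKey);
        if (!client) {
            const { GoogleGenAI } = await import('@google/genai'); // lazy-loaded, then reused per key
            client = new GoogleGenAI({ apiKey });
            googleCacheClients.set(apiKey, client);
        }
        return client;
    }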
@@ -124,13 +129,9 @@ async function createGoogleCache(exec, index, apiKey, cacheContent, tools) {
         if (!useGoogleCache) {
             return null;
         }
-
-        const { GoogleGenAI } = await Promise.resolve().then(() => __importStar(require('@google/genai')));
-        googleCacheManager = new GoogleGenAI({ apiKey });
-        }
+        const googleCacheManager = await getGoogleCacheManager(apiKey);
         const normalizedCacheContent = (_a = cacheContent === null || cacheContent === void 0 ? void 0 : cacheContent.trim()) !== null && _a !== void 0 ? _a : '';
         if (!normalizedCacheContent) {
-            console.log('UniversalAI: No cache content provided, skipping cache creation');
             return null;
         }
         const cacheKeyData = {
@@ -141,7 +142,6 @@ async function createGoogleCache(exec, index, apiKey, cacheContent, tools) {
         const cacheKey = JSON.stringify(cacheKeyData);
         const existingCache = googleCachedContexts.get(cacheKey);
         if (existingCache) {
-            console.log('UniversalAI: Reusing cached content:', existingCache.name);
             return existingCache.name;
         }
         const ttlSeconds = 3600;
@@ -160,19 +160,16 @@ async function createGoogleCache(exec, index, apiKey, cacheContent, tools) {
         if (tools && Object.keys(tools).length > 0) {
             cacheConfig.config.tools = Object.values(tools);
         }
-        console.log('UniversalAI: Creating Google cache with config:', JSON.stringify(cacheConfig, null, 2));
         const result = await googleCacheManager.caches.create(cacheConfig);
         const cachedContentName = result === null || result === void 0 ? void 0 : result.name;
         if (!cachedContentName) {
             throw new Error('Failed to get cached content name from creation response');
         }
         googleCachedContexts.set(cacheKey, { name: cachedContentName }, ttlSeconds * 1000);
-        console.log('UniversalAI: Cache created successfully:', cachedContentName);
         return cachedContentName;
     }
     catch (error) {
-        console.error('UniversalAI: Failed to create Google cache:', error);
-        console.log('UniversalAI: Falling back to non-cached execution');
+        console.error('UniversalAI: Failed to create Google cache. Falling back to non-cached execution:', error);
         return null;
     }
 }
@@ -381,12 +378,10 @@ async function processAttachment(attachment, itemBinary, exec, itemIndex) {
         fileData = fileContentInput;
     }
     else {
-
-
-
-
-        if (!mimeType && (binaryItem === null || binaryItem === void 0 ? void 0 : binaryItem.mimeType)) {
-            mimeType = binaryItem.mimeType;
+        const result = await getBinaryData(fileContentInput, itemBinary, exec, itemIndex);
+        fileData = result.data;
+        if (!mimeType && result.mimeType) {
+            mimeType = result.mimeType;
         }
     }
     if (!fileData || (Buffer.isBuffer(fileData) && fileData.length === 0)) {
@@ -406,20 +401,23 @@ function getMimeType(attachment) {
 async function getBinaryData(fileContentInput, itemBinary, exec, itemIndex) {
     if (itemBinary === null || itemBinary === void 0 ? void 0 : itemBinary[fileContentInput]) {
         const binaryData = itemBinary[fileContentInput];
-        return
+        return {
+            data: Buffer.from(binaryData.data, 'base64'),
+            mimeType: binaryData.mimeType,
+        };
     }
     try {
         if (isLikelyBase64(fileContentInput)) {
             const buffer = Buffer.from(fileContentInput, 'base64');
             if (buffer.length > 0 && buffer.length < 50 * 1024 * 1024) {
-                return buffer;
+                return { data: buffer, mimeType: undefined };
             }
         }
     }
     catch (error) {
         throw new n8n_workflow_1.NodeOperationError(exec.getNode(), `Invalid file content for attachment: ${error.message}`);
     }
-    return null;
+    return { data: null, mimeType: undefined };
 }
 function formatTextResult(result, includeRequestBody, provider) {
     var _a, _b, _c, _d, _e;
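`getBinaryData` previously returned a bare `Buffer` (or `null`); it now always returns an object carrying both the bytes and, when the input names an n8n binary property, the MIME type recorded on that property, so `processAttachment` no longer has to look the MIME type up itself. A self-contained TypeScript sketch of the new contract (the interface name, the simplified signature, and the omission of the `isLikelyBase64` guard and error handling are all assumptions for illustration):

    interface BinaryLookupResult {
        data: Buffer | null;
        mimeType?: string;
    }

    // Resolution order: n8n binary property first, then raw base64 input, then an empty result.
    async function getBinaryDataSketch(
        fileContentInput: string,
        itemBinary?: Record<string, { data: string; mimeType?: string }>,
    ): Promise<BinaryLookupResult> {
        const binaryItem = itemBinary?.[fileContentInput];
        if (binaryItem) {
            return { data: Buffer.from(binaryItem.data, 'base64'), mimeType: binaryItem.mimeType };
        }
        const buffer = Buffer.from(fileContentInput, 'base64');
        if (buffer.length > 0 && buffer.length < 50 * 1024 * 1024) {
            return { data: buffer, mimeType: undefined }; // size-capped raw base64 input
        }
        return { data: null, mimeType: undefined };
    }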
@@ -492,15 +490,9 @@ function formatUsage(result, provider) {
     return usage;
 }
 function getCacheMetrics(result, provider, metadata) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r
+    var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r;
     const resolvedMetadata = metadata !== null && metadata !== void 0 ? metadata : result.experimental_providerMetadata;
     const metrics = {};
-    console.log('UniversalAI: Checking cache metrics from metadata:', {
-        provider,
-        hasMetadata: !!resolvedMetadata,
-        metadataKeys: resolvedMetadata ? Object.keys(resolvedMetadata) : [],
-        googleMetadata: resolvedMetadata === null || resolvedMetadata === void 0 ? void 0 : resolvedMetadata.google,
-    });
     switch (provider) {
         case 'deepseek':
             if (((_a = resolvedMetadata === null || resolvedMetadata === void 0 ? void 0 : resolvedMetadata.deepseek) === null || _a === void 0 ? void 0 : _a.promptCacheHitTokens) !== undefined) {
@@ -519,53 +511,42 @@ function getCacheMetrics(result, provider, metadata) {
             }
             break;
         case 'google':
-
-                google: resolvedMetadata === null || resolvedMetadata === void 0 ? void 0 : resolvedMetadata.google,
-                usageMetadata: (_e = resolvedMetadata === null || resolvedMetadata === void 0 ? void 0 : resolvedMetadata.google) === null || _e === void 0 ? void 0 : _e.usageMetadata,
-                candidates: (_f = resolvedMetadata === null || resolvedMetadata === void 0 ? void 0 : resolvedMetadata.google) === null || _f === void 0 ? void 0 : _f.candidates,
-            });
-            if (((_h = (_g = resolvedMetadata === null || resolvedMetadata === void 0 ? void 0 : resolvedMetadata.google) === null || _g === void 0 ? void 0 : _g.usageMetadata) === null || _h === void 0 ? void 0 : _h.cachedContentTokenCount) !== undefined) {
+            if (((_f = (_e = resolvedMetadata === null || resolvedMetadata === void 0 ? void 0 : resolvedMetadata.google) === null || _e === void 0 ? void 0 : _e.usageMetadata) === null || _f === void 0 ? void 0 : _f.cachedContentTokenCount) !== undefined) {
                 metrics.cachedContentTokenCount = resolvedMetadata.google.usageMetadata.cachedContentTokenCount;
-                console.log('UniversalAI: Found cachedContentTokenCount in usageMetadata:', metrics.cachedContentTokenCount);
             }
-            if (((
+            if (((_h = (_g = result.response) === null || _g === void 0 ? void 0 : _g.usageMetadata) === null || _h === void 0 ? void 0 : _h.cachedContentTokenCount) !== undefined) {
                 metrics.cachedContentTokenCount = result.response.usageMetadata.cachedContentTokenCount;
-                console.log('UniversalAI: Found cachedContentTokenCount in response metadata:', metrics.cachedContentTokenCount);
             }
-            if (((
+            if (((_m = (_l = (_k = (_j = resolvedMetadata === null || resolvedMetadata === void 0 ? void 0 : resolvedMetadata.google) === null || _j === void 0 ? void 0 : _j.candidates) === null || _k === void 0 ? void 0 : _k[0]) === null || _l === void 0 ? void 0 : _l.usageMetadata) === null || _m === void 0 ? void 0 : _m.cachedContentTokenCount) !== undefined) {
                 metrics.cachedContentTokenCount = resolvedMetadata.google.candidates[0].usageMetadata.cachedContentTokenCount;
-                console.log('UniversalAI: Found cachedContentTokenCount in candidates metadata:', metrics.cachedContentTokenCount);
             }
-            if (((
+            if (((_p = (_o = resolvedMetadata === null || resolvedMetadata === void 0 ? void 0 : resolvedMetadata.google) === null || _o === void 0 ? void 0 : _o.usageMetadata) === null || _p === void 0 ? void 0 : _p.thoughtsTokenCount) !== undefined) {
                 metrics.thoughtsTokenCount = resolvedMetadata.google.usageMetadata.thoughtsTokenCount;
-                console.log('UniversalAI: Found thoughtsTokenCount:', metrics.thoughtsTokenCount);
             }
-            if (((
+            if (((_r = (_q = resolvedMetadata === null || resolvedMetadata === void 0 ? void 0 : resolvedMetadata.google) === null || _q === void 0 ? void 0 : _q.usageMetadata) === null || _r === void 0 ? void 0 : _r.totalBillableTokenCount) !== undefined) {
                 metrics.totalBillableTokenCount = resolvedMetadata.google.usageMetadata.totalBillableTokenCount;
-                console.log('UniversalAI: Found totalBillableTokenCount:', metrics.totalBillableTokenCount);
             }
             break;
     }
-    console.log('UniversalAI: Extracted cache metrics:', metrics);
     return metrics;
 }
 function formatResponse(result) {
-    var _a, _b, _c, _d
+    var _a, _b, _c, _d;
     const response = {
         id: (_a = result.response) === null || _a === void 0 ? void 0 : _a.id,
         modelId: (_b = result.response) === null || _b === void 0 ? void 0 : _b.modelId,
         timestamp: (_c = result.response) === null || _c === void 0 ? void 0 : _c.timestamp,
         headers: (_d = result.response) === null || _d === void 0 ? void 0 : _d.headers,
     };
-    console.log('UniversalAI: Response metadata:', {
-        hasResponse: !!result.response,
-        responseKeys: result.response ? Object.keys(result.response) : [],
-        usageMetadata: (_e = result.response) === null || _e === void 0 ? void 0 : _e.usageMetadata,
-    });
     return response;
 }
-async function getProvider(provider, apiKey, baseURL) {
-    const
+async function getProvider(provider, apiKey, baseURL, customHeaders) {
+    const headersKey = customHeaders
+        ? JSON.stringify(Object.keys(customHeaders)
+            .sort()
+            .map((key) => [key, customHeaders[key]]))
+        : '';
+    const cacheKey = `${provider}:${apiKey}:${baseURL || ''}:${headersKey}`;
     const cached = providerCache.get(cacheKey);
     if (cached)
         return cached;
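`getProvider` now accepts an optional `customHeaders` map and folds it into the provider cache key, serializing the entries sorted by header name so the same headers supplied in a different order still hit the same cached provider instance. A standalone TypeScript sketch of just the key construction (the `providerCacheKey` function name is illustrative; in the compiled code this logic is inline):

    function providerCacheKey(
        provider: string,
        apiKey: string,
        baseURL?: string,
        customHeaders?: Record<string, string>,
    ): string {
        const headersKey = customHeaders
            ? JSON.stringify(Object.keys(customHeaders)
                .sort()
                .map((key) => [key, customHeaders[key]]))
            : '';
        return `${provider}:${apiKey}:${baseURL || ''}:${headersKey}`;
    }

    // Same headers, different insertion order → same cache key.
    providerCacheKey('google', 'k', undefined, { 'x-a': '1', 'x-b': '2' }) ===
        providerCacheKey('google', 'k', undefined, { 'x-b': '2', 'x-a': '1' }); // true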
@@ -573,28 +554,24 @@ async function getProvider(provider, apiKey, baseURL) {
     try {
         switch (provider) {
             case 'google':
-
-
-
-
+                const { createGoogleGenerativeAI } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/google')));
+                providerInstance = createGoogleGenerativeAI({
+                    apiKey,
+                    ...(baseURL && { baseURL }),
+                    ...(customHeaders && Object.keys(customHeaders).length > 0 && { headers: customHeaders }),
+                });
                 break;
             case 'deepseek':
-
-
-                }
-                providerInstance = deepseekProvider({ apiKey, ...(baseURL && { baseURL }) });
+                const { createDeepSeek } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/deepseek')));
+                providerInstance = createDeepSeek({ apiKey, ...(baseURL && { baseURL }) });
                 break;
             case 'groq':
-
-
-                }
-                providerInstance = groqProvider({ apiKey, ...(baseURL && { baseURL }) });
+                const { createGroq } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/groq')));
+                providerInstance = createGroq({ apiKey, ...(baseURL && { baseURL }) });
                 break;
             case 'openrouter':
-
-
-                }
-                providerInstance = openrouterProvider({ apiKey, ...(baseURL && { baseURL }) });
+                const { createOpenRouter } = await Promise.resolve().then(() => __importStar(require('@openrouter/ai-sdk-provider')));
+                providerInstance = createOpenRouter({ apiKey, ...(baseURL && { baseURL }) });
                 break;
             default:
                 throw new Error(`Unsupported provider: ${provider}`);
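Each provider case now imports its factory on demand instead of relying on the module-level `googleProvider`/`deepseekProvider`/`groqProvider`/`openrouterProvider` variables removed at the top of the file, and only the Google factory receives the optional custom headers. In source form the Google branch reduces to roughly the following (an assumption; only the transpiled `Promise.resolve().then(... __importStar ...)` output appears in this diff, and the wrapper function name is hypothetical):

    async function createGoogleProvider(
        apiKey: string,
        baseURL?: string,
        customHeaders?: Record<string, string>,
    ) {
        // Loaded only when the Google provider is actually selected.
        const { createGoogleGenerativeAI } = await import('@ai-sdk/google');
        return createGoogleGenerativeAI({
            apiKey,
            ...(baseURL && { baseURL }),
            ...(customHeaders && Object.keys(customHeaders).length > 0 && { headers: customHeaders }),
        });
    }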
@@ -629,6 +606,14 @@ function parseStopSequences(stopSequencesStr) {
         return undefined;
     return stopSequencesStr.split(',').map(s => s.trim()).filter(s => s.length > 0);
 }
+function applyNumericOptions(params, options, keys) {
+    for (const key of keys) {
+        const value = options[key];
+        if (value !== undefined && value !== null && value !== '') {
+            params[key] = value;
+        }
+    }
+}
 class UniversalAI {
     constructor() {
         this.description = descriptions_1.UNIVERSAL_AI_DESCRIPTION;
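`applyNumericOptions` centralizes the copy-only-when-set loop that `generateTextOperation` and `generateObjectOperation` previously inlined; the empty-string check keeps unset n8n number fields from overriding SDK defaults. A typed sketch with a usage line matching the later hunks (the keys elided in the list are not shown in this diff):

    function applyNumericOptions(
        params: Record<string, unknown>,
        options: Record<string, unknown>,
        keys: string[],
    ): void {
        for (const key of keys) {
            const value = options[key];
            if (value !== undefined && value !== null && value !== '') {
                params[key] = value; // skip undefined/null/'' so SDK defaults stay in effect
            }
        }
    }

    // As used by generateTextOperation below:
    // applyNumericOptions(params, options, ['maxTokens', 'temperature', 'topP', /* … */ 'presencePenalty', 'seed']);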
@@ -670,7 +655,8 @@ class UniversalAI {
         if (!(credentials === null || credentials === void 0 ? void 0 : credentials.apiKey)) {
             throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'No API key provided in credentials');
         }
-        const
+        const customHeaders = provider === 'google' ? getGoogleCustomHeaders(this, 0) : undefined;
+        const aiProvider = await getProvider(provider, credentials.apiKey, credentials.baseUrl, customHeaders);
         for (let i = 0; i < items.length; i++) {
             if (this.continueOnFail()) {
                 try {
@@ -741,6 +727,43 @@ function buildGoogleProviderOptions(exec, index, cachedContentName) {
     }
     return Object.keys(options).length > 0 ? options : undefined;
 }
+function getGoogleCustomHeaders(exec, index) {
+    var _a, _b;
+    const headersCollection = exec.getNodeParameter('customHeaders', index, {});
+    const entries = (_a = headersCollection === null || headersCollection === void 0 ? void 0 : headersCollection.headers) !== null && _a !== void 0 ? _a : [];
+    if (!entries || entries.length === 0) {
+        return undefined;
+    }
+    const headers = {};
+    for (const entry of entries) {
+        if (!entry)
+            continue;
+        const name = (entry.name || '').trim();
+        if (!name)
+            continue;
+        headers[name] = (_b = entry.value) !== null && _b !== void 0 ? _b : '';
+    }
+    return Object.keys(headers).length > 0 ? headers : undefined;
+}
+async function prepareGoogleCache(exec, index, apiKey, input, tools, context) {
+    const useGoogleCache = exec.getNodeParameter('useGoogleCache', index, false);
+    const cacheContentInfo = resolveCacheContent(input);
+    let cachedContentName = null;
+    if (useGoogleCache && canUseCache(cacheContentInfo.content)) {
+        try {
+            cachedContentName = await createGoogleCache(exec, index, apiKey, cacheContentInfo.content, tools);
+        }
+        catch (error) {
+            console.warn(`UniversalAI: Cache creation for ${context} generation failed, continuing without cache:`, error);
+        }
+    }
+    const googleProviderOptions = buildGoogleProviderOptions(exec, index, cachedContentName || undefined);
+    return {
+        cachedContentName,
+        cacheContentInfo,
+        googleProviderOptions,
+    };
+}
 async function generateTextOperation(exec, index, provider, aiProvider, model, modelSettings, input, options, apiKey) {
     const enableStreaming = exec.getNodeParameter('enableStreaming', index, false);
     const includeRequestBody = options.includeRequestBody;
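`getGoogleCustomHeaders` turns the node's `customHeaders` collection parameter into a plain header object (entries with a blank name are dropped, missing values become empty strings), and `prepareGoogleCache` bundles the parameter read, cache-content resolution, cache creation and provider-option construction that the two generate operations previously duplicated (see the hunks that follow). A small TypeScript sketch of the header mapping; the parameter shape and the example header name are assumptions for illustration:

    // Assumed shape of the `customHeaders` fixedCollection as returned by getNodeParameter().
    const headersCollection = {
        headers: [
            { name: 'x-example-header', value: 'abc' },
            { name: '   ', value: 'ignored' }, // blank names are skipped
        ],
    };

    const headers: Record<string, string> = {};
    for (const entry of headersCollection.headers) {
        const name = (entry.name || '').trim();
        if (!name) continue;
        headers[name] = entry.value ?? '';
    }
    // headers → { 'x-example-header': 'abc' }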
@@ -749,44 +772,19 @@ async function generateTextOperation(exec, index, provider, aiProvider, model, m
     let googleProviderOptions;
     let cacheContentInfo;
     if (provider === 'google') {
-        const
-
-
-
-        console.log('UniversalAI: Creating Google cache...');
-        try {
-            cachedContentName = await createGoogleCache(exec, index, apiKey, cacheContentInfo.content, tools);
-            if (cachedContentName) {
-                console.log('UniversalAI: Successfully created/retrieved cache:', {
-                    cacheName: cachedContentName,
-                    source: cacheContentInfo.source,
-                });
-            }
-            else {
-                console.log('UniversalAI: Cache creation returned null, continuing without cache');
-            }
-        }
-        catch (error) {
-            console.warn('UniversalAI: Cache creation failed, continuing without cache:', error);
-        }
-        }
-        else if (useGoogleCache) {
-            console.log('UniversalAI: Skipping cache creation - no cache content available');
-        }
-        googleProviderOptions = buildGoogleProviderOptions(exec, index, cachedContentName || undefined);
+        const cacheSetup = await prepareGoogleCache(exec, index, apiKey, input, tools, 'text');
+        cachedContentName = cacheSetup.cachedContentName;
+        cacheContentInfo = cacheSetup.cacheContentInfo;
+        googleProviderOptions = cacheSetup.googleProviderOptions;
     }
     const params = {
         model: aiProvider(model, modelSettings),
         ...input,
     };
     if (cachedContentName) {
-        console.log('UniversalAI: Using cache for context, adjusting params based on cache source');
         if ((cacheContentInfo === null || cacheContentInfo === void 0 ? void 0 : cacheContentInfo.source) === 'system' && params.system) {
             delete params.system;
         }
-        if (tools && Object.keys(tools).length > 0) {
-            delete params.tools;
-        }
     }
     else {
         if (tools) {
@@ -797,9 +795,8 @@ async function generateTextOperation(exec, index, provider, aiProvider, model, m
         params.providerOptions = {
             google: googleProviderOptions,
         };
-        console.log('UniversalAI: Added Google provider options:', googleProviderOptions);
     }
-    const
+    const textNumericKeys = [
         'maxTokens',
         'temperature',
         'topP',
@@ -808,29 +805,11 @@ async function generateTextOperation(exec, index, provider, aiProvider, model, m
         'presencePenalty',
         'seed',
     ];
-
-        const value = options[key];
-        if (value !== undefined && value !== null && value !== '') {
-            params[key] = value;
-        }
-    }
-    console.log('UniversalAI: Final request params:', {
-        hasSystem: !!params.system,
-        hasTools: !!params.tools,
-        hasCachedContent: !!cachedContentName,
-        providerOptions: params.providerOptions,
-    });
+    applyNumericOptions(params, options, textNumericKeys);
     if (enableStreaming) {
         return await handleStreaming(params, provider, includeRequestBody);
     }
     const result = await (0, ai_1.generateText)(params);
-    console.log('UniversalAI: Debug - generateText result:', {
-        provider,
-        hasProviderMetadata: !!result.experimental_providerMetadata,
-        providerMetadata: result.experimental_providerMetadata,
-        usage: result.usage,
-        response: result.response,
-    });
     const formattedResult = formatTextResult(result, includeRequestBody, provider);
     return [{ json: formattedResult }];
 }
@@ -840,7 +819,7 @@ async function buildGoogleTools(exec, index) {
         return undefined;
     }
     const tools = {};
-    const google = require('@ai-sdk/google')
+    const { google } = await Promise.resolve().then(() => __importStar(require('@ai-sdk/google')));
     const toolSet = new Set(googleTools);
     if (toolSet.has('google_search')) {
         tools.google_search = google.tools.googleSearch({});
@@ -901,31 +880,10 @@ async function generateObjectOperation(exec, index, provider, aiProvider, model,
     let googleProviderOptions;
     let cacheContentInfo;
     if (provider === 'google') {
-        const
-
-
-
-        console.log('UniversalAI: Creating Google cache for object generation...');
-        try {
-            cachedContentName = await createGoogleCache(exec, index, apiKey, cacheContentInfo.content, undefined);
-            if (cachedContentName) {
-                console.log('UniversalAI: Successfully created/retrieved cache for object generation:', {
-                    cacheName: cachedContentName,
-                    source: cacheContentInfo.source,
-                });
-            }
-            else {
-                console.log('UniversalAI: Cache creation for object generation returned null, continuing without cache');
-            }
-        }
-        catch (error) {
-            console.warn('UniversalAI: Cache creation for object generation failed, continuing without cache:', error);
-        }
-        }
-        else if (useGoogleCache) {
-            console.log('UniversalAI: Skipping cache creation for object generation - no cache content available');
-        }
-        googleProviderOptions = buildGoogleProviderOptions(exec, index, cachedContentName || undefined);
+        const cacheSetup = await prepareGoogleCache(exec, index, apiKey, input, undefined, 'object');
+        cachedContentName = cacheSetup.cachedContentName;
+        cacheContentInfo = cacheSetup.cacheContentInfo;
+        googleProviderOptions = cacheSetup.googleProviderOptions;
     }
     const params = {
         model: aiProvider(model, modelSettings),
@@ -935,7 +893,6 @@ async function generateObjectOperation(exec, index, provider, aiProvider, model,
         ...input,
     };
     if (cachedContentName) {
-        console.log('UniversalAI: Using cache for object generation, adjusting params');
         if ((cacheContentInfo === null || cacheContentInfo === void 0 ? void 0 : cacheContentInfo.source) === 'system' && params.system) {
             delete params.system;
         }
@@ -944,9 +901,8 @@ async function generateObjectOperation(exec, index, provider, aiProvider, model,
         params.providerOptions = {
             google: googleProviderOptions,
         };
-        console.log('UniversalAI: Added Google provider options for object generation:', googleProviderOptions);
     }
-    const
+    const objectNumericKeys = [
         'temperature',
         'topP',
         'topK',
@@ -954,24 +910,8 @@ async function generateObjectOperation(exec, index, provider, aiProvider, model,
         'presencePenalty',
         'seed',
     ];
-
-        const value = options[key];
-        if (value !== undefined && value !== null && value !== '') {
-            params[key] = value;
-        }
-    }
-    console.log('UniversalAI: Final object generation request params:', {
-        hasSystem: !!params.system,
-        hasCachedContent: !!cachedContentName,
-        providerOptions: params.providerOptions,
-    });
+    applyNumericOptions(params, options, objectNumericKeys);
     const result = await (0, ai_1.generateObject)(params);
-    console.log('UniversalAI: Debug - generateObject result:', {
-        provider,
-        hasProviderMetadata: !!result.experimental_providerMetadata,
-        providerMetadata: result.experimental_providerMetadata,
-        usage: result.usage,
-    });
     const formattedResult = formatObjectResult(result, options.includeRequestBody, provider);
     return [{ json: formattedResult }];
 }