@juspay/neurolink 8.3.0 → 8.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/README.md +1 -0
- package/dist/adapters/providerImageAdapter.d.ts +1 -1
- package/dist/adapters/providerImageAdapter.js +62 -0
- package/dist/agent/directTools.d.ts +0 -72
- package/dist/agent/directTools.js +3 -74
- package/dist/cli/commands/config.d.ts +18 -18
- package/dist/cli/factories/commandFactory.js +1 -0
- package/dist/constants/enums.d.ts +1 -0
- package/dist/constants/enums.js +3 -1
- package/dist/constants/tokens.d.ts +3 -0
- package/dist/constants/tokens.js +3 -0
- package/dist/core/baseProvider.d.ts +56 -53
- package/dist/core/baseProvider.js +107 -1095
- package/dist/core/constants.d.ts +3 -0
- package/dist/core/constants.js +6 -3
- package/dist/core/modelConfiguration.js +10 -0
- package/dist/core/modules/GenerationHandler.d.ts +63 -0
- package/dist/core/modules/GenerationHandler.js +230 -0
- package/dist/core/modules/MessageBuilder.d.ts +39 -0
- package/dist/core/modules/MessageBuilder.js +179 -0
- package/dist/core/modules/StreamHandler.d.ts +52 -0
- package/dist/core/modules/StreamHandler.js +103 -0
- package/dist/core/modules/TelemetryHandler.d.ts +64 -0
- package/dist/core/modules/TelemetryHandler.js +170 -0
- package/dist/core/modules/ToolsManager.d.ts +98 -0
- package/dist/core/modules/ToolsManager.js +521 -0
- package/dist/core/modules/Utilities.d.ts +88 -0
- package/dist/core/modules/Utilities.js +329 -0
- package/dist/factories/providerRegistry.js +1 -1
- package/dist/lib/adapters/providerImageAdapter.d.ts +1 -1
- package/dist/lib/adapters/providerImageAdapter.js +62 -0
- package/dist/lib/agent/directTools.d.ts +0 -72
- package/dist/lib/agent/directTools.js +3 -74
- package/dist/lib/constants/enums.d.ts +1 -0
- package/dist/lib/constants/enums.js +3 -1
- package/dist/lib/constants/tokens.d.ts +3 -0
- package/dist/lib/constants/tokens.js +3 -0
- package/dist/lib/core/baseProvider.d.ts +56 -53
- package/dist/lib/core/baseProvider.js +107 -1095
- package/dist/lib/core/constants.d.ts +3 -0
- package/dist/lib/core/constants.js +6 -3
- package/dist/lib/core/modelConfiguration.js +10 -0
- package/dist/lib/core/modules/GenerationHandler.d.ts +63 -0
- package/dist/lib/core/modules/GenerationHandler.js +231 -0
- package/dist/lib/core/modules/MessageBuilder.d.ts +39 -0
- package/dist/lib/core/modules/MessageBuilder.js +180 -0
- package/dist/lib/core/modules/StreamHandler.d.ts +52 -0
- package/dist/lib/core/modules/StreamHandler.js +104 -0
- package/dist/lib/core/modules/TelemetryHandler.d.ts +64 -0
- package/dist/lib/core/modules/TelemetryHandler.js +171 -0
- package/dist/lib/core/modules/ToolsManager.d.ts +98 -0
- package/dist/lib/core/modules/ToolsManager.js +522 -0
- package/dist/lib/core/modules/Utilities.d.ts +88 -0
- package/dist/lib/core/modules/Utilities.js +330 -0
- package/dist/lib/factories/providerRegistry.js +1 -1
- package/dist/lib/mcp/servers/agent/directToolsServer.js +0 -1
- package/dist/lib/memory/mem0Initializer.d.ts +32 -1
- package/dist/lib/memory/mem0Initializer.js +55 -2
- package/dist/lib/models/modelRegistry.js +44 -0
- package/dist/lib/neurolink.d.ts +1 -1
- package/dist/lib/neurolink.js +43 -10
- package/dist/lib/providers/amazonBedrock.js +59 -10
- package/dist/lib/providers/anthropic.js +2 -30
- package/dist/lib/providers/azureOpenai.js +2 -24
- package/dist/lib/providers/googleAiStudio.js +2 -24
- package/dist/lib/providers/googleVertex.js +2 -45
- package/dist/lib/providers/huggingFace.js +3 -31
- package/dist/lib/providers/litellm.d.ts +1 -1
- package/dist/lib/providers/litellm.js +110 -44
- package/dist/lib/providers/mistral.js +5 -32
- package/dist/lib/providers/ollama.d.ts +1 -0
- package/dist/lib/providers/ollama.js +476 -129
- package/dist/lib/providers/openAI.js +2 -28
- package/dist/lib/providers/openaiCompatible.js +3 -31
- package/dist/lib/types/content.d.ts +16 -113
- package/dist/lib/types/content.js +16 -2
- package/dist/lib/types/conversation.d.ts +3 -17
- package/dist/lib/types/generateTypes.d.ts +2 -2
- package/dist/lib/types/index.d.ts +2 -0
- package/dist/lib/types/index.js +2 -0
- package/dist/lib/types/multimodal.d.ts +282 -0
- package/dist/lib/types/multimodal.js +101 -0
- package/dist/lib/types/streamTypes.d.ts +2 -2
- package/dist/lib/utils/imageProcessor.d.ts +1 -1
- package/dist/lib/utils/messageBuilder.js +25 -2
- package/dist/lib/utils/multimodalOptionsBuilder.d.ts +1 -1
- package/dist/lib/utils/pdfProcessor.d.ts +9 -0
- package/dist/lib/utils/pdfProcessor.js +67 -9
- package/dist/mcp/servers/agent/directToolsServer.js +0 -1
- package/dist/memory/mem0Initializer.d.ts +32 -1
- package/dist/memory/mem0Initializer.js +55 -2
- package/dist/models/modelRegistry.js +44 -0
- package/dist/neurolink.d.ts +1 -1
- package/dist/neurolink.js +43 -10
- package/dist/providers/amazonBedrock.js +59 -10
- package/dist/providers/anthropic.js +2 -30
- package/dist/providers/azureOpenai.js +2 -24
- package/dist/providers/googleAiStudio.js +2 -24
- package/dist/providers/googleVertex.js +2 -45
- package/dist/providers/huggingFace.js +3 -31
- package/dist/providers/litellm.d.ts +1 -1
- package/dist/providers/litellm.js +110 -44
- package/dist/providers/mistral.js +5 -32
- package/dist/providers/ollama.d.ts +1 -0
- package/dist/providers/ollama.js +476 -129
- package/dist/providers/openAI.js +2 -28
- package/dist/providers/openaiCompatible.js +3 -31
- package/dist/types/content.d.ts +16 -113
- package/dist/types/content.js +16 -2
- package/dist/types/conversation.d.ts +3 -17
- package/dist/types/generateTypes.d.ts +2 -2
- package/dist/types/index.d.ts +2 -0
- package/dist/types/index.js +2 -0
- package/dist/types/multimodal.d.ts +282 -0
- package/dist/types/multimodal.js +100 -0
- package/dist/types/streamTypes.d.ts +2 -2
- package/dist/utils/imageProcessor.d.ts +1 -1
- package/dist/utils/messageBuilder.js +25 -2
- package/dist/utils/multimodalOptionsBuilder.d.ts +1 -1
- package/dist/utils/pdfProcessor.d.ts +9 -0
- package/dist/utils/pdfProcessor.js +67 -9
- package/package.json +5 -2
package/dist/providers/amazonBedrock.js

```diff
@@ -8,6 +8,7 @@ import { buildMultimodalMessagesArray } from "../utils/messageBuilder.js";
 import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
 import { DEFAULT_MAX_STEPS } from "../core/constants.js";
 import { createAnalytics } from "../core/analytics.js";
+import path from "path";
 // Bedrock-specific types now imported from ../types/providerSpecific.js
 export class AmazonBedrockProvider extends BaseProvider {
     bedrockClient;
@@ -95,13 +96,45 @@ export class AmazonBedrockProvider extends BaseProvider {
             : optionsOrPrompt;
         // Clear conversation history for new generation
         this.conversationHistory = [];
-        //
-
-
-
-
-
-
+        // Check for multimodal input (images, PDFs, CSVs, files)
+        // Cast to any to access multimodal properties (runtime check is safe)
+        const input = options.input;
+        const hasMultimodalInput = !!(input?.images?.length ||
+            input?.content?.length ||
+            input?.files?.length ||
+            input?.csvFiles?.length ||
+            input?.pdfFiles?.length);
+        if (hasMultimodalInput) {
+            logger.debug(`[AmazonBedrockProvider] Detected multimodal input in generate(), using multimodal message builder`, {
+                hasImages: !!input?.images?.length,
+                imageCount: input?.images?.length || 0,
+                hasContent: !!input?.content?.length,
+                contentCount: input?.content?.length || 0,
+                hasFiles: !!input?.files?.length,
+                fileCount: input?.files?.length || 0,
+                hasCSVFiles: !!input?.csvFiles?.length,
+                csvFileCount: input?.csvFiles?.length || 0,
+                hasPDFFiles: !!input?.pdfFiles?.length,
+                pdfFileCount: input?.pdfFiles?.length || 0,
+            });
+            // Cast options to StreamOptions for multimodal processing
+            const streamOptions = options;
+            const multimodalOptions = buildMultimodalOptions(streamOptions, this.providerName, this.modelName);
+            const multimodalMessages = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
+            // Convert to Bedrock format
+            this.conversationHistory =
+                this.convertToBedrockMessages(multimodalMessages);
+        }
+        else {
+            logger.debug(`[AmazonBedrockProvider] Text-only input in generate(), using simple message builder`);
+            // Add user message to conversation - simple text-only case
+            const userMessage = {
+                role: "user",
+                content: [{ text: options.prompt }],
+            };
+            this.conversationHistory.push(userMessage);
+        }
+        logger.debug(`[AmazonBedrockProvider] Starting conversation with ${this.conversationHistory.length} message(s)`);
         // Start conversation loop and return enhanced result
         const text = await this.conversationLoop(options);
         return {
```
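The branch above fires whenever the structured input carries media. A caller-side sketch, assuming the package's `NeuroLink` entry point; the `provider` id and `result.text` shape are illustrative, while the `input` field names are exactly the ones this diff checks:

```ts
import { readFileSync } from "node:fs";
import { NeuroLink } from "@juspay/neurolink";

const ai = new NeuroLink();

// Any non-empty images/content/files/csvFiles/pdfFiles array routes the
// request through buildMultimodalMessagesArray + convertToBedrockMessages;
// plain text takes the single-user-message fast path.
const result = await ai.generate({
  provider: "bedrock", // illustrative id; check the package's enums for the real value
  input: {
    text: "Summarize the attached report.",
    pdfFiles: [readFileSync("./report.pdf")], // field name as checked in this diff
  },
});
console.log(result.text);
```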
```diff
@@ -585,12 +618,28 @@ export class AmazonBedrockProvider extends BaseProvider {
         else {
             docData = contentItem.data;
         }
+        // Extract basename and sanitize for Bedrock's filename requirements
+        // Bedrock only allows: alphanumeric, whitespace, hyphens, parentheses, brackets
+        // NOTE: Periods (.) are NOT allowed, so we remove the extension
+        let filename = typeof contentItem.name === "string" && contentItem.name
+            ? path.basename(contentItem.name)
+            : "document-pdf";
+        // Remove file extension
+        filename = filename.replace(/\.[^.]+$/, "");
+        // Replace all disallowed characters with hyphens
+        // Bedrock constraint: only alphanumeric, whitespace, hyphens, parentheses, brackets allowed
+        filename = filename.replace(/[^a-zA-Z0-9\s\-()[\]]/g, "-");
+        // Clean up: remove multiple consecutive hyphens and trim
+        filename = filename
+            .replace(/-+/g, "-")
+            .trim()
+            .replace(/^-+|-+$/g, "");
+        // Fallback if filename becomes empty after sanitization
+        filename = filename || "document";
         bedrockMessage.content.push({
             document: {
                 format: "pdf",
-                name:
-                    ? contentItem.name
-                    : "document.pdf",
+                name: filename,
                 source: {
                     bytes: docData,
                 },
```
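Extracted as a standalone function, the sanitization above behaves like this (same regexes as the diff; the function name is ours):

```ts
import path from "node:path";

// Mirrors the inline Bedrock document-name sanitization from the hunk above.
function sanitizeBedrockDocName(name?: string): string {
  let filename = typeof name === "string" && name
    ? path.basename(name)
    : "document-pdf";
  filename = filename.replace(/\.[^.]+$/, "");                // drop extension (periods disallowed)
  filename = filename.replace(/[^a-zA-Z0-9\s\-()[\]]/g, "-"); // allowed: alnum, space, - ( ) [ ]
  filename = filename.replace(/-+/g, "-").trim().replace(/^-+|-+$/g, "");
  return filename || "document";                              // fallback if everything was stripped
}

console.log(sanitizeBedrockDocName("/tmp/Q3_report.final.pdf")); // "Q3-report-final"
console.log(sanitizeBedrockDocName("???.pdf"));                  // "document"
```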
package/dist/providers/anthropic.js

```diff
@@ -7,8 +7,6 @@ import { createTimeoutController, TimeoutError } from "../utils/timeout.js";
 import { AuthenticationError, NetworkError, ProviderError, RateLimitError, } from "../types/errors.js";
 import { DEFAULT_MAX_STEPS } from "../core/constants.js";
 import { validateApiKey, createAnthropicConfig, getProviderModel, } from "../utils/providerConfig.js";
-import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
-import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
 import { createProxyFetch } from "../proxy/proxyFetch.js";
 // Configuration helpers - now using consolidated utility
 const getAnthropicApiKey = () => {
@@ -94,34 +92,8 @@ export class AnthropicProvider extends BaseProvider {
         const shouldUseTools = !options.disableTools && this.supportsTools();
         const tools = shouldUseTools ? await this.getAllTools() : {};
         // Build message array from options with multimodal support
-
-
-            options.input?.files?.length ||
-            options.input?.csvFiles?.length ||
-            options.input?.pdfFiles?.length);
-        let messages;
-        if (hasMultimodalInput) {
-            logger.debug(`Anthropic: Detected multimodal input, using multimodal message builder`, {
-                hasImages: !!options.input?.images?.length,
-                imageCount: options.input?.images?.length || 0,
-                hasContent: !!options.input?.content?.length,
-                contentCount: options.input?.content?.length || 0,
-                hasFiles: !!options.input?.files?.length,
-                fileCount: options.input?.files?.length || 0,
-                hasCSVFiles: !!options.input?.csvFiles?.length,
-                csvFileCount: options.input?.csvFiles?.length || 0,
-                hasPDFFiles: !!options.input?.pdfFiles?.length,
-                pdfFileCount: options.input?.pdfFiles?.length || 0,
-            });
-            const multimodalOptions = buildMultimodalOptions(options, this.providerName, this.modelName);
-            const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
-            // Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
-            messages = convertToCoreMessages(mm);
-        }
-        else {
-            logger.debug(`Anthropic: Text-only input, using standard message builder`);
-            messages = await buildMessagesArray(options);
-        }
+        // Using protected helper from BaseProvider to eliminate code duplication
+        const messages = await this.buildMessagesForStream(options);
         const model = await this.getAISDKModelWithMiddleware(options);
         const result = await streamText({
             model: model,
```
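Every provider hunk in this release repeats this replacement, so the deleted logic now lives once in `BaseProvider` (inside the rewritten `baseProvider.js`, not excerpted here). A plausible reconstruction from the deleted per-provider code, written as a free function; the import paths and signature are illustrative, the real helper is a protected method:

```ts
import type { CoreMessage } from "ai";
// Illustrative deep imports mirroring the paths seen in this diff.
import {
  buildMessagesArray,
  buildMultimodalMessagesArray,
  convertToCoreMessages,
} from "./utils/messageBuilder.js";
import { buildMultimodalOptions } from "./utils/multimodalOptionsBuilder.js";
import type { StreamOptions } from "./types/streamTypes.js";

// Reconstruction of BaseProvider.buildMessagesForStream, inferred from the
// duplicated blocks this release deletes from each provider.
async function buildMessagesForStream(
  options: StreamOptions,
  providerName: string,
  modelName: string,
): Promise<CoreMessage[]> {
  const input = options.input;
  const hasMultimodalInput = !!(
    input?.images?.length ||
    input?.content?.length ||
    input?.files?.length ||
    input?.csvFiles?.length ||
    input?.pdfFiles?.length
  );
  if (hasMultimodalInput) {
    const multimodalOptions = buildMultimodalOptions(options, providerName, modelName);
    const mm = await buildMultimodalMessagesArray(multimodalOptions, providerName, modelName);
    return convertToCoreMessages(mm); // Vercel AI SDK CoreMessage[]
  }
  return buildMessagesArray(options); // text-only path
}
```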
package/dist/providers/azureOpenai.js

```diff
@@ -4,8 +4,6 @@ import { BaseProvider } from "../core/baseProvider.js";
 import { AIProviderName, APIVersions } from "../constants/enums.js";
 import { validateApiKey, createAzureAPIKeyConfig, createAzureEndpointConfig, } from "../utils/providerConfig.js";
 import { logger } from "../utils/logger.js";
-import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
-import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
 import { createProxyFetch } from "../proxy/proxyFetch.js";
 import { DEFAULT_MAX_STEPS } from "../core/constants.js";
 export class AzureOpenAIProvider extends BaseProvider {
@@ -111,28 +109,8 @@ export class AzureOpenAIProvider extends BaseProvider {
         });
     }
     // Build message array from options with multimodal support
-
-
-            options.input?.files?.length ||
-            options.input?.csvFiles?.length ||
-            options.input?.pdfFiles?.length);
-        let messages;
-        if (hasMultimodalInput) {
-            logger.debug(`Azure OpenAI: Detected multimodal input, using multimodal message builder`, {
-                hasImages: !!options.input?.images?.length,
-                imageCount: options.input?.images?.length || 0,
-                hasContent: !!options.input?.content?.length,
-                contentCount: options.input?.content?.length || 0,
-            });
-            const multimodalOptions = buildMultimodalOptions(options, this.providerName, this.modelName);
-            const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
-            // Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
-            messages = convertToCoreMessages(mm);
-        }
-        else {
-            logger.debug(`Azure OpenAI: Text-only input, using standard message builder`);
-            messages = await buildMessagesArray(options);
-        }
+        // Using protected helper from BaseProvider to eliminate code duplication
+        const messages = await this.buildMessagesForStream(options);
         const model = await this.getAISDKModelWithMiddleware(options);
         const stream = await streamText({
             model,
```
package/dist/providers/googleAiStudio.js

```diff
@@ -7,8 +7,6 @@ import { createTimeoutController, TimeoutError } from "../utils/timeout.js";
 import { AuthenticationError, NetworkError, ProviderError, RateLimitError, } from "../types/errors.js";
 import { DEFAULT_MAX_STEPS } from "../core/constants.js";
 import { streamAnalyticsCollector } from "../core/streamAnalytics.js";
-import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
-import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
 // Google AI Live API types now imported from ../types/providerSpecific.js
 // Import proper types for multimodal message handling
 // Create Google GenAI client
@@ -93,28 +91,8 @@ export class GoogleAIStudioProvider extends BaseProvider {
         const shouldUseTools = !options.disableTools && this.supportsTools();
         const tools = shouldUseTools ? await this.getAllTools() : {};
         // Build message array from options with multimodal support
-
-
-            options.input?.files?.length ||
-            options.input?.csvFiles?.length ||
-            options.input?.pdfFiles?.length);
-        let messages;
-        if (hasMultimodalInput) {
-            logger.debug(`Google AI Studio: Detected multimodal input, using multimodal message builder`, {
-                hasImages: !!options.input?.images?.length,
-                imageCount: options.input?.images?.length || 0,
-                hasContent: !!options.input?.content?.length,
-                contentCount: options.input?.content?.length || 0,
-            });
-            const multimodalOptions = buildMultimodalOptions(options, this.providerName, this.modelName);
-            const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
-            // Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
-            messages = convertToCoreMessages(mm);
-        }
-        else {
-            logger.debug(`Google AI Studio: Text-only input, using standard message builder`);
-            messages = await buildMessagesArray(options);
-        }
+        // Using protected helper from BaseProvider to eliminate code duplication
+        const messages = await this.buildMessagesForStream(options);
         const result = await streamText({
             model,
             messages: messages,
```
package/dist/providers/googleVertex.js

```diff
@@ -12,7 +12,6 @@ import fs from "fs";
 import path from "path";
 import os from "os";
 import dns from "dns";
-import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
 import { createProxyFetch } from "../proxy/proxyFetch.js";
 // Import proper types for multimodal message handling
 // Enhanced Anthropic support with direct imports
@@ -605,50 +604,8 @@ export class GoogleVertexProvider extends BaseProvider {
         // Validate stream options
         this.validateStreamOptionsOnly(options);
         // Build message array from options with multimodal support
-
-
-            options.input?.files?.length ||
-            options.input?.csvFiles?.length ||
-            options.input?.pdfFiles?.length);
-        let messages;
-        if (hasMultimodalInput) {
-            logger.debug(`${functionTag}: Detected multimodal input, using multimodal message builder`, {
-                hasImages: !!options.input?.images?.length,
-                imageCount: options.input?.images?.length || 0,
-                hasContent: !!options.input?.content?.length,
-                contentCount: options.input?.content?.length || 0,
-                hasPDFs: !!options.input?.pdfFiles?.length,
-                pdfCount: options.input?.pdfFiles?.length || 0,
-            });
-            // Create multimodal options for buildMultimodalMessagesArray
-            const multimodalOptions = {
-                input: {
-                    text: options.input?.text || "",
-                    images: options.input?.images,
-                    content: options.input?.content,
-                    files: options.input?.files,
-                    csvFiles: options.input?.csvFiles,
-                    pdfFiles: options.input?.pdfFiles,
-                },
-                csvOptions: options.csvOptions,
-                systemPrompt: options.systemPrompt,
-                conversationHistory: options.conversationMessages,
-                provider: this.providerName,
-                model: this.modelName,
-                temperature: options.temperature,
-                maxTokens: options.maxTokens,
-                enableAnalytics: options.enableAnalytics,
-                enableEvaluation: options.enableEvaluation,
-                context: options.context,
-            };
-            const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
-            // Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
-            messages = convertToCoreMessages(mm);
-        }
-        else {
-            logger.debug(`${functionTag}: Text-only input, using standard message builder`);
-            messages = await buildMessagesArray(options);
-        }
+        // Using protected helper from BaseProvider to eliminate code duplication
+        const messages = await this.buildMessagesForStream(options);
         const model = await this.getAISDKModelWithMiddleware(options); // This is where network connection happens!
         // Get all available tools (direct + MCP + external) for streaming
         const shouldUseTools = !options.disableTools && this.supportsTools();
```
package/dist/providers/huggingFace.js

```diff
@@ -6,8 +6,6 @@ import { logger } from "../utils/logger.js";
 import { createTimeoutController, TimeoutError } from "../utils/timeout.js";
 import { DEFAULT_MAX_STEPS } from "../core/constants.js";
 import { validateApiKey, createHuggingFaceConfig, getProviderModel, } from "../utils/providerConfig.js";
-import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
-import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
 import { createProxyFetch } from "../proxy/proxyFetch.js";
 // Configuration helpers - now using consolidated utility
 const getHuggingFaceApiKey = () => {
@@ -116,35 +114,9 @@
     try {
         // Enhanced tool handling for HuggingFace models
         const streamOptions = this.prepareStreamOptions(options, analysisSchema);
-        //
-
-
-            options.input?.files?.length ||
-            options.input?.csvFiles?.length ||
-            options.input?.pdfFiles?.length);
-        let messages;
-        if (hasMultimodalInput) {
-            logger.debug(`HuggingFace: Detected multimodal input, using multimodal message builder`, {
-                hasImages: !!options.input?.images?.length,
-                imageCount: options.input?.images?.length || 0,
-                hasContent: !!options.input?.content?.length,
-                contentCount: options.input?.content?.length || 0,
-                hasFiles: !!options.input?.files?.length,
-                fileCount: options.input?.files?.length || 0,
-                hasCSVFiles: !!options.input?.csvFiles?.length,
-                csvFileCount: options.input?.csvFiles?.length || 0,
-                hasPDFFiles: !!options.input?.pdfFiles?.length,
-                pdfFileCount: options.input?.pdfFiles?.length || 0,
-            });
-            const multimodalOptions = buildMultimodalOptions(options, this.providerName, this.modelName);
-            const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
-            // Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
-            messages = convertToCoreMessages(mm);
-        }
-        else {
-            logger.debug(`HuggingFace: Text-only input, using standard message builder`);
-            messages = await buildMessagesArray(options);
-        }
+        // Build message array from options with multimodal support
+        // Using protected helper from BaseProvider to eliminate code duplication
+        const messages = await this.buildMessagesForStream(options);
         const result = await streamText({
             model: this.model,
             messages: messages,
```
package/dist/providers/litellm.d.ts

```diff
@@ -28,7 +28,7 @@ export declare class LiteLLMProvider extends BaseProvider {
      * Provider-specific streaming implementation
      * Note: This is only used when tools are disabled
      */
-    protected executeStream(options: StreamOptions,
+    protected executeStream(options: StreamOptions, analysisSchema?: ZodType<unknown, ZodTypeDef, unknown> | Schema<unknown>): Promise<StreamResult>;
     /**
      * Get available models from LiteLLM proxy server
      * Dynamically fetches from /v1/models endpoint with caching and fallback
```
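The widened signature lets a Zod schema ride along with a LiteLLM stream request; the implementation (next hunks) applies it through the AI SDK's `experimental_output` / `Output.object`. A caller-side sketch, assuming the public `stream()` options forward `analysisSchema` under the same name:

```ts
import { z } from "zod";
import { NeuroLink } from "@juspay/neurolink";

const ai = new NeuroLink();

const sentiment = z.object({
  sentiment: z.enum(["positive", "neutral", "negative"]),
  confidence: z.number().min(0).max(1),
});

// `analysisSchema` mirrors the new executeStream parameter; whether the
// public option uses the exact same name is an assumption.
const result = await ai.stream({
  provider: "litellm",
  input: { text: "Review: the checkout flow is much faster now." },
  analysisSchema: sentiment,
});
for await (const chunk of result.stream) {
  process.stdout.write(chunk.content ?? "");
}
```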
package/dist/providers/litellm.js

```diff
@@ -1,5 +1,5 @@
 import { createOpenAI } from "@ai-sdk/openai";
-import { streamText } from "ai";
+import { streamText, Output } from "ai";
 import { AIProviderName } from "../constants/enums.js";
 import { BaseProvider } from "../core/baseProvider.js";
 import { logger } from "../utils/logger.js";
@@ -7,8 +7,6 @@ import { createTimeoutController, TimeoutError } from "../utils/timeout.js";
 import { getProviderModel } from "../utils/providerConfig.js";
 import { streamAnalyticsCollector } from "../core/streamAnalytics.js";
 import { DEFAULT_MAX_STEPS } from "../core/constants.js";
-import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
-import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
 import { createProxyFetch } from "../proxy/proxyFetch.js";
 // Configuration helpers
 const getLiteLLMConfig = () => {
@@ -117,52 +115,69 @@
      * Provider-specific streaming implementation
      * Note: This is only used when tools are disabled
      */
-    async executeStream(options,
+    async executeStream(options, analysisSchema) {
         this.validateStreamOptions(options);
         const startTime = Date.now();
+        let chunkCount = 0; // Track chunk count for debugging
         const timeout = this.getTimeout(options);
         const timeoutController = createTimeoutController(timeout, this.providerName, "stream");
         try {
-            //
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            // Build message array from options with multimodal support
+            // Using protected helper from BaseProvider to eliminate code duplication
+            const messages = await this.buildMessagesForStream(options);
+            const model = await this.getAISDKModelWithMiddleware(options); // This is where network connection happens!
+            // Get all available tools (direct + MCP + external) for streaming - matching Vertex pattern
+            const shouldUseTools = !options.disableTools && this.supportsTools();
+            const tools = shouldUseTools ? await this.getAllTools() : {};
+            logger.debug(`LiteLLM: Tools for streaming`, {
+                shouldUseTools,
+                toolCount: Object.keys(tools).length,
+                toolNames: Object.keys(tools),
+            });
+            // Model-specific maxTokens handling - Gemini 2.5 models have issues with maxTokens
+            const modelName = this.modelName || getDefaultLiteLLMModel();
+            const isGemini25Model = modelName.includes("gemini-2.5") || modelName.includes("gemini/2.5");
+            const maxTokens = isGemini25Model ? undefined : options.maxTokens;
+            if (isGemini25Model && options.maxTokens) {
+                logger.debug(`LiteLLM: Skipping maxTokens for Gemini 2.5 model (known compatibility issue)`, {
+                    modelName,
+                    requestedMaxTokens: options.maxTokens,
                 });
-            const multimodalOptions = buildMultimodalOptions(options, this.providerName, this.modelName);
-            const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
-            // Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
-            messages = convertToCoreMessages(mm);
             }
-
-
-            messages = await buildMessagesArray(options);
-        }
-        const model = await this.getAISDKModelWithMiddleware(options); // This is where network connection happens!
-        const result = streamText({
+            // Build complete stream options with proper typing - matching Vertex pattern
+            let streamOptions = {
                 model: model,
                 messages: messages,
                 temperature: options.temperature,
-            maxTokens
-
-
-
+                ...(maxTokens && { maxTokens }), // Conditionally include maxTokens
+                ...(shouldUseTools &&
+                    Object.keys(tools).length > 0 && {
+                    tools,
+                    toolChoice: "auto",
+                    maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
+                }),
                 abortSignal: timeoutController?.controller.signal,
+                onError: (event) => {
+                    const error = event.error;
+                    const errorMessage = error instanceof Error ? error.message : String(error);
+                    logger.error(`LiteLLM: Stream error`, {
+                        provider: this.providerName,
+                        modelName: this.modelName,
+                        error: errorMessage,
+                        chunkCount,
+                    });
+                },
+                onFinish: (event) => {
+                    logger.debug(`LiteLLM: Stream finished`, {
+                        finishReason: event.finishReason,
+                        totalChunks: chunkCount,
+                    });
+                },
+                onChunk: () => {
+                    chunkCount++;
+                },
                 onStepFinish: ({ toolCalls, toolResults }) => {
+                    logger.info("Tool execution completed", { toolResults, toolCalls });
                     this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                         logger.warn("LiteLLMProvider] Failed to store tool executions", {
                             provider: this.providerName,
```
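The substring guard covers both model-id spellings a LiteLLM proxy may expose; extracted for illustration:

```ts
// Same predicate as above, standalone.
const isGemini25Model = (modelName: string): boolean =>
  modelName.includes("gemini-2.5") || modelName.includes("gemini/2.5");

console.log(isGemini25Model("gemini-2.5-pro"));   // true  -> maxTokens dropped
console.log(isGemini25Model("gemini/2.5-flash")); // true  -> maxTokens dropped
console.log(isGemini25Model("gemini-1.5-pro"));   // false -> maxTokens passed through
```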
```diff
@@ -170,21 +185,72 @@ export class LiteLLMProvider extends BaseProvider {
                         });
                     });
                 },
-            }
+            };
+            // Add analysisSchema support if provided
+            if (analysisSchema) {
+                try {
+                    streamOptions = {
+                        ...streamOptions,
+                        experimental_output: Output.object({
+                            schema: analysisSchema,
+                        }),
+                    };
+                }
+                catch (error) {
+                    logger.warn("Schema application failed, continuing without schema", {
+                        error: String(error),
+                    });
+                }
+            }
+            const result = await streamText(streamOptions);
             timeoutController?.cleanup();
-            // Transform stream to
-
-
-
+            // Transform stream to content object stream using fullStream (handles both text and tool calls)
+            // Note: fullStream includes tool results, textStream only has text
+            const transformedStream = (async function* () {
+                // Try fullStream first (handles both text and tool calls), fallback to textStream
+                const streamToUse = result.fullStream || result.textStream;
+                for await (const chunk of streamToUse) {
+                    // Handle different chunk types from fullStream
+                    if (chunk && typeof chunk === "object") {
+                        // Check for error chunks first (critical error handling)
+                        if ("type" in chunk && chunk.type === "error") {
+                            const errorChunk = chunk;
+                            logger.error(`LiteLLM: Error chunk received:`, {
+                                errorType: errorChunk.type,
+                                errorDetails: errorChunk.error,
+                            });
+                            throw new Error(`LiteLLM streaming error: ${errorChunk.error?.message ||
+                                "Unknown error"}`);
+                        }
+                        if ("textDelta" in chunk) {
+                            // Text delta from fullStream
+                            const textDelta = chunk.textDelta;
+                            if (textDelta) {
+                                yield { content: textDelta };
+                            }
+                        }
+                        else if (chunk.type === "tool-call-streaming-start") {
+                            // Tool call streaming start event - log for debugging
+                            const toolCall = chunk;
+                            logger.debug("LiteLLM: Tool call streaming start", {
+                                toolCallId: toolCall.toolCallId,
+                                toolName: toolCall.toolName,
+                            });
+                        }
+                    }
+                    else if (typeof chunk === "string") {
+                        // Direct string chunk from textStream fallback
+                        yield { content: chunk };
+                    }
                 }
-            };
+            })();
             // Create analytics promise that resolves after stream completion
             const analyticsPromise = streamAnalyticsCollector.createAnalytics(this.providerName, this.modelName, result, Date.now() - startTime, {
                 requestId: `litellm-stream-${Date.now()}`,
                 streamingMode: true,
             });
             return {
-                stream: transformedStream
+                stream: transformedStream,
                 provider: this.providerName,
                 model: this.modelName,
                 analytics: analyticsPromise,
```
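Because the generator normalizes both `textDelta` objects (from `fullStream`) and raw strings (from the `textStream` fallback) into `{ content }` chunks, consumers stay uniform. Continuing the illustrative caller from above:

```ts
const { stream, analytics } = await ai.stream({
  provider: "litellm",
  input: { text: "Explain streaming backpressure in two sentences." },
});

let full = "";
for await (const chunk of stream) {
  full += chunk.content; // every chunk is { content: string }
  process.stdout.write(chunk.content);
}
console.log("\nanalytics:", await analytics); // resolves once the stream finishes
```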
package/dist/providers/mistral.js

```diff
@@ -7,15 +7,14 @@ import { createTimeoutController, TimeoutError } from "../utils/timeout.js";
 import { DEFAULT_MAX_STEPS } from "../core/constants.js";
 import { validateApiKey, createMistralConfig, getProviderModel, } from "../utils/providerConfig.js";
 import { streamAnalyticsCollector } from "../core/streamAnalytics.js";
-import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
-import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
 import { createProxyFetch } from "../proxy/proxyFetch.js";
 // Configuration helpers - now using consolidated utility
 const getMistralApiKey = () => {
     return validateApiKey(createMistralConfig());
 };
 const getDefaultMistralModel = () => {
-
+    // Default to vision-capable Mistral Small 2506 (June 2025) with multimodal support
+    return getProviderModel("MISTRAL_MODEL", "mistral-small-2506");
 };
 /**
  * Mistral AI Provider v2 - BaseProvider Implementation
```
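Judging by the call site, `getProviderModel("MISTRAL_MODEL", "mistral-small-2506")` reads the named environment variable and falls back to the new vision-capable default. A sketch of that implied resolution (our function, not the package source):

```ts
// Implied behavior of getProviderModel(envVar, fallback) — an assumption
// based on the call site, not the packaged implementation.
function resolveModel(envVar: string, fallback: string): string {
  return process.env[envVar]?.trim() || fallback;
}

console.log(resolveModel("MISTRAL_MODEL", "mistral-small-2506"));
// unset -> "mistral-small-2506" (the new multimodal default)
// To pin a different model: export MISTRAL_MODEL=<model id>
```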
```diff
@@ -51,35 +50,9 @@ export class MistralProvider extends BaseProvider {
         // Get tools consistently with generate method
         const shouldUseTools = !options.disableTools && this.supportsTools();
         const tools = shouldUseTools ? await this.getAllTools() : {};
-        //
-
-
-            options.input?.files?.length ||
-            options.input?.csvFiles?.length ||
-            options.input?.pdfFiles?.length);
-        let messages;
-        if (hasMultimodalInput) {
-            logger.debug(`Mistral: Detected multimodal input, using multimodal message builder`, {
-                hasImages: !!options.input?.images?.length,
-                imageCount: options.input?.images?.length || 0,
-                hasContent: !!options.input?.content?.length,
-                contentCount: options.input?.content?.length || 0,
-                hasFiles: !!options.input?.files?.length,
-                fileCount: options.input?.files?.length || 0,
-                hasCSVFiles: !!options.input?.csvFiles?.length,
-                csvFileCount: options.input?.csvFiles?.length || 0,
-                hasPDFFiles: !!options.input?.pdfFiles?.length,
-                pdfFileCount: options.input?.pdfFiles?.length || 0,
-            });
-            const multimodalOptions = buildMultimodalOptions(options, this.providerName, this.modelName);
-            const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
-            // Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
-            messages = convertToCoreMessages(mm);
-        }
-        else {
-            logger.debug(`Mistral: Text-only input, using standard message builder`);
-            messages = await buildMessagesArray(options);
-        }
+        // Build message array from options with multimodal support
+        // Using protected helper from BaseProvider to eliminate code duplication
+        const messages = await this.buildMessagesForStream(options);
         const model = await this.getAISDKModelWithMiddleware(options); // This is where network connection happens!
         const result = await streamText({
             model,
```
package/dist/providers/ollama.d.ts

```diff
@@ -114,6 +114,7 @@ export declare class OllamaProvider extends BaseProvider {
      * Create stream generator for Ollama generate API (non-tool mode)
      */
     private createOllamaStream;
+    private createOpenAIStream;
     protected handleProviderError(error: unknown): Error;
     /**
      * Check if Ollama service is healthy and accessible
```