@juspay/neurolink 7.48.1 → 7.50.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +19 -0
- package/README.md +215 -16
- package/dist/agent/directTools.d.ts +55 -0
- package/dist/agent/directTools.js +266 -0
- package/dist/cli/factories/commandFactory.d.ts +6 -0
- package/dist/cli/factories/commandFactory.js +149 -16
- package/dist/cli/index.js +13 -2
- package/dist/cli/loop/conversationSelector.d.ts +45 -0
- package/dist/cli/loop/conversationSelector.js +222 -0
- package/dist/cli/loop/optionsSchema.d.ts +1 -1
- package/dist/cli/loop/session.d.ts +36 -8
- package/dist/cli/loop/session.js +257 -61
- package/dist/core/baseProvider.d.ts +9 -0
- package/dist/core/baseProvider.js +45 -5
- package/dist/core/evaluation.js +5 -2
- package/dist/factories/providerRegistry.js +2 -2
- package/dist/index.d.ts +8 -2
- package/dist/index.js +11 -10
- package/dist/lib/agent/directTools.d.ts +55 -0
- package/dist/lib/agent/directTools.js +266 -0
- package/dist/lib/core/baseProvider.d.ts +9 -0
- package/dist/lib/core/baseProvider.js +45 -5
- package/dist/lib/core/evaluation.js +5 -2
- package/dist/lib/factories/providerRegistry.js +2 -2
- package/dist/lib/index.d.ts +8 -2
- package/dist/lib/index.js +11 -10
- package/dist/lib/mcp/factory.d.ts +2 -157
- package/dist/lib/mcp/flexibleToolValidator.d.ts +1 -5
- package/dist/lib/mcp/index.d.ts +3 -2
- package/dist/lib/mcp/mcpCircuitBreaker.d.ts +1 -75
- package/dist/lib/mcp/mcpClientFactory.d.ts +1 -20
- package/dist/lib/mcp/mcpClientFactory.js +1 -0
- package/dist/lib/mcp/registry.d.ts +3 -10
- package/dist/lib/mcp/servers/agent/directToolsServer.d.ts +1 -1
- package/dist/lib/mcp/servers/aiProviders/aiCoreServer.d.ts +1 -1
- package/dist/lib/mcp/servers/utilities/utilityServer.d.ts +1 -1
- package/dist/lib/mcp/toolDiscoveryService.d.ts +3 -84
- package/dist/lib/mcp/toolRegistry.d.ts +2 -24
- package/dist/lib/middleware/builtin/guardrails.d.ts +5 -16
- package/dist/lib/middleware/builtin/guardrails.js +44 -39
- package/dist/lib/middleware/utils/guardrailsUtils.d.ts +64 -0
- package/dist/lib/middleware/utils/guardrailsUtils.js +387 -0
- package/dist/lib/neurolink.d.ts +36 -7
- package/dist/lib/neurolink.js +141 -0
- package/dist/lib/providers/anthropic.js +47 -3
- package/dist/lib/providers/azureOpenai.js +9 -2
- package/dist/lib/providers/googleAiStudio.js +9 -2
- package/dist/lib/providers/googleVertex.js +12 -2
- package/dist/lib/providers/huggingFace.js +1 -1
- package/dist/lib/providers/litellm.js +1 -1
- package/dist/lib/providers/mistral.js +1 -1
- package/dist/lib/providers/openAI.js +47 -3
- package/dist/lib/services/server/ai/observability/instrumentation.d.ts +57 -0
- package/dist/lib/services/server/ai/observability/instrumentation.js +170 -0
- package/dist/lib/session/globalSessionState.d.ts +26 -0
- package/dist/lib/session/globalSessionState.js +86 -1
- package/dist/lib/telemetry/index.d.ts +1 -0
- package/dist/lib/telemetry/telemetryService.d.ts +2 -0
- package/dist/lib/telemetry/telemetryService.js +7 -7
- package/dist/lib/types/cli.d.ts +28 -0
- package/dist/lib/types/content.d.ts +18 -5
- package/dist/lib/types/contextTypes.d.ts +1 -1
- package/dist/lib/types/conversation.d.ts +57 -4
- package/dist/lib/types/fileTypes.d.ts +65 -0
- package/dist/lib/types/fileTypes.js +4 -0
- package/dist/lib/types/generateTypes.d.ts +12 -0
- package/dist/lib/types/guardrails.d.ts +103 -0
- package/dist/lib/types/guardrails.js +1 -0
- package/dist/lib/types/index.d.ts +4 -2
- package/dist/lib/types/index.js +4 -0
- package/dist/lib/types/mcpTypes.d.ts +407 -14
- package/dist/lib/types/modelTypes.d.ts +6 -6
- package/dist/lib/types/observability.d.ts +49 -0
- package/dist/lib/types/observability.js +6 -0
- package/dist/lib/types/streamTypes.d.ts +7 -0
- package/dist/lib/types/tools.d.ts +132 -35
- package/dist/lib/utils/csvProcessor.d.ts +68 -0
- package/dist/lib/utils/csvProcessor.js +277 -0
- package/dist/lib/utils/fileDetector.d.ts +57 -0
- package/dist/lib/utils/fileDetector.js +457 -0
- package/dist/lib/utils/imageProcessor.d.ts +10 -0
- package/dist/lib/utils/imageProcessor.js +22 -0
- package/dist/lib/utils/loopUtils.d.ts +71 -0
- package/dist/lib/utils/loopUtils.js +262 -0
- package/dist/lib/utils/messageBuilder.d.ts +2 -1
- package/dist/lib/utils/messageBuilder.js +197 -2
- package/dist/lib/utils/optionsUtils.d.ts +1 -1
- package/dist/mcp/factory.d.ts +2 -157
- package/dist/mcp/flexibleToolValidator.d.ts +1 -5
- package/dist/mcp/index.d.ts +3 -2
- package/dist/mcp/mcpCircuitBreaker.d.ts +1 -75
- package/dist/mcp/mcpClientFactory.d.ts +1 -20
- package/dist/mcp/mcpClientFactory.js +1 -0
- package/dist/mcp/registry.d.ts +3 -10
- package/dist/mcp/servers/agent/directToolsServer.d.ts +1 -1
- package/dist/mcp/servers/aiProviders/aiCoreServer.d.ts +1 -1
- package/dist/mcp/servers/utilities/utilityServer.d.ts +1 -1
- package/dist/mcp/toolDiscoveryService.d.ts +3 -84
- package/dist/mcp/toolRegistry.d.ts +2 -24
- package/dist/middleware/builtin/guardrails.d.ts +5 -16
- package/dist/middleware/builtin/guardrails.js +44 -39
- package/dist/middleware/utils/guardrailsUtils.d.ts +64 -0
- package/dist/middleware/utils/guardrailsUtils.js +387 -0
- package/dist/neurolink.d.ts +36 -7
- package/dist/neurolink.js +141 -0
- package/dist/providers/anthropic.js +47 -3
- package/dist/providers/azureOpenai.js +9 -2
- package/dist/providers/googleAiStudio.js +9 -2
- package/dist/providers/googleVertex.js +12 -2
- package/dist/providers/huggingFace.js +1 -1
- package/dist/providers/litellm.js +1 -1
- package/dist/providers/mistral.js +1 -1
- package/dist/providers/openAI.js +47 -3
- package/dist/services/server/ai/observability/instrumentation.d.ts +57 -0
- package/dist/services/server/ai/observability/instrumentation.js +170 -0
- package/dist/session/globalSessionState.d.ts +26 -0
- package/dist/session/globalSessionState.js +86 -1
- package/dist/telemetry/index.d.ts +1 -0
- package/dist/telemetry/telemetryService.d.ts +2 -0
- package/dist/telemetry/telemetryService.js +7 -7
- package/dist/types/cli.d.ts +28 -0
- package/dist/types/content.d.ts +18 -5
- package/dist/types/contextTypes.d.ts +1 -1
- package/dist/types/conversation.d.ts +57 -4
- package/dist/types/fileTypes.d.ts +65 -0
- package/dist/types/fileTypes.js +4 -0
- package/dist/types/generateTypes.d.ts +12 -0
- package/dist/types/guardrails.d.ts +103 -0
- package/dist/types/guardrails.js +1 -0
- package/dist/types/index.d.ts +4 -2
- package/dist/types/index.js +4 -0
- package/dist/types/mcpTypes.d.ts +407 -14
- package/dist/types/modelTypes.d.ts +6 -6
- package/dist/types/observability.d.ts +49 -0
- package/dist/types/observability.js +6 -0
- package/dist/types/streamTypes.d.ts +7 -0
- package/dist/types/tools.d.ts +132 -35
- package/dist/utils/csvProcessor.d.ts +68 -0
- package/dist/utils/csvProcessor.js +277 -0
- package/dist/utils/fileDetector.d.ts +57 -0
- package/dist/utils/fileDetector.js +457 -0
- package/dist/utils/imageProcessor.d.ts +10 -0
- package/dist/utils/imageProcessor.js +22 -0
- package/dist/utils/loopUtils.d.ts +71 -0
- package/dist/utils/loopUtils.js +262 -0
- package/dist/utils/messageBuilder.d.ts +2 -1
- package/dist/utils/messageBuilder.js +197 -2
- package/dist/utils/optionsUtils.d.ts +1 -1
- package/package.json +18 -16
- package/dist/lib/mcp/contracts/mcpContract.d.ts +0 -106
- package/dist/lib/mcp/contracts/mcpContract.js +0 -5
- package/dist/mcp/contracts/mcpContract.d.ts +0 -106
- package/dist/mcp/contracts/mcpContract.js +0 -5
package/dist/lib/neurolink.js
CHANGED
@@ -39,6 +39,7 @@ import { directToolsServer } from "./mcp/servers/agent/directToolsServer.js";
 // Import orchestration components
 import { ModelRouter } from "./utils/modelRouter.js";
 import { BinaryTaskClassifier } from "./utils/taskClassifier.js";
+import { initializeOpenTelemetry, shutdownOpenTelemetry, flushOpenTelemetry, getLangfuseHealthStatus, } from "./services/server/ai/observability/instrumentation.js";
 import { isNonNullObject } from "./utils/typeUtils.js";
 import { isZodSchema } from "./utils/schemaConversion.js";
 // Core types imported from "./types/index.js"
@@ -178,8 +179,10 @@ export class NeuroLink {
      * @throws {Error} When external server manager initialization fails
      * @throws {Error} When HITL configuration is invalid (if enabled)
      */
+    observabilityConfig;
     constructor(config) {
         this.toolRegistry = config?.toolRegistry || new MCPToolRegistry();
+        this.observabilityConfig = config?.observability;
         // Initialize orchestration setting
         this.enableOrchestration = config?.enableOrchestration ?? false;
         // Read tool cache duration from environment variables, with a default
@@ -194,6 +197,7 @@ export class NeuroLink {
         this.initializeConversationMemory(config, constructorId, constructorStartTime, constructorHrTimeStart);
         this.initializeExternalServerManager(constructorId, constructorStartTime, constructorHrTimeStart);
         this.initializeHITL(config, constructorId, constructorStartTime, constructorHrTimeStart);
+        this.initializeLangfuse(constructorId, constructorStartTime, constructorHrTimeStart);
         this.logConstructorComplete(constructorId, constructorStartTime, constructorHrTimeStart);
     }
     /**
@@ -494,6 +498,81 @@ export class NeuroLink {
             this.unregisterExternalMCPToolFromRegistry(event.toolName);
         });
     }
+    /**
+     * Initialize Langfuse observability for AI operations tracking
+     */
+    initializeLangfuse(constructorId, constructorStartTime, constructorHrTimeStart) {
+        const langfuseInitStartTime = process.hrtime.bigint();
+        try {
+            const langfuseConfig = this.observabilityConfig?.langfuse;
+            if (langfuseConfig?.enabled) {
+                logger.debug(`[NeuroLink] 📊 LOG_POINT_C019_LANGFUSE_INIT_START`, {
+                    logPoint: "C019_LANGFUSE_INIT_START",
+                    constructorId,
+                    timestamp: new Date().toISOString(),
+                    elapsedMs: Date.now() - constructorStartTime,
+                    elapsedNs: (process.hrtime.bigint() - constructorHrTimeStart).toString(),
+                    langfuseInitStartTimeNs: langfuseInitStartTime.toString(),
+                    message: "Starting Langfuse observability initialization",
+                });
+                // Initialize OpenTelemetry FIRST (required for Langfuse v4)
+                initializeOpenTelemetry(langfuseConfig);
+                const healthStatus = getLangfuseHealthStatus();
+                const langfuseInitDurationNs = process.hrtime.bigint() - langfuseInitStartTime;
+                if (healthStatus.initialized &&
+                    healthStatus.hasProcessor &&
+                    healthStatus.isHealthy) {
+                    logger.debug(`[NeuroLink] ✅ LOG_POINT_C020_LANGFUSE_INIT_SUCCESS`, {
+                        logPoint: "C020_LANGFUSE_INIT_SUCCESS",
+                        constructorId,
+                        timestamp: new Date().toISOString(),
+                        elapsedMs: Date.now() - constructorStartTime,
+                        elapsedNs: (process.hrtime.bigint() - constructorHrTimeStart).toString(),
+                        langfuseInitDurationNs: langfuseInitDurationNs.toString(),
+                        langfuseInitDurationMs: Number(langfuseInitDurationNs) / 1_000_000,
+                        healthStatus,
+                        message: "Langfuse observability initialized successfully",
+                    });
+                }
+                else {
+                    logger.warn(`[NeuroLink] ⚠️ LOG_POINT_C021_LANGFUSE_INIT_WARNING`, {
+                        logPoint: "C021_LANGFUSE_INIT_WARNING",
+                        constructorId,
+                        timestamp: new Date().toISOString(),
+                        elapsedMs: Date.now() - constructorStartTime,
+                        elapsedNs: (process.hrtime.bigint() - constructorHrTimeStart).toString(),
+                        langfuseInitDurationNs: langfuseInitDurationNs.toString(),
+                        healthStatus,
+                        message: "Langfuse initialized but not healthy",
+                    });
+                }
+            }
+            else {
+                logger.debug(`[NeuroLink] 🚫 LOG_POINT_C022_LANGFUSE_DISABLED`, {
+                    logPoint: "C022_LANGFUSE_DISABLED",
+                    constructorId,
+                    timestamp: new Date().toISOString(),
+                    elapsedMs: Date.now() - constructorStartTime,
+                    elapsedNs: (process.hrtime.bigint() - constructorHrTimeStart).toString(),
+                    message: "Langfuse observability not enabled - skipping initialization",
+                });
+            }
+        }
+        catch (error) {
+            const langfuseInitErrorDurationNs = process.hrtime.bigint() - langfuseInitStartTime;
+            logger.error(`[NeuroLink] ❌ LOG_POINT_C023_LANGFUSE_INIT_ERROR`, {
+                logPoint: "C023_LANGFUSE_INIT_ERROR",
+                constructorId,
+                timestamp: new Date().toISOString(),
+                elapsedMs: Date.now() - constructorStartTime,
+                elapsedNs: (process.hrtime.bigint() - constructorHrTimeStart).toString(),
+                langfuseInitDurationNs: langfuseInitErrorDurationNs.toString(),
+                errorMessage: error instanceof Error ? error.message : String(error),
+                errorStack: error instanceof Error ? error.stack : undefined,
+                message: "Langfuse observability initialization failed",
+            });
+        }
+    }
     /**
      * Log constructor completion with final state summary
      */
@@ -992,6 +1071,68 @@ export class NeuroLink {
      * @throws {Error} When all providers fail to generate content
      * @throws {Error} When conversation memory operations fail (if enabled)
      */
+    /**
+     * Get observability configuration
+     */
+    getObservabilityConfig() {
+        return this.observabilityConfig;
+    }
+    /**
+     * Check if Langfuse telemetry is enabled
+     * Centralized utility to avoid duplication across providers
+     */
+    isTelemetryEnabled() {
+        return this.observabilityConfig?.langfuse?.enabled || false;
+    }
+    /**
+     * Public method to initialize Langfuse observability
+     * This method can be called externally to ensure Langfuse is properly initialized
+     */
+    async initializeLangfuseObservability() {
+        try {
+            const langfuseConfig = this.observabilityConfig?.langfuse;
+            if (langfuseConfig?.enabled) {
+                initializeOpenTelemetry(langfuseConfig);
+                logger.debug("[NeuroLink] Langfuse observability initialized via public method");
+            }
+            else {
+                logger.debug("[NeuroLink] Langfuse not enabled, skipping initialization");
+            }
+        }
+        catch (error) {
+            logger.warn("[NeuroLink] Failed to initialize Langfuse observability:", error);
+        }
+    }
+    /**
+     * Gracefully shutdown NeuroLink and all MCP connections
+     */
+    async shutdown() {
+        try {
+            logger.debug("[NeuroLink] Starting graceful shutdown");
+            try {
+                await flushOpenTelemetry();
+                await shutdownOpenTelemetry();
+                logger.debug("[NeuroLink] OpenTelemetry shutdown completed");
+            }
+            catch (error) {
+                logger.warn("[NeuroLink] OpenTelemetry shutdown failed:", error);
+            }
+            if (this.externalServerManager) {
+                try {
+                    await this.externalServerManager.shutdown();
+                    logger.debug("[NeuroLink] MCP servers shutdown completed");
+                }
+                catch (error) {
+                    logger.warn("[NeuroLink] MCP servers shutdown failed:", error);
+                }
+            }
+            logger.debug("[NeuroLink] Graceful shutdown completed");
+        }
+        catch (error) {
+            logger.error("[NeuroLink] Shutdown failed:", error);
+            throw error;
+        }
+    }
     async generate(optionsOrPrompt) {
         const originalPrompt = this._extractOriginalPrompt(optionsOrPrompt);
         // Convert string prompt to full options
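Taken together, the neurolink.js changes make observability opt-in per instance and add a graceful teardown path. A minimal consumer sketch, assuming the package's public export is `NeuroLink` and that `LangfuseConfig` accepts the `baseUrl`/`environment`/`release` fields echoed by `getLangfuseHealthStatus` (only `enabled` is directly confirmed by this diff):

```ts
import { NeuroLink } from "@juspay/neurolink";

// `observability` is read once in the constructor and gates
// initializeLangfuse(); when omitted or disabled, initialization is skipped.
const ai = new NeuroLink({
  observability: {
    langfuse: {
      enabled: true,
      // Field names below are assumptions inferred from the health-status
      // shape in instrumentation.d.ts; check the package types before use.
      baseUrl: "https://cloud.langfuse.com",
      environment: "production",
      release: "7.50.0",
    },
  },
});

// String prompts are converted to full options internally.
const result = await ai.generate("Say hello");

// New in 7.50.0: flushes and shuts down OpenTelemetry, then stops any
// external MCP servers.
await ai.shutdown();
```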
package/dist/lib/providers/anthropic.js
CHANGED
@@ -7,7 +7,7 @@ import { createTimeoutController, TimeoutError } from "../utils/timeout.js";
 import { AuthenticationError, NetworkError, ProviderError, RateLimitError, } from "../types/errors.js";
 import { DEFAULT_MAX_STEPS } from "../core/constants.js";
 import { validateApiKey, createAnthropicConfig, getProviderModel, } from "../utils/providerConfig.js";
-import { buildMessagesArray } from "../utils/messageBuilder.js";
+import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
 import { createProxyFetch } from "../proxy/proxyFetch.js";
 // Configuration helpers - now using consolidated utility
 const getAnthropicApiKey = () => {
@@ -92,8 +92,51 @@ export class AnthropicProvider extends BaseProvider {
     // ✅ Get tools for streaming (same as generate method)
     const shouldUseTools = !options.disableTools && this.supportsTools();
     const tools = shouldUseTools ? await this.getAllTools() : {};
-    // Build message array from options
-    const messages = buildMessagesArray(options);
+    // Build message array from options with multimodal support
+    const hasMultimodalInput = !!(options.input?.images?.length ||
+        options.input?.content?.length ||
+        options.input?.files?.length ||
+        options.input?.csvFiles?.length);
+    let messages;
+    if (hasMultimodalInput) {
+        logger.debug(`Anthropic: Detected multimodal input, using multimodal message builder`, {
+            hasImages: !!options.input?.images?.length,
+            imageCount: options.input?.images?.length || 0,
+            hasContent: !!options.input?.content?.length,
+            contentCount: options.input?.content?.length || 0,
+            hasFiles: !!options.input?.files?.length,
+            fileCount: options.input?.files?.length || 0,
+            hasCSVFiles: !!options.input?.csvFiles?.length,
+            csvFileCount: options.input?.csvFiles?.length || 0,
+        });
+        // Create multimodal options for buildMultimodalMessagesArray
+        const multimodalOptions = {
+            input: {
+                text: options.input?.text || "",
+                images: options.input?.images,
+                content: options.input?.content,
+                files: options.input?.files,
+                csvFiles: options.input?.csvFiles,
+            },
+            csvOptions: options.csvOptions,
+            systemPrompt: options.systemPrompt,
+            conversationHistory: options.conversationMessages,
+            provider: this.providerName,
+            model: this.modelName,
+            temperature: options.temperature,
+            maxTokens: options.maxTokens,
+            enableAnalytics: options.enableAnalytics,
+            enableEvaluation: options.enableEvaluation,
+            context: options.context,
+        };
+        const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
+        // Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
+        messages = convertToCoreMessages(mm);
+    }
+    else {
+        logger.debug(`Anthropic: Text-only input, using standard message builder`);
+        messages = await buildMessagesArray(options);
+    }
     const model = await this.getAISDKModelWithMiddleware(options);
     const result = await streamText({
         model: model,
@@ -104,6 +147,7 @@ export class AnthropicProvider extends BaseProvider {
         maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
         toolChoice: shouldUseTools ? "auto" : "none",
         abortSignal: timeoutController?.controller.signal,
+        experimental_telemetry: this.getStreamTelemetryConfig(options),
         onStepFinish: ({ toolCalls, toolResults }) => {
             this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                 logger.warn("[AnthropicProvider] Failed to store tool executions", {
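The same four-array check now appears verbatim in the Anthropic, Azure OpenAI, Google AI Studio, Google Vertex, and OpenAI stream paths. Distilled into a standalone predicate for reference (a sketch, not an export of the package):

```ts
type MultimodalInput = {
  input?: {
    images?: unknown[];
    content?: unknown[];
    files?: unknown[];
    csvFiles?: unknown[];
  };
};

// Any non-empty array among images/content/files/csvFiles routes the request
// through buildMultimodalMessagesArray + convertToCoreMessages; otherwise the
// provider falls back to the plain (now awaited) buildMessagesArray path.
function hasMultimodalInput(options: MultimodalInput): boolean {
  return !!(
    options.input?.images?.length ||
    options.input?.content?.length ||
    options.input?.files?.length ||
    options.input?.csvFiles?.length
  );
}
```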
package/dist/lib/providers/azureOpenai.js
CHANGED
@@ -110,7 +110,10 @@ export class AzureOpenAIProvider extends BaseProvider {
         });
     }
     // Build message array from options with multimodal support
-    const hasMultimodalInput = !!(options.input?.images?.length ||
+    const hasMultimodalInput = !!(options.input?.images?.length ||
+        options.input?.content?.length ||
+        options.input?.files?.length ||
+        options.input?.csvFiles?.length);
     let messages;
     if (hasMultimodalInput) {
         logger.debug(`Azure OpenAI: Detected multimodal input, using multimodal message builder`, {
@@ -125,7 +128,10 @@ export class AzureOpenAIProvider extends BaseProvider {
                 text: options.input?.text || "",
                 images: options.input?.images,
                 content: options.input?.content,
+                files: options.input?.files,
+                csvFiles: options.input?.csvFiles,
             },
+            csvOptions: options.csvOptions,
             systemPrompt: options.systemPrompt,
             conversationHistory: options.conversationMessages,
             provider: this.providerName,
@@ -142,7 +148,7 @@ export class AzureOpenAIProvider extends BaseProvider {
     }
     else {
         logger.debug(`Azure OpenAI: Text-only input, using standard message builder`);
-        messages = buildMessagesArray(options);
+        messages = await buildMessagesArray(options);
     }
     const model = await this.getAISDKModelWithMiddleware(options);
     const stream = await streamText({
@@ -156,6 +162,7 @@ export class AzureOpenAIProvider extends BaseProvider {
             : {}),
         tools,
         toolChoice: shouldUseTools ? "auto" : "none",
+        experimental_telemetry: this.getStreamTelemetryConfig(options),
         onStepFinish: ({ toolCalls, toolResults }) => {
             this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                 logger.warn("[AzureOpenaiProvider] Failed to store tool executions", {
package/dist/lib/providers/googleAiStudio.js
CHANGED
@@ -92,7 +92,10 @@ export class GoogleAIStudioProvider extends BaseProvider {
     const shouldUseTools = !options.disableTools && this.supportsTools();
     const tools = shouldUseTools ? await this.getAllTools() : {};
     // Build message array from options with multimodal support
-    const hasMultimodalInput = !!(options.input?.images?.length ||
+    const hasMultimodalInput = !!(options.input?.images?.length ||
+        options.input?.content?.length ||
+        options.input?.files?.length ||
+        options.input?.csvFiles?.length);
     let messages;
     if (hasMultimodalInput) {
         logger.debug(`Google AI Studio: Detected multimodal input, using multimodal message builder`, {
@@ -107,7 +110,10 @@ export class GoogleAIStudioProvider extends BaseProvider {
                 text: options.input?.text || "",
                 images: options.input?.images,
                 content: options.input?.content,
+                files: options.input?.files,
+                csvFiles: options.input?.csvFiles,
             },
+            csvOptions: options.csvOptions,
             systemPrompt: options.systemPrompt,
             conversationHistory: options.conversationMessages,
             provider: this.providerName,
@@ -124,7 +130,7 @@ export class GoogleAIStudioProvider extends BaseProvider {
     }
     else {
         logger.debug(`Google AI Studio: Text-only input, using standard message builder`);
-        messages = buildMessagesArray(options);
+        messages = await buildMessagesArray(options);
     }
     const result = await streamText({
         model,
@@ -135,6 +141,7 @@ export class GoogleAIStudioProvider extends BaseProvider {
         maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
         toolChoice: shouldUseTools ? "auto" : "none",
         abortSignal: timeoutController?.controller.signal,
+        experimental_telemetry: this.getStreamTelemetryConfig(options),
         onStepFinish: ({ toolCalls, toolResults }) => {
             this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                 logger.warn("[GoogleAiStudioProvider] Failed to store tool executions", {
package/dist/lib/providers/googleVertex.js
CHANGED
@@ -596,7 +596,10 @@ export class GoogleVertexProvider extends BaseProvider {
     // Validate stream options
     this.validateStreamOptionsOnly(options);
     // Build message array from options with multimodal support
-    const hasMultimodalInput = !!(options.input?.images?.length ||
+    const hasMultimodalInput = !!(options.input?.images?.length ||
+        options.input?.content?.length ||
+        options.input?.files?.length ||
+        options.input?.csvFiles?.length);
     let messages;
     if (hasMultimodalInput) {
         logger.debug(`${functionTag}: Detected multimodal input, using multimodal message builder`, {
@@ -611,7 +614,10 @@ export class GoogleVertexProvider extends BaseProvider {
                 text: options.input?.text || "",
                 images: options.input?.images,
                 content: options.input?.content,
+                files: options.input?.files,
+                csvFiles: options.input?.csvFiles,
             },
+            csvOptions: options.csvOptions,
             systemPrompt: options.systemPrompt,
             conversationHistory: options.conversationMessages,
             provider: this.providerName,
@@ -628,7 +634,7 @@ export class GoogleVertexProvider extends BaseProvider {
     }
     else {
         logger.debug(`${functionTag}: Text-only input, using standard message builder`);
-        messages = buildMessagesArray(options);
+        messages = await buildMessagesArray(options);
     }
     const model = await this.getAISDKModelWithMiddleware(options); // This is where network connection happens!
     // Get all available tools (direct + MCP + external) for streaming
@@ -660,6 +666,7 @@ export class GoogleVertexProvider extends BaseProvider {
             maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
         }),
         abortSignal: timeoutController?.controller.signal,
+        experimental_telemetry: this.getStreamTelemetryConfig(options),
         onError: (event) => {
             const error = event.error;
             const errorMessage = error instanceof Error ? error.message : String(error);
@@ -937,6 +944,7 @@ export class GoogleVertexProvider extends BaseProvider {
         modelName,
         issue: modelValidation.issue,
         recommendedModels: [
+            "claude-sonnet-4-5@20250929",
             "claude-sonnet-4@20250514",
             "claude-opus-4@20250514",
             "claude-3-5-sonnet-20241022",
@@ -1169,6 +1177,7 @@ export class GoogleVertexProvider extends BaseProvider {
     // Validate against known Claude model patterns
     const validPatterns = [
         /^claude-sonnet-4@\d{8}$/,
+        /^claude-sonnet-4-5@\d{8}$/,
         /^claude-opus-4@\d{8}$/,
         /^claude-3-5-sonnet-\d{8}$/,
         /^claude-3-5-haiku-\d{8}$/,
@@ -1390,6 +1399,7 @@ export class GoogleVertexProvider extends BaseProvider {
             "gemini-1.5-flash",
         ],
         claude: [
+            "claude-sonnet-4-5@20250929",
             "claude-sonnet-4@20250514",
             "claude-opus-4@20250514",
             "claude-3-5-sonnet-20241022",
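The Vertex validation now recognizes the hyphenated `claude-sonnet-4-5@YYYYMMDD` IDs alongside the existing patterns. A quick check of what the added regex does and does not accept:

```ts
const claudeSonnet45 = /^claude-sonnet-4-5@\d{8}$/; // pattern added in this release

console.log(claudeSonnet45.test("claude-sonnet-4-5@20250929")); // true: the new recommended model
console.log(claudeSonnet45.test("claude-sonnet-4.5@20250929")); // false: dot instead of hyphen
console.log(claudeSonnet45.test("claude-sonnet-4-5@2025"));     // false: date stamp must be 8 digits
```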
package/dist/lib/providers/huggingFace.js
CHANGED
@@ -114,7 +114,7 @@ export class HuggingFaceProvider extends BaseProvider {
     // Enhanced tool handling for HuggingFace models
     const streamOptions = this.prepareStreamOptions(options, analysisSchema);
     // Build message array from options
-    const messages = buildMessagesArray(options);
+    const messages = await buildMessagesArray(options);
     const result = await streamText({
         model: this.model,
         messages: messages,
package/dist/lib/providers/litellm.js
CHANGED
@@ -121,7 +121,7 @@ export class LiteLLMProvider extends BaseProvider {
     const timeoutController = createTimeoutController(timeout, this.providerName, "stream");
     try {
         // Build message array from options
-        const messages = buildMessagesArray(options);
+        const messages = await buildMessagesArray(options);
         const model = await this.getAISDKModelWithMiddleware(options); // This is where network connection happens!
         const result = streamText({
             model: model,
package/dist/lib/providers/mistral.js
CHANGED
@@ -49,7 +49,7 @@ export class MistralProvider extends BaseProvider {
     // Get tools consistently with generate method
     const shouldUseTools = !options.disableTools && this.supportsTools();
     const tools = shouldUseTools ? await this.getAllTools() : {};
-    const messages = buildMessagesArray(options);
+    const messages = await buildMessagesArray(options);
     const model = await this.getAISDKModelWithMiddleware(options); // This is where network connection happens!
     const result = await streamText({
         model,
package/dist/lib/providers/openAI.js
CHANGED
@@ -8,7 +8,7 @@ import { AuthenticationError, InvalidModelError, NetworkError, ProviderError, Ra
 import { DEFAULT_MAX_STEPS } from "../core/constants.js";
 import { validateApiKey, createOpenAIConfig, getProviderModel, } from "../utils/providerConfig.js";
 import { streamAnalyticsCollector } from "../core/streamAnalytics.js";
-import { buildMessagesArray } from "../utils/messageBuilder.js";
+import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
 import { createProxyFetch } from "../proxy/proxyFetch.js";
 import { isZodSchema } from "../utils/schemaConversion.js";
 // Configuration helpers - now using consolidated utility
@@ -244,8 +244,51 @@ export class OpenAIProvider extends BaseProvider {
         toolNames: Object.keys(tools),
         filteredOutTools: Object.keys(allTools).filter((name) => !tools[name]),
     });
-    // Build message array from options
-    const messages = buildMessagesArray(options);
+    // Build message array from options with multimodal support
+    const hasMultimodalInput = !!(options.input?.images?.length ||
+        options.input?.content?.length ||
+        options.input?.files?.length ||
+        options.input?.csvFiles?.length);
+    let messages;
+    if (hasMultimodalInput) {
+        logger.debug(`OpenAI: Detected multimodal input, using multimodal message builder`, {
+            hasImages: !!options.input?.images?.length,
+            imageCount: options.input?.images?.length || 0,
+            hasContent: !!options.input?.content?.length,
+            contentCount: options.input?.content?.length || 0,
+            hasFiles: !!options.input?.files?.length,
+            fileCount: options.input?.files?.length || 0,
+            hasCSVFiles: !!options.input?.csvFiles?.length,
+            csvFileCount: options.input?.csvFiles?.length || 0,
+        });
+        // Create multimodal options for buildMultimodalMessagesArray
+        const multimodalOptions = {
+            input: {
+                text: options.input?.text || "",
+                images: options.input?.images,
+                content: options.input?.content,
+                files: options.input?.files,
+                csvFiles: options.input?.csvFiles,
+            },
+            csvOptions: options.csvOptions,
+            systemPrompt: options.systemPrompt,
+            conversationHistory: options.conversationMessages,
+            provider: this.providerName,
+            model: this.modelName,
+            temperature: options.temperature,
+            maxTokens: options.maxTokens,
+            enableAnalytics: options.enableAnalytics,
+            enableEvaluation: options.enableEvaluation,
+            context: options.context,
+        };
+        const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
+        // Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
+        messages = convertToCoreMessages(mm);
+    }
+    else {
+        logger.debug(`OpenAI: Text-only input, using standard message builder`);
+        messages = await buildMessagesArray(options);
+    }
     // Debug the actual request being sent to OpenAI
     logger.debug(`OpenAI: streamText request parameters:`, {
         modelName: this.modelName,
@@ -273,6 +316,7 @@ export class OpenAIProvider extends BaseProvider {
         maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
         toolChoice: shouldUseTools && Object.keys(tools).length > 0 ? "auto" : "none",
         abortSignal: timeoutController?.controller.signal,
+        experimental_telemetry: this.getStreamTelemetryConfig(options),
         onStepFinish: ({ toolCalls, toolResults }) => {
             logger.info("Tool execution completed", { toolResults, toolCalls });
             // Handle tool execution storage
package/dist/lib/services/server/ai/observability/instrumentation.d.ts
ADDED
@@ -0,0 +1,57 @@
+/**
+ * OpenTelemetry Instrumentation for Langfuse v4
+ *
+ * Configures OpenTelemetry TracerProvider with LangfuseSpanProcessor to capture
+ * traces from Vercel AI SDK's experimental_telemetry feature.
+ *
+ * Flow: Vercel AI SDK → OpenTelemetry Spans → LangfuseSpanProcessor → Langfuse Platform
+ */
+import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
+import { LangfuseSpanProcessor } from "@langfuse/otel";
+import type { LangfuseConfig } from "../../../../types/observability.js";
+/**
+ * Initialize OpenTelemetry with Langfuse span processor
+ *
+ * This connects Vercel AI SDK's experimental_telemetry to Langfuse by:
+ * 1. Creating LangfuseSpanProcessor with Langfuse credentials
+ * 2. Creating a NodeTracerProvider with service metadata and span processor
+ * 3. Registering the provider globally for AI SDK to use
+ *
+ * @param config - Langfuse configuration passed from parent application
+ */
+export declare function initializeOpenTelemetry(config: LangfuseConfig): void;
+/**
+ * Flush all pending spans to Langfuse
+ */
+export declare function flushOpenTelemetry(): Promise<void>;
+/**
+ * Shutdown OpenTelemetry and Langfuse span processor
+ */
+export declare function shutdownOpenTelemetry(): Promise<void>;
+/**
+ * Get the Langfuse span processor
+ */
+export declare function getLangfuseSpanProcessor(): LangfuseSpanProcessor | null;
+/**
+ * Get the tracer provider
+ */
+export declare function getTracerProvider(): NodeTracerProvider | null;
+/**
+ * Check if OpenTelemetry is initialized
+ */
+export declare function isOpenTelemetryInitialized(): boolean;
+/**
+ * Get health status for Langfuse observability
+ */
+export declare function getLangfuseHealthStatus(): {
+    isHealthy: boolean | undefined;
+    initialized: boolean;
+    credentialsValid: boolean;
+    enabled: boolean;
+    hasProcessor: boolean;
+    config: {
+        baseUrl: string;
+        environment: string;
+        release: string;
+    } | undefined;
+};
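The declarations above describe the whole telemetry pipeline: `initializeOpenTelemetry` registers a `NodeTracerProvider` with a `LangfuseSpanProcessor`, the providers feed it via `experimental_telemetry`, and flush/shutdown drain it. A standalone usage sketch against this declared API (the deep-import path mirrors the relative import added to neurolink.js; whether the package re-exports these functions at the top level is not shown in this diff):

```ts
import {
  initializeOpenTelemetry,
  getLangfuseHealthStatus,
  flushOpenTelemetry,
  shutdownOpenTelemetry,
} from "@juspay/neurolink/dist/lib/services/server/ai/observability/instrumentation.js";

// LangfuseConfig fields other than `enabled` are assumptions inferred from
// the health-status `config` shape (baseUrl/environment/release).
initializeOpenTelemetry({
  enabled: true,
  baseUrl: "https://cloud.langfuse.com",
  environment: "production",
  release: "7.50.0",
} as Parameters<typeof initializeOpenTelemetry>[0]);

// Same health check NeuroLink's constructor performs after initialization.
const health = getLangfuseHealthStatus();
if (!(health.initialized && health.hasProcessor && health.isHealthy)) {
  console.warn("Langfuse span processor is not healthy:", health.config);
}

// On exit: flush buffered spans before tearing down the tracer provider,
// the same order NeuroLink.shutdown() uses.
await flushOpenTelemetry();
await shutdownOpenTelemetry();
```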