@juspay/neurolink 7.49.0 → 7.51.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (258)
  1. package/CHANGELOG.md +13 -0
  2. package/README.md +12 -9
  3. package/dist/adapters/providerImageAdapter.js +82 -10
  4. package/dist/agent/directTools.d.ts +10 -10
  5. package/dist/agent/directTools.js +5 -3
  6. package/dist/cli/commands/config.js +1 -0
  7. package/dist/cli/commands/mcp.js +1 -0
  8. package/dist/cli/commands/models.js +1 -0
  9. package/dist/cli/commands/ollama.js +1 -0
  10. package/dist/cli/commands/setup-anthropic.js +1 -0
  11. package/dist/cli/commands/setup-azure.js +1 -0
  12. package/dist/cli/commands/setup-bedrock.js +1 -0
  13. package/dist/cli/commands/setup-gcp.js +1 -0
  14. package/dist/cli/commands/setup-google-ai.js +1 -0
  15. package/dist/cli/commands/setup-huggingface.js +1 -0
  16. package/dist/cli/commands/setup-mistral.js +1 -0
  17. package/dist/cli/commands/setup-openai.js +1 -0
  18. package/dist/cli/commands/setup.js +1 -0
  19. package/dist/cli/errorHandler.js +1 -0
  20. package/dist/cli/factories/commandFactory.d.ts +5 -0
  21. package/dist/cli/factories/commandFactory.js +42 -6
  22. package/dist/cli/factories/ollamaCommandFactory.js +1 -0
  23. package/dist/cli/factories/sagemakerCommandFactory.js +1 -0
  24. package/dist/cli/factories/setupCommandFactory.js +1 -0
  25. package/dist/cli/index.js +14 -2
  26. package/dist/cli/loop/conversationSelector.js +1 -0
  27. package/dist/cli/loop/optionsSchema.js +1 -0
  28. package/dist/cli/loop/session.js +1 -0
  29. package/dist/cli/parser.js +1 -0
  30. package/dist/cli/utils/completeSetup.js +1 -0
  31. package/dist/cli/utils/envManager.js +1 -0
  32. package/dist/cli/utils/interactiveSetup.js +1 -0
  33. package/dist/cli/utils/ollamaUtils.js +1 -0
  34. package/dist/constants/index.js +1 -1
  35. package/dist/core/baseProvider.d.ts +14 -0
  36. package/dist/core/baseProvider.js +106 -23
  37. package/dist/index.d.ts +11 -5
  38. package/dist/index.js +11 -10
  39. package/dist/lib/adapters/providerImageAdapter.js +83 -10
  40. package/dist/lib/agent/directTools.d.ts +10 -10
  41. package/dist/lib/agent/directTools.js +6 -3
  42. package/dist/lib/config/configManager.js +1 -0
  43. package/dist/lib/config/conversationMemory.js +1 -0
  44. package/dist/lib/config/taskClassificationConfig.js +1 -0
  45. package/dist/lib/constants/index.js +2 -1
  46. package/dist/lib/constants/performance.js +1 -0
  47. package/dist/lib/constants/retry.js +1 -0
  48. package/dist/lib/constants/timeouts.js +1 -0
  49. package/dist/lib/constants/tokens.js +1 -0
  50. package/dist/lib/core/analytics.js +1 -0
  51. package/dist/lib/core/baseProvider.d.ts +14 -0
  52. package/dist/lib/core/baseProvider.js +107 -23
  53. package/dist/lib/core/constants.js +1 -0
  54. package/dist/lib/core/conversationMemoryFactory.js +1 -0
  55. package/dist/lib/core/conversationMemoryInitializer.js +1 -0
  56. package/dist/lib/core/conversationMemoryManager.js +1 -0
  57. package/dist/lib/core/dynamicModels.js +1 -0
  58. package/dist/lib/core/evaluation.js +1 -0
  59. package/dist/lib/core/evaluationProviders.js +1 -0
  60. package/dist/lib/core/factory.js +1 -0
  61. package/dist/lib/core/modelConfiguration.js +1 -0
  62. package/dist/lib/core/redisConversationMemoryManager.js +1 -0
  63. package/dist/lib/core/serviceRegistry.js +1 -0
  64. package/dist/lib/core/streamAnalytics.js +1 -0
  65. package/dist/lib/evaluation/contextBuilder.js +1 -0
  66. package/dist/lib/evaluation/index.js +1 -0
  67. package/dist/lib/evaluation/prompts.js +1 -0
  68. package/dist/lib/evaluation/ragasEvaluator.js +1 -0
  69. package/dist/lib/evaluation/retryManager.js +1 -0
  70. package/dist/lib/evaluation/scoring.js +1 -0
  71. package/dist/lib/factories/providerFactory.js +1 -0
  72. package/dist/lib/factories/providerRegistry.js +1 -0
  73. package/dist/lib/hitl/hitlErrors.js +1 -0
  74. package/dist/lib/hitl/hitlManager.js +1 -0
  75. package/dist/lib/hitl/index.js +1 -0
  76. package/dist/lib/hitl/types.js +1 -0
  77. package/dist/lib/index.d.ts +11 -5
  78. package/dist/lib/index.js +12 -10
  79. package/dist/lib/mcp/externalServerManager.js +1 -0
  80. package/dist/lib/mcp/factory.js +1 -0
  81. package/dist/lib/mcp/flexibleToolValidator.js +1 -0
  82. package/dist/lib/mcp/index.js +1 -0
  83. package/dist/lib/mcp/mcpCircuitBreaker.js +1 -0
  84. package/dist/lib/mcp/mcpClientFactory.js +2 -1
  85. package/dist/lib/mcp/registry.js +1 -0
  86. package/dist/lib/mcp/servers/agent/directToolsServer.js +2 -0
  87. package/dist/lib/mcp/servers/aiProviders/aiAnalysisTools.js +1 -0
  88. package/dist/lib/mcp/servers/aiProviders/aiCoreServer.js +1 -0
  89. package/dist/lib/mcp/servers/aiProviders/aiWorkflowTools.js +1 -0
  90. package/dist/lib/mcp/servers/utilities/utilityServer.js +1 -0
  91. package/dist/lib/mcp/toolDiscoveryService.js +1 -0
  92. package/dist/lib/mcp/toolRegistry.js +1 -0
  93. package/dist/lib/memory/mem0Initializer.js +1 -0
  94. package/dist/lib/middleware/builtin/analytics.js +1 -0
  95. package/dist/lib/middleware/builtin/autoEvaluation.js +1 -0
  96. package/dist/lib/middleware/builtin/guardrails.js +1 -0
  97. package/dist/lib/middleware/factory.js +1 -0
  98. package/dist/lib/middleware/index.js +1 -0
  99. package/dist/lib/middleware/registry.js +1 -0
  100. package/dist/lib/middleware/utils/guardrailsUtils.js +1 -0
  101. package/dist/lib/models/modelRegistry.js +1 -0
  102. package/dist/lib/models/modelResolver.js +2 -0
  103. package/dist/lib/neurolink.d.ts +41 -6
  104. package/dist/lib/neurolink.js +276 -5
  105. package/dist/lib/providers/amazonBedrock.d.ts +1 -0
  106. package/dist/lib/providers/amazonBedrock.js +166 -14
  107. package/dist/lib/providers/amazonSagemaker.js +1 -0
  108. package/dist/lib/providers/anthropic.js +8 -21
  109. package/dist/lib/providers/anthropicBaseProvider.js +1 -0
  110. package/dist/lib/providers/azureOpenai.js +6 -21
  111. package/dist/lib/providers/googleAiStudio.js +6 -21
  112. package/dist/lib/providers/googleVertex.js +9 -1
  113. package/dist/lib/providers/huggingFace.js +34 -3
  114. package/dist/lib/providers/index.js +1 -0
  115. package/dist/lib/providers/litellm.js +34 -3
  116. package/dist/lib/providers/mistral.js +32 -2
  117. package/dist/lib/providers/ollama.d.ts +37 -1
  118. package/dist/lib/providers/ollama.js +544 -58
  119. package/dist/lib/providers/openAI.js +6 -21
  120. package/dist/lib/providers/openaiCompatible.js +41 -4
  121. package/dist/lib/providers/sagemaker/adaptive-semaphore.js +1 -0
  122. package/dist/lib/providers/sagemaker/client.js +1 -0
  123. package/dist/lib/providers/sagemaker/config.js +1 -0
  124. package/dist/lib/providers/sagemaker/detection.js +1 -0
  125. package/dist/lib/providers/sagemaker/diagnostics.js +1 -0
  126. package/dist/lib/providers/sagemaker/error-constants.js +1 -0
  127. package/dist/lib/providers/sagemaker/errors.js +1 -0
  128. package/dist/lib/providers/sagemaker/index.js +1 -0
  129. package/dist/lib/providers/sagemaker/language-model.js +1 -0
  130. package/dist/lib/providers/sagemaker/parsers.js +1 -0
  131. package/dist/lib/providers/sagemaker/streaming.js +1 -0
  132. package/dist/lib/providers/sagemaker/structured-parser.js +1 -0
  133. package/dist/lib/proxy/awsProxyIntegration.js +1 -0
  134. package/dist/lib/proxy/proxyFetch.js +1 -0
  135. package/dist/lib/proxy/utils/noProxyUtils.js +1 -0
  136. package/dist/lib/sdk/toolRegistration.js +2 -0
  137. package/dist/lib/services/server/ai/observability/instrumentation.d.ts +57 -0
  138. package/dist/lib/services/server/ai/observability/instrumentation.js +171 -0
  139. package/dist/lib/session/globalSessionState.js +38 -1
  140. package/dist/lib/telemetry/index.d.ts +1 -0
  141. package/dist/lib/telemetry/index.js +1 -0
  142. package/dist/lib/telemetry/telemetryService.d.ts +2 -0
  143. package/dist/lib/telemetry/telemetryService.js +8 -7
  144. package/dist/lib/types/analytics.js +1 -0
  145. package/dist/lib/types/cli.js +1 -0
  146. package/dist/lib/types/common.js +1 -0
  147. package/dist/lib/types/configTypes.js +1 -0
  148. package/dist/lib/types/content.d.ts +14 -1
  149. package/dist/lib/types/content.js +1 -0
  150. package/dist/lib/types/contextTypes.js +1 -0
  151. package/dist/lib/types/conversation.d.ts +2 -0
  152. package/dist/lib/types/conversation.js +1 -0
  153. package/dist/lib/types/domainTypes.js +1 -0
  154. package/dist/lib/types/errors.js +1 -0
  155. package/dist/lib/types/evaluation.js +1 -0
  156. package/dist/lib/types/evaluationProviders.js +1 -0
  157. package/dist/lib/types/evaluationTypes.js +1 -0
  158. package/dist/lib/types/externalMcp.js +1 -0
  159. package/dist/lib/types/fileTypes.d.ts +44 -0
  160. package/dist/lib/types/fileTypes.js +1 -0
  161. package/dist/lib/types/generateTypes.d.ts +1 -0
  162. package/dist/lib/types/generateTypes.js +1 -0
  163. package/dist/lib/types/guardrails.js +1 -0
  164. package/dist/lib/types/index.js +1 -0
  165. package/dist/lib/types/mcpTypes.js +1 -0
  166. package/dist/lib/types/middlewareTypes.js +1 -0
  167. package/dist/lib/types/modelTypes.d.ts +6 -6
  168. package/dist/lib/types/modelTypes.js +1 -0
  169. package/dist/lib/types/observability.d.ts +49 -0
  170. package/dist/lib/types/observability.js +7 -0
  171. package/dist/lib/types/providers.d.ts +44 -0
  172. package/dist/lib/types/providers.js +1 -0
  173. package/dist/lib/types/sdkTypes.js +1 -0
  174. package/dist/lib/types/serviceTypes.js +1 -0
  175. package/dist/lib/types/streamTypes.d.ts +1 -0
  176. package/dist/lib/types/streamTypes.js +1 -0
  177. package/dist/lib/types/taskClassificationTypes.js +1 -0
  178. package/dist/lib/types/tools.js +2 -0
  179. package/dist/lib/types/typeAliases.js +1 -0
  180. package/dist/lib/types/universalProviderOptions.js +1 -0
  181. package/dist/lib/utils/analyticsUtils.js +1 -0
  182. package/dist/lib/utils/conversationMemory.js +1 -0
  183. package/dist/lib/utils/conversationMemoryUtils.js +1 -0
  184. package/dist/lib/utils/csvProcessor.js +1 -0
  185. package/dist/lib/utils/errorHandling.js +1 -0
  186. package/dist/lib/utils/evaluationUtils.js +1 -0
  187. package/dist/lib/utils/factoryProcessing.js +1 -0
  188. package/dist/lib/utils/fileDetector.js +7 -3
  189. package/dist/lib/utils/imageProcessor.js +1 -0
  190. package/dist/lib/utils/logger.js +1 -0
  191. package/dist/lib/utils/loopUtils.js +1 -0
  192. package/dist/lib/utils/mcpDefaults.js +1 -0
  193. package/dist/lib/utils/messageBuilder.js +96 -9
  194. package/dist/lib/utils/modelRouter.js +1 -0
  195. package/dist/lib/utils/multimodalOptionsBuilder.d.ts +67 -0
  196. package/dist/lib/utils/multimodalOptionsBuilder.js +65 -0
  197. package/dist/lib/utils/optionsConversion.js +1 -0
  198. package/dist/lib/utils/optionsUtils.js +1 -0
  199. package/dist/lib/utils/parameterValidation.js +1 -0
  200. package/dist/lib/utils/pdfProcessor.d.ts +10 -0
  201. package/dist/lib/utils/pdfProcessor.js +199 -0
  202. package/dist/lib/utils/performance.js +1 -0
  203. package/dist/lib/utils/promptRedaction.js +1 -0
  204. package/dist/lib/utils/providerConfig.js +1 -0
  205. package/dist/lib/utils/providerHealth.js +1 -0
  206. package/dist/lib/utils/providerSetupMessages.js +1 -0
  207. package/dist/lib/utils/providerUtils.js +1 -0
  208. package/dist/lib/utils/redis.js +1 -0
  209. package/dist/lib/utils/retryHandler.js +1 -0
  210. package/dist/lib/utils/schemaConversion.js +1 -0
  211. package/dist/lib/utils/taskClassificationUtils.js +1 -0
  212. package/dist/lib/utils/taskClassifier.js +1 -0
  213. package/dist/lib/utils/timeout.js +1 -0
  214. package/dist/lib/utils/tokenLimits.js +1 -0
  215. package/dist/lib/utils/toolUtils.js +1 -0
  216. package/dist/lib/utils/transformationUtils.js +1 -0
  217. package/dist/lib/utils/typeUtils.js +1 -0
  218. package/dist/mcp/mcpClientFactory.js +1 -1
  219. package/dist/mcp/servers/agent/directToolsServer.js +1 -0
  220. package/dist/models/modelResolver.js +1 -0
  221. package/dist/neurolink.d.ts +41 -6
  222. package/dist/neurolink.js +275 -5
  223. package/dist/providers/amazonBedrock.d.ts +1 -0
  224. package/dist/providers/amazonBedrock.js +165 -14
  225. package/dist/providers/anthropic.js +7 -21
  226. package/dist/providers/azureOpenai.js +5 -21
  227. package/dist/providers/googleAiStudio.js +5 -21
  228. package/dist/providers/googleVertex.js +8 -1
  229. package/dist/providers/huggingFace.js +33 -3
  230. package/dist/providers/litellm.js +33 -3
  231. package/dist/providers/mistral.js +31 -2
  232. package/dist/providers/ollama.d.ts +37 -1
  233. package/dist/providers/ollama.js +543 -58
  234. package/dist/providers/openAI.js +5 -21
  235. package/dist/providers/openaiCompatible.js +40 -4
  236. package/dist/sdk/toolRegistration.js +1 -0
  237. package/dist/services/server/ai/observability/instrumentation.d.ts +57 -0
  238. package/dist/services/server/ai/observability/instrumentation.js +170 -0
  239. package/dist/session/globalSessionState.js +37 -1
  240. package/dist/telemetry/index.d.ts +1 -0
  241. package/dist/telemetry/telemetryService.d.ts +2 -0
  242. package/dist/telemetry/telemetryService.js +7 -7
  243. package/dist/types/content.d.ts +14 -1
  244. package/dist/types/conversation.d.ts +2 -0
  245. package/dist/types/fileTypes.d.ts +44 -0
  246. package/dist/types/generateTypes.d.ts +1 -0
  247. package/dist/types/observability.d.ts +49 -0
  248. package/dist/types/observability.js +6 -0
  249. package/dist/types/providers.d.ts +44 -0
  250. package/dist/types/streamTypes.d.ts +1 -0
  251. package/dist/types/tools.js +1 -0
  252. package/dist/utils/fileDetector.js +6 -3
  253. package/dist/utils/messageBuilder.js +95 -9
  254. package/dist/utils/multimodalOptionsBuilder.d.ts +67 -0
  255. package/dist/utils/multimodalOptionsBuilder.js +64 -0
  256. package/dist/utils/pdfProcessor.d.ts +10 -0
  257. package/dist/utils/pdfProcessor.js +198 -0
  258. package/package.json +12 -16
@@ -9,6 +9,7 @@ import { DEFAULT_MAX_STEPS } from "../core/constants.js";
 import { validateApiKey, createOpenAIConfig, getProviderModel, } from "../utils/providerConfig.js";
 import { streamAnalyticsCollector } from "../core/streamAnalytics.js";
 import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
+import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
 import { createProxyFetch } from "../proxy/proxyFetch.js";
 import { isZodSchema } from "../utils/schemaConversion.js";
 // Configuration helpers - now using consolidated utility
@@ -248,7 +249,8 @@ export class OpenAIProvider extends BaseProvider {
 const hasMultimodalInput = !!(options.input?.images?.length ||
 options.input?.content?.length ||
 options.input?.files?.length ||
-options.input?.csvFiles?.length);
+options.input?.csvFiles?.length ||
+options.input?.pdfFiles?.length);
 let messages;
 if (hasMultimodalInput) {
 logger.debug(`OpenAI: Detected multimodal input, using multimodal message builder`, {
@@ -261,26 +263,7 @@ export class OpenAIProvider extends BaseProvider {
 hasCSVFiles: !!options.input?.csvFiles?.length,
 csvFileCount: options.input?.csvFiles?.length || 0,
 });
-// Create multimodal options for buildMultimodalMessagesArray
-const multimodalOptions = {
-input: {
-text: options.input?.text || "",
-images: options.input?.images,
-content: options.input?.content,
-files: options.input?.files,
-csvFiles: options.input?.csvFiles,
-},
-csvOptions: options.csvOptions,
-systemPrompt: options.systemPrompt,
-conversationHistory: options.conversationMessages,
-provider: this.providerName,
-model: this.modelName,
-temperature: options.temperature,
-maxTokens: options.maxTokens,
-enableAnalytics: options.enableAnalytics,
-enableEvaluation: options.enableEvaluation,
-context: options.context,
-};
+const multimodalOptions = buildMultimodalOptions(options, this.providerName, this.modelName);
 const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
 // Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
 messages = convertToCoreMessages(mm);
@@ -316,6 +299,7 @@ export class OpenAIProvider extends BaseProvider {
 maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
 toolChoice: shouldUseTools && Object.keys(tools).length > 0 ? "auto" : "none",
 abortSignal: timeoutController?.controller.signal,
+experimental_telemetry: this.getStreamTelemetryConfig(options),
 onStepFinish: ({ toolCalls, toolResults }) => {
 logger.info("Tool execution completed", { toolResults, toolCalls });
 // Handle tool execution storage
@@ -491,3 +475,4 @@ export class OpenAIProvider extends BaseProvider {
 }
 // Export for factory registration
 export default OpenAIProvider;
+//# sourceMappingURL=openAI.js.map
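
The inline options object removed above now lives in the new multimodalOptionsBuilder utility added in this release. A minimal sketch of what that helper presumably returns, reconstructed from the removed fields rather than from the published source; the pdfFiles passthrough is an assumption that mirrors the new pdfFiles detection branch:

// Hedged reconstruction, not the published multimodalOptionsBuilder.js.
export function buildMultimodalOptions(options: any, providerName: string, modelName: string) {
  return {
    input: {
      text: options.input?.text || "",
      images: options.input?.images,
      content: options.input?.content,
      files: options.input?.files,
      csvFiles: options.input?.csvFiles,
      pdfFiles: options.input?.pdfFiles, // assumption: mirrors the new pdfFiles detection above
    },
    csvOptions: options.csvOptions,
    systemPrompt: options.systemPrompt,
    conversationHistory: options.conversationMessages,
    provider: providerName,
    model: modelName,
    temperature: options.temperature,
    maxTokens: options.maxTokens,
    enableAnalytics: options.enableAnalytics,
    enableEvaluation: options.enableEvaluation,
    context: options.context,
  };
}

Extracting the object keeps the OpenAI, OpenAI-compatible, and other provider stream paths building identical multimodal options from one place instead of copy-pasted literals.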
@@ -5,6 +5,9 @@ import { logger } from "../utils/logger.js";
 import { createTimeoutController, TimeoutError } from "../utils/timeout.js";
 import { streamAnalyticsCollector } from "../core/streamAnalytics.js";
 import { createProxyFetch } from "../proxy/proxyFetch.js";
+import { DEFAULT_MAX_STEPS } from "../core/constants.js";
+import { buildMessagesArray, buildMultimodalMessagesArray, convertToCoreMessages, } from "../utils/messageBuilder.js";
+import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
 // Constants
 const FALLBACK_OPENAI_COMPATIBLE_MODEL = "gpt-3.5-turbo";
 // Configuration helpers
@@ -157,13 +160,46 @@ export class OpenAICompatibleProvider extends BaseProvider {
 const timeout = this.getTimeout(options);
 const timeoutController = createTimeoutController(timeout, this.providerName, "stream");
 try {
+// Check for multimodal input (images, PDFs, CSVs, files)
+const hasMultimodalInput = !!(options.input?.images?.length ||
+options.input?.content?.length ||
+options.input?.files?.length ||
+options.input?.csvFiles?.length ||
+options.input?.pdfFiles?.length);
+let messages;
+if (hasMultimodalInput) {
+logger.debug(`OpenAI Compatible: Detected multimodal input, using multimodal message builder`, {
+hasImages: !!options.input?.images?.length,
+imageCount: options.input?.images?.length || 0,
+hasContent: !!options.input?.content?.length,
+contentCount: options.input?.content?.length || 0,
+hasFiles: !!options.input?.files?.length,
+fileCount: options.input?.files?.length || 0,
+hasCSVFiles: !!options.input?.csvFiles?.length,
+csvFileCount: options.input?.csvFiles?.length || 0,
+hasPDFFiles: !!options.input?.pdfFiles?.length,
+pdfFileCount: options.input?.pdfFiles?.length || 0,
+});
+const multimodalOptions = buildMultimodalOptions(options, this.providerName, this.modelName);
+const mm = await buildMultimodalMessagesArray(multimodalOptions, this.providerName, this.modelName);
+// Convert multimodal messages to Vercel AI SDK format (CoreMessage[])
+messages = convertToCoreMessages(mm);
+}
+else {
+logger.debug(`OpenAI Compatible: Text-only input, using standard message builder`);
+messages = await buildMessagesArray(options);
+}
 const model = await this.getAISDKModelWithMiddleware(options); // This is where network connection happens!
 const result = streamText({
 model,
-prompt: options.input.text,
-system: options.systemPrompt,
-temperature: options.temperature,
-maxTokens: options.maxTokens, // No default limit - unlimited unless specified
+messages: messages,
+...(options.maxTokens !== null && options.maxTokens !== undefined
+? { maxTokens: options.maxTokens }
+: {}),
+...(options.temperature !== null && options.temperature !== undefined
+? { temperature: options.temperature }
+: {}),
+maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
 tools: options.tools,
 toolChoice: "auto",
 abortSignal: timeoutController?.controller.signal,
@@ -265,3 +301,4 @@ export class OpenAICompatibleProvider extends BaseProvider {
 ];
 }
 }
+//# sourceMappingURL=openaiCompatible.js.map
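
With this change the OpenAI-compatible stream path accepts the same multimodal input shape as the dedicated OpenAI provider and builds CoreMessage arrays instead of a bare prompt string. A sketch of an options object that would now take the multimodal branch; the field names come from the detection code above, while the value types (for example whether a PDF entry is a path or a buffer) are assumptions:

// Illustrative only; the pdfFiles element type and the surrounding stream() call site are assumptions.
const pdfFile = "./q3-report.pdf";
const streamOptions = {
  input: {
    text: "Summarize the attached report",
    pdfFiles: [pdfFile], // any of images/content/files/csvFiles/pdfFiles triggers the multimodal branch
  },
  systemPrompt: "You are a concise analyst.",
  temperature: 0.2, // forwarded only when not null/undefined
  maxTokens: 1024,  // same conditional spread as temperature
  maxSteps: 4,      // falls back to DEFAULT_MAX_STEPS when omitted
};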
@@ -213,3 +213,4 @@ export function createAdaptiveSemaphore(initialConcurrency, maxConcurrency = 10,
 minConcurrency,
 });
 }
+//# sourceMappingURL=adaptive-semaphore.js.map
@@ -470,3 +470,4 @@ export async function testSageMakerConnectivity(config, endpointName) {
 client.dispose();
 }
 }
+//# sourceMappingURL=client.js.map
@@ -315,3 +315,4 @@ export function checkSageMakerConfiguration() {
 summary: getConfigurationSummary(),
 };
 }
+//# sourceMappingURL=config.js.map
@@ -604,3 +604,4 @@ export class SageMakerDetector {
 export function createSageMakerDetector(config) {
 return new SageMakerDetector(config);
 }
+//# sourceMappingURL=detection.js.map
@@ -135,3 +135,4 @@ export function formatDiagnosticReport(report) {
 lines.push(`${statusIcon} Overall Status: ${statusColor(report.overallStatus.toUpperCase())}`);
 return lines.join("\n");
 }
+//# sourceMappingURL=diagnostics.js.map
@@ -225,3 +225,4 @@ export const ERROR_KEYWORDS = {
 NETWORK: ["network"],
 ENDPOINT_NOT_FOUND: ["endpoint", "not found"],
 };
+//# sourceMappingURL=error-constants.js.map
@@ -297,3 +297,4 @@ export function getRetryDelay(error, attempt = 1) {
 // Default exponential backoff
 return 1000 * Math.pow(2, attempt - 1);
 }
+//# sourceMappingURL=errors.js.map
@@ -65,3 +65,4 @@ export async function validateSageMakerSetup() {
 * Default export for convenience
 */
 export { AmazonSageMakerProvider as default };
+//# sourceMappingURL=index.js.map
@@ -761,3 +761,4 @@ export class SageMakerLanguageModel {
 }
 }
 export default SageMakerLanguageModel;
+//# sourceMappingURL=language-model.js.map
@@ -632,3 +632,4 @@ export function estimateTokenUsage(prompt, completion) {
 total: promptTokens + completionTokens,
 };
 }
+//# sourceMappingURL=parsers.js.map
@@ -329,3 +329,4 @@ export function estimateTokenUsage(prompt, completion) {
 total: promptTokens + completionTokens,
 };
 }
+//# sourceMappingURL=streaming.js.map
@@ -623,3 +623,4 @@ export function extractSchemaFromResponseFormat(responseFormat) {
 }
 return undefined;
 }
+//# sourceMappingURL=structured-parser.js.map
@@ -283,3 +283,4 @@ export async function testAWSProxyConnectivity() {
 return false;
 }
 }
+//# sourceMappingURL=awsProxyIntegration.js.map
@@ -322,3 +322,4 @@ export function getProxyStatus() {
 ],
 };
 }
+//# sourceMappingURL=proxyFetch.js.map
@@ -147,3 +147,4 @@ export function shouldBypassProxySimple(targetUrl, noProxyValue) {
 return false;
 }
 }
+//# sourceMappingURL=noProxyUtils.js.map
@@ -2,6 +2,7 @@
 * NeuroLink SDK Tool Registration API
 * Simple interface for developers to register custom tools
 */
+import { z } from "zod";
 import { logger } from "../utils/logger.js";
 import { createMCPServerInfo } from "../utils/mcpDefaults.js";
 import { validateToolName, validateToolDescription, } from "../utils/parameterValidation.js";
@@ -364,3 +365,4 @@ export function suggestToolNames(baseName) {
 }
 return suggestions.slice(0, 5); // Limit to 5 suggestions
 }
+//# sourceMappingURL=toolRegistration.js.map
@@ -0,0 +1,57 @@
+/**
+ * OpenTelemetry Instrumentation for Langfuse v4
+ *
+ * Configures OpenTelemetry TracerProvider with LangfuseSpanProcessor to capture
+ * traces from Vercel AI SDK's experimental_telemetry feature.
+ *
+ * Flow: Vercel AI SDK → OpenTelemetry Spans → LangfuseSpanProcessor → Langfuse Platform
+ */
+import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
+import { LangfuseSpanProcessor } from "@langfuse/otel";
+import type { LangfuseConfig } from "../../../../types/observability.js";
+/**
+ * Initialize OpenTelemetry with Langfuse span processor
+ *
+ * This connects Vercel AI SDK's experimental_telemetry to Langfuse by:
+ * 1. Creating LangfuseSpanProcessor with Langfuse credentials
+ * 2. Creating a NodeTracerProvider with service metadata and span processor
+ * 3. Registering the provider globally for AI SDK to use
+ *
+ * @param config - Langfuse configuration passed from parent application
+ */
+export declare function initializeOpenTelemetry(config: LangfuseConfig): void;
+/**
+ * Flush all pending spans to Langfuse
+ */
+export declare function flushOpenTelemetry(): Promise<void>;
+/**
+ * Shutdown OpenTelemetry and Langfuse span processor
+ */
+export declare function shutdownOpenTelemetry(): Promise<void>;
+/**
+ * Get the Langfuse span processor
+ */
+export declare function getLangfuseSpanProcessor(): LangfuseSpanProcessor | null;
+/**
+ * Get the tracer provider
+ */
+export declare function getTracerProvider(): NodeTracerProvider | null;
+/**
+ * Check if OpenTelemetry is initialized
+ */
+export declare function isOpenTelemetryInitialized(): boolean;
+/**
+ * Get health status for Langfuse observability
+ */
+export declare function getLangfuseHealthStatus(): {
+isHealthy: boolean | undefined;
+initialized: boolean;
+credentialsValid: boolean;
+enabled: boolean;
+hasProcessor: boolean;
+config: {
+baseUrl: string;
+environment: string;
+release: string;
+} | undefined;
+};
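
The LangfuseConfig type imported here is declared in the new types/observability.d.ts (also added in this release but not shown in these hunks). A plausible shape, inferred only from the fields the implementation below reads and the fallbacks it applies:

// Inferred shape; the published declaration in types/observability.d.ts may differ.
interface LangfuseConfig {
  enabled: boolean;
  publicKey?: string;
  secretKey?: string;
  baseUrl?: string;     // implementation falls back to "https://cloud.langfuse.com"
  environment?: string; // falls back to "dev"
  release?: string;     // falls back to "v1.0.0"
}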
@@ -0,0 +1,171 @@
+/**
+ * OpenTelemetry Instrumentation for Langfuse v4
+ *
+ * Configures OpenTelemetry TracerProvider with LangfuseSpanProcessor to capture
+ * traces from Vercel AI SDK's experimental_telemetry feature.
+ *
+ * Flow: Vercel AI SDK → OpenTelemetry Spans → LangfuseSpanProcessor → Langfuse Platform
+ */
+import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
+import { LangfuseSpanProcessor } from "@langfuse/otel";
+import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION, } from "@opentelemetry/semantic-conventions";
+import { resourceFromAttributes } from "@opentelemetry/resources";
+import { logger } from "../../../../utils/logger.js";
+const LOG_PREFIX = "[OpenTelemetry]";
+let tracerProvider = null;
+let langfuseProcessor = null;
+let isInitialized = false;
+let isCredentialsValid = false;
+let currentConfig = null;
+/**
+ * Initialize OpenTelemetry with Langfuse span processor
+ *
+ * This connects Vercel AI SDK's experimental_telemetry to Langfuse by:
+ * 1. Creating LangfuseSpanProcessor with Langfuse credentials
+ * 2. Creating a NodeTracerProvider with service metadata and span processor
+ * 3. Registering the provider globally for AI SDK to use
+ *
+ * @param config - Langfuse configuration passed from parent application
+ */
+export function initializeOpenTelemetry(config) {
+if (isInitialized) {
+logger.debug(`${LOG_PREFIX} Already initialized`);
+return;
+}
+if (!config.enabled) {
+logger.debug(`${LOG_PREFIX} Langfuse disabled, skipping initialization`);
+isInitialized = true;
+return;
+}
+if (!config.publicKey || !config.secretKey) {
+logger.warn(`${LOG_PREFIX} Langfuse enabled but missing credentials, skipping initialization`);
+isInitialized = true;
+isCredentialsValid = false;
+return;
+}
+try {
+currentConfig = config;
+isCredentialsValid = true;
+// Create Langfuse span processor with configuration
+langfuseProcessor = new LangfuseSpanProcessor({
+publicKey: config.publicKey,
+secretKey: config.secretKey,
+baseUrl: config.baseUrl || "https://cloud.langfuse.com",
+environment: config.environment || "dev",
+release: config.release || "v1.0.0",
+});
+// Create resource with service metadata (v2.x API)
+const resource = resourceFromAttributes({
+[ATTR_SERVICE_NAME]: "neurolink",
+[ATTR_SERVICE_VERSION]: config.release || "v1.0.0",
+"deployment.environment": config.environment || "dev",
+});
+// Initialize tracer provider with span processor and resource
+tracerProvider = new NodeTracerProvider({
+resource,
+spanProcessors: [langfuseProcessor],
+});
+// Register provider globally so Vercel AI SDK can use it
+tracerProvider.register();
+isInitialized = true;
+logger.info(`${LOG_PREFIX} Initialized with Langfuse span processor`, {
+baseUrl: config.baseUrl || "https://cloud.langfuse.com",
+environment: config.environment || "dev",
+release: config.release || "v1.0.0",
+});
+}
+catch (error) {
+logger.error(`${LOG_PREFIX} Initialization failed`, {
+error: error instanceof Error ? error.message : String(error),
+stack: error instanceof Error ? error.stack : undefined,
+});
+throw error;
+}
+}
+/**
+ * Flush all pending spans to Langfuse
+ */
+export async function flushOpenTelemetry() {
+if (!isInitialized) {
+logger.debug(`${LOG_PREFIX} Not initialized, skipping flush`);
+return;
+}
+if (!langfuseProcessor) {
+logger.debug(`${LOG_PREFIX} No processor to flush (Langfuse disabled)`);
+return;
+}
+try {
+logger.info(`${LOG_PREFIX} Flushing pending spans to Langfuse...`);
+await langfuseProcessor.forceFlush();
+logger.info(`${LOG_PREFIX} Successfully flushed spans to Langfuse`);
+}
+catch (error) {
+logger.error(`${LOG_PREFIX} Flush failed`, {
+error: error instanceof Error ? error.message : String(error),
+stack: error instanceof Error ? error.stack : undefined,
+});
+throw error;
+}
+}
+/**
+ * Shutdown OpenTelemetry and Langfuse span processor
+ */
+export async function shutdownOpenTelemetry() {
+if (!isInitialized || !tracerProvider) {
+return;
+}
+try {
+await tracerProvider.shutdown();
+tracerProvider = null;
+langfuseProcessor = null;
+isInitialized = false;
+isCredentialsValid = false;
+logger.debug(`${LOG_PREFIX} Shutdown complete`);
+}
+catch (error) {
+logger.error(`${LOG_PREFIX} Shutdown failed`, {
+error: error instanceof Error ? error.message : String(error),
+});
+}
+}
+/**
+ * Get the Langfuse span processor
+ */
+export function getLangfuseSpanProcessor() {
+return langfuseProcessor;
+}
+/**
+ * Get the tracer provider
+ */
+export function getTracerProvider() {
+return tracerProvider;
+}
+/**
+ * Check if OpenTelemetry is initialized
+ */
+export function isOpenTelemetryInitialized() {
+return isInitialized;
+}
+/**
+ * Get health status for Langfuse observability
+ */
+export function getLangfuseHealthStatus() {
+return {
+isHealthy: currentConfig?.enabled &&
+isInitialized &&
+isCredentialsValid &&
+langfuseProcessor !== null,
+initialized: isInitialized,
+credentialsValid: isCredentialsValid,
+enabled: currentConfig?.enabled || false,
+hasProcessor: langfuseProcessor !== null,
+config: currentConfig
+? {
+baseUrl: currentConfig.baseUrl || "https://cloud.langfuse.com",
+environment: currentConfig.environment || "dev",
+release: currentConfig.release || "v1.0.0",
+}
+: undefined,
+};
+}
+//# sourceMappingURL=instrumentation.js.map
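
Taken together these helpers form a small lifecycle around the Vercel AI SDK's experimental_telemetry spans: initialize once, flush before exit, shut down when done. A hedged usage sketch, assuming the module is imported directly by its dist path (the public re-export surface is not shown in this diff):

// Minimal sketch; the import specifier is an assumption, the functions and config fields are the ones declared above.
import {
  initializeOpenTelemetry,
  flushOpenTelemetry,
  shutdownOpenTelemetry,
  getLangfuseHealthStatus,
} from "./instrumentation.js";

initializeOpenTelemetry({
  enabled: true,
  publicKey: process.env.LANGFUSE_PUBLIC_KEY,
  secretKey: process.env.LANGFUSE_SECRET_KEY,
  baseUrl: "https://cloud.langfuse.com",
  environment: "dev",
  release: "v1.0.0",
});

// ... run AI SDK calls with experimental_telemetry enabled; spans flow to the registered provider ...

console.log(getLangfuseHealthStatus()); // { isHealthy, initialized, credentialsValid, enabled, hasProcessor, config }
await flushOpenTelemetry();   // push pending spans to Langfuse before the process exits
await shutdownOpenTelemetry();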
@@ -1,5 +1,31 @@
 import { nanoid } from "nanoid";
 import { NeuroLink } from "../neurolink.js";
+/**
+ * Build observability config from environment variables
+ * Used by CLI to configure NeuroLink instances
+ */
+function buildObservabilityConfigFromEnv() {
+const langfuseEnabled = process.env.LANGFUSE_ENABLED?.trim().toLowerCase() === "true";
+const publicKey = process.env.LANGFUSE_PUBLIC_KEY?.trim();
+const secretKey = process.env.LANGFUSE_SECRET_KEY?.trim();
+if (!langfuseEnabled || !publicKey || !secretKey) {
+return undefined;
+}
+return {
+langfuse: {
+enabled: langfuseEnabled,
+publicKey,
+secretKey,
+baseUrl: process.env.LANGFUSE_BASE_URL?.trim() || "https://cloud.langfuse.com",
+environment: process.env.LANGFUSE_ENVIRONMENT?.trim() ||
+process.env.PUBLIC_APP_ENVIRONMENT?.trim() ||
+"dev",
+release: process.env.PUBLIC_APP_VERSION?.trim() ||
+process.env.npm_package_version?.trim() ||
+"v1.0.0",
+},
+};
+}
 export class GlobalSessionManager {
 static instance;
 loopSession = null;
@@ -19,6 +45,11 @@ export class GlobalSessionManager {
 maxTurnsPerSession: config.maxTurnsPerSession,
 };
 }
+// Add observability config from environment variables (CLI usage)
+const observabilityConfig = buildObservabilityConfigFromEnv();
+if (observabilityConfig) {
+neurolinkOptions.observability = observabilityConfig;
+}
 this.loopSession = {
 neurolinkInstance: new NeuroLink(neurolinkOptions),
 sessionId,
@@ -88,7 +119,12 @@ export class GlobalSessionManager {
 }
 getOrCreateNeuroLink() {
 const session = this.getLoopSession();
-return session ? session.neurolinkInstance : new NeuroLink();
+if (session) {
+return session.neurolinkInstance;
+}
+// Create new NeuroLink with observability config from environment (CLI usage)
+const observabilityConfig = buildObservabilityConfigFromEnv();
+return new NeuroLink(observabilityConfig ? { observability: observabilityConfig } : undefined);
 }
 getCurrentSessionId() {
 return this.getLoopSession()?.sessionId;
@@ -124,3 +160,4 @@ export class GlobalSessionManager {
 }
 }
 export const globalSession = GlobalSessionManager.getInstance();
+//# sourceMappingURL=globalSessionState.js.map
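
For CLI sessions the manager now reads LANGFUSE_ENABLED, LANGFUSE_PUBLIC_KEY, and LANGFUSE_SECRET_KEY from the environment; SDK callers can pass the equivalent configuration explicitly. A minimal sketch of the resulting constructor call, assuming the package root re-exports NeuroLink and reusing the same fallbacks as buildObservabilityConfigFromEnv above:

// Sketch only; the root export path is an assumption, the option shape follows the new NeuroLink(neurolinkOptions) call above.
import { NeuroLink } from "@juspay/neurolink";

const neurolink = new NeuroLink({
  observability: {
    langfuse: {
      enabled: true,
      publicKey: process.env.LANGFUSE_PUBLIC_KEY,
      secretKey: process.env.LANGFUSE_SECRET_KEY,
      baseUrl: process.env.LANGFUSE_BASE_URL || "https://cloud.langfuse.com",
      environment: process.env.LANGFUSE_ENVIRONMENT || "dev",
      release: process.env.PUBLIC_APP_VERSION || "v1.0.0",
    },
  },
});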
@@ -9,6 +9,7 @@ export declare function initializeTelemetry(): Promise<import("./telemetryServic
 */
 export declare function getTelemetryStatus(): Promise<{
 enabled: boolean;
+initialized: boolean;
 endpoint?: string;
 service?: string;
 version?: string;
@@ -21,3 +21,4 @@ export async function getTelemetryStatus() {
 const { TelemetryService } = await import("./telemetryService.js");
 return TelemetryService.getInstance().getStatus();
 }
+//# sourceMappingURL=index.js.map
@@ -10,6 +10,7 @@ export declare class TelemetryService {
 private static instance;
 private sdk?;
 private enabled;
+private initialized;
 private meter?;
 private tracer?;
 private aiRequestCounter?;
@@ -43,6 +44,7 @@ export declare class TelemetryService {
 isEnabled(): boolean;
 getStatus(): {
 enabled: boolean;
+initialized: boolean;
 endpoint?: string;
 service?: string;
 version?: string;
@@ -1,14 +1,14 @@
 import { NodeSDK } from "@opentelemetry/sdk-node";
 import { metrics, trace, } from "@opentelemetry/api";
 import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
-import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-http";
-import { Resource } from "@opentelemetry/resources";
+import { resourceFromAttributes } from "@opentelemetry/resources";
 import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION, } from "@opentelemetry/semantic-conventions";
 import { logger } from "../utils/logger.js";
 export class TelemetryService {
 static instance;
 sdk;
 enabled = false;
+initialized = false;
 meter;
 tracer;
 // Optional Metrics (only created when enabled)
@@ -47,16 +47,12 @@ export class TelemetryService {
 }
 initializeTelemetry() {
 try {
-const resource = new Resource({
+const resource = resourceFromAttributes({
 [ATTR_SERVICE_NAME]: process.env.OTEL_SERVICE_NAME || "neurolink-ai",
 [ATTR_SERVICE_VERSION]: process.env.OTEL_SERVICE_VERSION || "3.0.1",
 });
 this.sdk = new NodeSDK({
 resource,
-traceExporter: new OTLPTraceExporter({
-url: process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT ||
-`${process.env.OTEL_EXPORTER_OTLP_ENDPOINT}/v1/traces`,
-}),
 // Note: Metric reader configured separately
 instrumentations: [getNodeAutoInstrumentations()],
 });
@@ -102,11 +98,13 @@ export class TelemetryService {
 }
 try {
 await this.sdk?.start();
+this.initialized = true;
 logger.debug("[Telemetry] SDK started successfully");
 }
 catch (error) {
 logger.error("[Telemetry] Failed to start SDK:", error);
 this.enabled = false;
+this.initialized = false;
 }
 }
 // AI Operation Tracing (NO-OP when disabled)
@@ -250,6 +248,7 @@ export class TelemetryService {
 getStatus() {
 return {
 enabled: this.enabled,
+initialized: this.initialized,
 endpoint: process.env.OTEL_EXPORTER_OTLP_ENDPOINT,
 service: process.env.OTEL_SERVICE_NAME || "neurolink-ai",
 version: process.env.OTEL_SERVICE_VERSION || "3.0.1",
@@ -285,6 +284,7 @@ export class TelemetryService {
 if (this.enabled && this.sdk) {
 try {
 await this.sdk.shutdown();
+this.initialized = false;
 logger.debug("[Telemetry] SDK shutdown completed");
 }
 catch (error) {
@@ -293,3 +293,4 @@ export class TelemetryService {
 }
 }
 }
+//# sourceMappingURL=telemetryService.js.map
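
getStatus() now distinguishes the enabled flag (telemetry requested via configuration) from initialized (the NodeSDK actually started). A hedged sketch of how a caller might use that distinction; the import specifier is an assumption, while the function and fields are the ones declared in the hunks above:

// Sketch only; the dist import path is an assumption.
import { getTelemetryStatus } from "@juspay/neurolink/dist/telemetry/index.js";

const status = await getTelemetryStatus();
if (status.enabled && !status.initialized) {
  // Telemetry was requested but NodeSDK.start() has not completed or failed.
  console.warn("Telemetry enabled but not initialized", status);
}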
@@ -3,3 +3,4 @@
 * Comprehensive usage tracking, performance metrics, and cost analysis types
 */
 export {};
+//# sourceMappingURL=analytics.js.map
@@ -19,3 +19,4 @@ export function isCommandResult(value) {
 "success" in value &&
 typeof value.success === "boolean");
 }
+//# sourceMappingURL=cli.js.map
@@ -49,3 +49,4 @@ export function toErrorInfo(error) {
 message: getErrorMessage(error),
 };
 }
+//# sourceMappingURL=common.js.map
@@ -47,3 +47,4 @@ export const DEFAULT_CONFIG = {
 },
 configVersion: "3.0.1",
 };
+//# sourceMappingURL=configTypes.js.map