@juspay/neurolink 8.24.0 → 8.25.0

This diff shows the changes between publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
1
+ ## [8.25.0](https://github.com/juspay/neurolink/compare/v8.24.0...v8.25.0) (2025-12-30)
2
+
3
+ ### Features
4
+
5
+ - **(observability):** Add support for custom metadata in Context ([b175249](https://github.com/juspay/neurolink/commit/b175249c61357b0e6d127932bd7824d0bfe6f2ed))
6
+
1
7
  ## [8.24.0](https://github.com/juspay/neurolink/compare/v8.23.2...v8.24.0) (2025-12-28)
2
8
 
3
9
  ### Features
@@ -7,6 +7,7 @@ import type { MiddlewareFactoryOptions } from "../types/middlewareTypes.js";
7
7
  import type { StreamOptions, StreamResult } from "../types/streamTypes.js";
8
8
  import type { UnknownRecord } from "../types/common.js";
9
9
  import type { NeuroLink } from "../neurolink.js";
10
+ import { TelemetryHandler } from "./modules/TelemetryHandler.js";
10
11
  /**
11
12
  * Abstract base class for all AI providers
12
13
  * Tools are integrated as first-class citizens - always available by default
@@ -26,7 +27,7 @@ export declare abstract class BaseProvider implements AIProvider {
26
27
  private readonly messageBuilder;
27
28
  private readonly streamHandler;
28
29
  private readonly generationHandler;
29
- private readonly telemetryHandler;
30
+ protected readonly telemetryHandler: TelemetryHandler;
30
31
  private readonly utilities;
31
32
  private readonly toolsManager;
32
33
  constructor(modelName?: string, providerName?: AIProviderName, neurolink?: NeuroLink, middleware?: MiddlewareFactoryOptions);
@@ -260,13 +261,4 @@ export declare abstract class BaseProvider implements AIProvider {
260
261
  * @returns Array of prompt chunks
261
262
  */
262
263
  static chunkPrompt(prompt: string, maxChunkSize?: number, overlap?: number): string[];
263
- /**
264
- * Create telemetry configuration for Vercel AI SDK experimental_telemetry
265
- * This enables automatic OpenTelemetry tracing when telemetry is enabled
266
- */
267
- protected getStreamTelemetryConfig(options: StreamOptions | TextGenerationOptions, operationType?: "stream" | "generate"): {
268
- isEnabled: boolean;
269
- functionId?: string;
270
- metadata?: Record<string, string | number | boolean>;
271
- } | undefined;
272
264
  }
@@ -48,7 +48,7 @@ export class BaseProvider {
48
48
  // Initialize composition modules
49
49
  this.messageBuilder = new MessageBuilder(this.providerName, this.modelName);
50
50
  this.streamHandler = new StreamHandler(this.providerName, this.modelName);
51
- this.generationHandler = new GenerationHandler(this.providerName, this.modelName, () => this.supportsTools(), (options, type) => this.getStreamTelemetryConfig(options, type), (toolCalls, toolResults, options, timestamp) => this.handleToolExecutionStorage(toolCalls, toolResults, options, timestamp));
51
+ this.generationHandler = new GenerationHandler(this.providerName, this.modelName, () => this.supportsTools(), (options, type) => this.telemetryHandler.getTelemetryConfig(options, type), (toolCalls, toolResults, options, timestamp) => this.handleToolExecutionStorage(toolCalls, toolResults, options, timestamp));
52
52
  this.telemetryHandler = new TelemetryHandler(this.providerName, this.modelName, this.neurolink);
53
53
  this.utilities = new Utilities(this.providerName, this.modelName, this.defaultTimeout, this.middlewareOptions);
54
54
  this.toolsManager = new ToolsManager(this.providerName, this.directTools, this.neurolink, {
@@ -726,40 +726,4 @@ export class BaseProvider {
726
726
  }
727
727
  return chunks;
728
728
  }
729
- /**
730
- * Create telemetry configuration for Vercel AI SDK experimental_telemetry
731
- * This enables automatic OpenTelemetry tracing when telemetry is enabled
732
- */
733
- getStreamTelemetryConfig(options, operationType = "stream") {
734
- // Check if telemetry is enabled via NeuroLink observability config
735
- if (!this.neurolink?.isTelemetryEnabled()) {
736
- return undefined;
737
- }
738
- const context = options.context;
739
- const traceName = context?.traceName;
740
- const userId = context?.userId;
741
- const functionId = traceName ? traceName : userId ? userId : "guest";
742
- const metadata = {
743
- provider: this.providerName,
744
- model: this.modelName,
745
- toolsEnabled: !options.disableTools,
746
- neurolink: true,
747
- operationType,
748
- originalProvider: this.providerName,
749
- };
750
- // Add sessionId if available
751
- if ("sessionId" in options && options.sessionId) {
752
- const sessionId = options.sessionId;
753
- if (typeof sessionId === "string" ||
754
- typeof sessionId === "number" ||
755
- typeof sessionId === "boolean") {
756
- metadata.sessionId = sessionId;
757
- }
758
- }
759
- return {
760
- isEnabled: true,
761
- functionId,
762
- metadata,
763
- };
764
- }
765
729
  }
@@ -50,9 +50,10 @@ export declare class TelemetryHandler {
50
50
  totalTokens?: number;
51
51
  }): Promise<number>;
52
52
  /**
53
- * Get telemetry configuration for streaming/generation
53
+ * Create telemetry configuration for Vercel AI SDK experimental_telemetry
54
+ * This enables automatic OpenTelemetry tracing when telemetry is enabled
54
55
  */
55
- getStreamTelemetryConfig(options: StreamOptions | TextGenerationOptions, operationType?: "stream" | "generate"): {
56
+ getTelemetryConfig(options: StreamOptions | TextGenerationOptions, operationType?: "stream" | "generate"): {
56
57
  isEnabled: boolean;
57
58
  functionId?: string;
58
59
  metadata?: Record<string, string | number | boolean>;
@@ -113,9 +113,10 @@ export class TelemetryHandler {
113
113
  }
114
114
  }
115
115
  /**
116
- * Get telemetry configuration for streaming/generation
116
+ * Create telemetry configuration for Vercel AI SDK experimental_telemetry
117
+ * This enables automatic OpenTelemetry tracing when telemetry is enabled
117
118
  */
118
- getStreamTelemetryConfig(options, operationType = "stream") {
119
+ getTelemetryConfig(options, operationType = "stream") {
119
120
  // Check if telemetry is enabled via NeuroLink observability config
120
121
  if (!this.neurolink?.isTelemetryEnabled()) {
121
122
  return undefined;
@@ -125,6 +126,7 @@ export class TelemetryHandler {
125
126
  const userId = context?.userId;
126
127
  const functionId = traceName ? traceName : userId ? userId : "guest";
127
128
  const metadata = {
129
+ ...(context?.metadata || {}),
128
130
  provider: this.providerName,
129
131
  model: this.modelName,
130
132
  toolsEnabled: !options.disableTools,
@@ -7,6 +7,7 @@ import type { MiddlewareFactoryOptions } from "../types/middlewareTypes.js";
7
7
  import type { StreamOptions, StreamResult } from "../types/streamTypes.js";
8
8
  import type { UnknownRecord } from "../types/common.js";
9
9
  import type { NeuroLink } from "../neurolink.js";
10
+ import { TelemetryHandler } from "./modules/TelemetryHandler.js";
10
11
  /**
11
12
  * Abstract base class for all AI providers
12
13
  * Tools are integrated as first-class citizens - always available by default
@@ -26,7 +27,7 @@ export declare abstract class BaseProvider implements AIProvider {
26
27
  private readonly messageBuilder;
27
28
  private readonly streamHandler;
28
29
  private readonly generationHandler;
29
- private readonly telemetryHandler;
30
+ protected readonly telemetryHandler: TelemetryHandler;
30
31
  private readonly utilities;
31
32
  private readonly toolsManager;
32
33
  constructor(modelName?: string, providerName?: AIProviderName, neurolink?: NeuroLink, middleware?: MiddlewareFactoryOptions);
@@ -260,13 +261,4 @@ export declare abstract class BaseProvider implements AIProvider {
260
261
  * @returns Array of prompt chunks
261
262
  */
262
263
  static chunkPrompt(prompt: string, maxChunkSize?: number, overlap?: number): string[];
263
- /**
264
- * Create telemetry configuration for Vercel AI SDK experimental_telemetry
265
- * This enables automatic OpenTelemetry tracing when telemetry is enabled
266
- */
267
- protected getStreamTelemetryConfig(options: StreamOptions | TextGenerationOptions, operationType?: "stream" | "generate"): {
268
- isEnabled: boolean;
269
- functionId?: string;
270
- metadata?: Record<string, string | number | boolean>;
271
- } | undefined;
272
264
  }
@@ -48,7 +48,7 @@ export class BaseProvider {
48
48
  // Initialize composition modules
49
49
  this.messageBuilder = new MessageBuilder(this.providerName, this.modelName);
50
50
  this.streamHandler = new StreamHandler(this.providerName, this.modelName);
51
- this.generationHandler = new GenerationHandler(this.providerName, this.modelName, () => this.supportsTools(), (options, type) => this.getStreamTelemetryConfig(options, type), (toolCalls, toolResults, options, timestamp) => this.handleToolExecutionStorage(toolCalls, toolResults, options, timestamp));
51
+ this.generationHandler = new GenerationHandler(this.providerName, this.modelName, () => this.supportsTools(), (options, type) => this.telemetryHandler.getTelemetryConfig(options, type), (toolCalls, toolResults, options, timestamp) => this.handleToolExecutionStorage(toolCalls, toolResults, options, timestamp));
52
52
  this.telemetryHandler = new TelemetryHandler(this.providerName, this.modelName, this.neurolink);
53
53
  this.utilities = new Utilities(this.providerName, this.modelName, this.defaultTimeout, this.middlewareOptions);
54
54
  this.toolsManager = new ToolsManager(this.providerName, this.directTools, this.neurolink, {
@@ -726,41 +726,5 @@ export class BaseProvider {
726
726
  }
727
727
  return chunks;
728
728
  }
729
- /**
730
- * Create telemetry configuration for Vercel AI SDK experimental_telemetry
731
- * This enables automatic OpenTelemetry tracing when telemetry is enabled
732
- */
733
- getStreamTelemetryConfig(options, operationType = "stream") {
734
- // Check if telemetry is enabled via NeuroLink observability config
735
- if (!this.neurolink?.isTelemetryEnabled()) {
736
- return undefined;
737
- }
738
- const context = options.context;
739
- const traceName = context?.traceName;
740
- const userId = context?.userId;
741
- const functionId = traceName ? traceName : userId ? userId : "guest";
742
- const metadata = {
743
- provider: this.providerName,
744
- model: this.modelName,
745
- toolsEnabled: !options.disableTools,
746
- neurolink: true,
747
- operationType,
748
- originalProvider: this.providerName,
749
- };
750
- // Add sessionId if available
751
- if ("sessionId" in options && options.sessionId) {
752
- const sessionId = options.sessionId;
753
- if (typeof sessionId === "string" ||
754
- typeof sessionId === "number" ||
755
- typeof sessionId === "boolean") {
756
- metadata.sessionId = sessionId;
757
- }
758
- }
759
- return {
760
- isEnabled: true,
761
- functionId,
762
- metadata,
763
- };
764
- }
765
729
  }
766
730
  //# sourceMappingURL=baseProvider.js.map
@@ -50,9 +50,10 @@ export declare class TelemetryHandler {
50
50
  totalTokens?: number;
51
51
  }): Promise<number>;
52
52
  /**
53
- * Get telemetry configuration for streaming/generation
53
+ * Create telemetry configuration for Vercel AI SDK experimental_telemetry
54
+ * This enables automatic OpenTelemetry tracing when telemetry is enabled
54
55
  */
55
- getStreamTelemetryConfig(options: StreamOptions | TextGenerationOptions, operationType?: "stream" | "generate"): {
56
+ getTelemetryConfig(options: StreamOptions | TextGenerationOptions, operationType?: "stream" | "generate"): {
56
57
  isEnabled: boolean;
57
58
  functionId?: string;
58
59
  metadata?: Record<string, string | number | boolean>;
@@ -113,9 +113,10 @@ export class TelemetryHandler {
113
113
  }
114
114
  }
115
115
  /**
116
- * Get telemetry configuration for streaming/generation
116
+ * Create telemetry configuration for Vercel AI SDK experimental_telemetry
117
+ * This enables automatic OpenTelemetry tracing when telemetry is enabled
117
118
  */
118
- getStreamTelemetryConfig(options, operationType = "stream") {
119
+ getTelemetryConfig(options, operationType = "stream") {
119
120
  // Check if telemetry is enabled via NeuroLink observability config
120
121
  if (!this.neurolink?.isTelemetryEnabled()) {
121
122
  return undefined;
@@ -125,6 +126,7 @@ export class TelemetryHandler {
125
126
  const userId = context?.userId;
126
127
  const functionId = traceName ? traceName : userId ? userId : "guest";
127
128
  const metadata = {
129
+ ...(context?.metadata || {}),
128
130
  provider: this.providerName,
129
131
  model: this.modelName,
130
132
  toolsEnabled: !options.disableTools,
@@ -104,7 +104,7 @@ export class AnthropicProvider extends BaseProvider {
104
104
  maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
105
105
  toolChoice: shouldUseTools ? "auto" : "none",
106
106
  abortSignal: timeoutController?.controller.signal,
107
- experimental_telemetry: this.getStreamTelemetryConfig(options),
107
+ experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
108
108
  onStepFinish: ({ toolCalls, toolResults }) => {
109
109
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
110
110
  logger.warn("[AnthropicProvider] Failed to store tool executions", {
@@ -123,7 +123,7 @@ export class AzureOpenAIProvider extends BaseProvider {
123
123
  : {}),
124
124
  tools,
125
125
  toolChoice: shouldUseTools ? "auto" : "none",
126
- experimental_telemetry: this.getStreamTelemetryConfig(options),
126
+ experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
127
127
  onStepFinish: ({ toolCalls, toolResults }) => {
128
128
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
129
129
  logger.warn("[AzureOpenaiProvider] Failed to store tool executions", {
@@ -129,7 +129,7 @@ export class GoogleAIStudioProvider extends BaseProvider {
129
129
  maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
130
130
  toolChoice: shouldUseTools ? "auto" : "none",
131
131
  abortSignal: timeoutController?.controller.signal,
132
- experimental_telemetry: this.getStreamTelemetryConfig(options),
132
+ experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
133
133
  onStepFinish: ({ toolCalls, toolResults }) => {
134
134
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
135
135
  logger.warn("[GoogleAiStudioProvider] Failed to store tool executions", {
@@ -671,7 +671,7 @@ export class GoogleVertexProvider extends BaseProvider {
671
671
  maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
672
672
  }),
673
673
  abortSignal: timeoutController?.controller.signal,
674
- experimental_telemetry: this.getStreamTelemetryConfig(options),
674
+ experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
675
675
  onError: (event) => {
676
676
  const error = event.error;
677
677
  const errorMessage = error instanceof Error ? error.message : String(error);
@@ -273,7 +273,7 @@ export class OpenAIProvider extends BaseProvider {
273
273
  maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
274
274
  toolChoice: shouldUseTools && Object.keys(tools).length > 0 ? "auto" : "none",
275
275
  abortSignal: timeoutController?.controller.signal,
276
- experimental_telemetry: this.getStreamTelemetryConfig(options),
276
+ experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
277
277
  onStepFinish: ({ toolCalls, toolResults }) => {
278
278
  logger.info("Tool execution completed", { toolResults, toolCalls });
279
279
  // Handle tool execution storage
@@ -133,4 +133,5 @@ export type Context = {
133
133
  traceName?: string;
134
134
  userId?: string;
135
135
  sessionId?: string;
136
+ metadata?: Record<string, string | number | boolean>;
136
137
  };
@@ -104,7 +104,7 @@ export class AnthropicProvider extends BaseProvider {
104
104
  maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
105
105
  toolChoice: shouldUseTools ? "auto" : "none",
106
106
  abortSignal: timeoutController?.controller.signal,
107
- experimental_telemetry: this.getStreamTelemetryConfig(options),
107
+ experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
108
108
  onStepFinish: ({ toolCalls, toolResults }) => {
109
109
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
110
110
  logger.warn("[AnthropicProvider] Failed to store tool executions", {
@@ -123,7 +123,7 @@ export class AzureOpenAIProvider extends BaseProvider {
123
123
  : {}),
124
124
  tools,
125
125
  toolChoice: shouldUseTools ? "auto" : "none",
126
- experimental_telemetry: this.getStreamTelemetryConfig(options),
126
+ experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
127
127
  onStepFinish: ({ toolCalls, toolResults }) => {
128
128
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
129
129
  logger.warn("[AzureOpenaiProvider] Failed to store tool executions", {
@@ -129,7 +129,7 @@ export class GoogleAIStudioProvider extends BaseProvider {
129
129
  maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
130
130
  toolChoice: shouldUseTools ? "auto" : "none",
131
131
  abortSignal: timeoutController?.controller.signal,
132
- experimental_telemetry: this.getStreamTelemetryConfig(options),
132
+ experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
133
133
  onStepFinish: ({ toolCalls, toolResults }) => {
134
134
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
135
135
  logger.warn("[GoogleAiStudioProvider] Failed to store tool executions", {
@@ -671,7 +671,7 @@ export class GoogleVertexProvider extends BaseProvider {
671
671
  maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
672
672
  }),
673
673
  abortSignal: timeoutController?.controller.signal,
674
- experimental_telemetry: this.getStreamTelemetryConfig(options),
674
+ experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
675
675
  onError: (event) => {
676
676
  const error = event.error;
677
677
  const errorMessage = error instanceof Error ? error.message : String(error);
@@ -273,7 +273,7 @@ export class OpenAIProvider extends BaseProvider {
273
273
  maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
274
274
  toolChoice: shouldUseTools && Object.keys(tools).length > 0 ? "auto" : "none",
275
275
  abortSignal: timeoutController?.controller.signal,
276
- experimental_telemetry: this.getStreamTelemetryConfig(options),
276
+ experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
277
277
  onStepFinish: ({ toolCalls, toolResults }) => {
278
278
  logger.info("Tool execution completed", { toolResults, toolCalls });
279
279
  // Handle tool execution storage
@@ -133,4 +133,5 @@ export type Context = {
133
133
  traceName?: string;
134
134
  userId?: string;
135
135
  sessionId?: string;
136
+ metadata?: Record<string, string | number | boolean>;
136
137
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@juspay/neurolink",
3
- "version": "8.24.0",
3
+ "version": "8.25.0",
4
4
  "description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 13 providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
5
5
  "author": {
6
6
  "name": "Juspay Technologies",