@juspay/neurolink 9.28.0 → 9.29.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/CHANGELOG.md +12 -0
  2. package/README.md +19 -16
  3. package/dist/adapters/providerImageAdapter.js +9 -1
  4. package/dist/constants/contextWindows.js +18 -7
  5. package/dist/constants/enums.d.ts +36 -6
  6. package/dist/constants/enums.js +38 -5
  7. package/dist/constants/tokens.d.ts +4 -0
  8. package/dist/constants/tokens.js +4 -0
  9. package/dist/context/contextCompactor.js +2 -2
  10. package/dist/core/baseProvider.d.ts +6 -2
  11. package/dist/core/baseProvider.js +61 -1
  12. package/dist/lib/adapters/providerImageAdapter.js +9 -1
  13. package/dist/lib/constants/contextWindows.js +18 -7
  14. package/dist/lib/constants/enums.d.ts +36 -6
  15. package/dist/lib/constants/enums.js +38 -5
  16. package/dist/lib/constants/tokens.d.ts +4 -0
  17. package/dist/lib/constants/tokens.js +4 -0
  18. package/dist/lib/context/contextCompactor.js +2 -2
  19. package/dist/lib/core/baseProvider.d.ts +6 -2
  20. package/dist/lib/core/baseProvider.js +61 -1
  21. package/dist/lib/memory/hippocampusInitializer.d.ts +2 -2
  22. package/dist/lib/neurolink.js +3 -3
  23. package/dist/lib/observability/exporters/langfuseExporter.d.ts +1 -0
  24. package/dist/lib/observability/exporters/langfuseExporter.js +8 -2
  25. package/dist/lib/observability/exporters/langsmithExporter.js +25 -5
  26. package/dist/lib/observability/otelBridge.js +1 -1
  27. package/dist/lib/observability/types/exporterTypes.d.ts +7 -0
  28. package/dist/lib/observability/utils/spanSerializer.js +3 -1
  29. package/dist/lib/providers/amazonBedrock.js +1 -1
  30. package/dist/lib/providers/anthropic.js +1 -1
  31. package/dist/lib/types/conversation.d.ts +2 -2
  32. package/dist/lib/types/index.d.ts +1 -1
  33. package/dist/lib/types/providers.d.ts +6 -3
  34. package/dist/memory/hippocampusInitializer.d.ts +2 -2
  35. package/dist/neurolink.js +3 -3
  36. package/dist/observability/exporters/langfuseExporter.d.ts +1 -0
  37. package/dist/observability/exporters/langfuseExporter.js +8 -2
  38. package/dist/observability/exporters/langsmithExporter.js +25 -5
  39. package/dist/observability/otelBridge.js +1 -1
  40. package/dist/observability/types/exporterTypes.d.ts +7 -0
  41. package/dist/observability/utils/spanSerializer.js +3 -1
  42. package/dist/providers/amazonBedrock.js +1 -1
  43. package/dist/providers/anthropic.js +1 -1
  44. package/dist/types/conversation.d.ts +2 -2
  45. package/dist/types/index.d.ts +1 -1
  46. package/dist/types/providers.d.ts +6 -3
  47. package/package.json +5 -3
@@ -3,6 +3,19 @@
3
3
  * Exports spans to LangSmith observability platform
4
4
  */
5
5
  import { logger } from "../../utils/logger.js";
6
+ /**
7
+ * Build a LangSmith dotted_order value: "{datetime}.{id}"
8
+ * Format: YYYYMMDDTHHmmssSSSSSSZ.<id> (datetime in UTC with microseconds)
9
+ */
10
+ function buildDottedOrder(isoTime, id) {
11
+ const d = new Date(isoTime);
12
+ const pad2 = (n) => String(n).padStart(2, "0");
13
+ const microseconds = String(d.getUTCMilliseconds() * 1000).padStart(6, "0");
14
+ const dt = `${d.getUTCFullYear()}${pad2(d.getUTCMonth() + 1)}${pad2(d.getUTCDate())}T` +
15
+ `${pad2(d.getUTCHours())}${pad2(d.getUTCMinutes())}${pad2(d.getUTCSeconds())}` +
16
+ `${microseconds}Z`;
17
+ return `${dt}.${id}`;
18
+ }
6
19
  import { SpanSerializer } from "../utils/spanSerializer.js";
7
20
  import { BaseExporter } from "./baseExporter.js";
8
21
  /**
@@ -67,17 +80,24 @@ export class LangSmithExporter extends BaseExporter {
67
80
  async exportBatch(spans) {
68
81
  const startTime = Date.now();
69
82
  try {
70
- const runs = spans.map((s) => ({
71
- ...SpanSerializer.toLangSmithFormat(s),
72
- session_name: this.projectName,
73
- }));
83
+ const post = spans.map((s) => {
84
+ const run = SpanSerializer.toLangSmithFormat(s);
85
+ // LangSmith /api/v1/runs/batch requires dotted_order and trace_id on each run
86
+ const dotted_order = buildDottedOrder(run.start_time ?? s.startTime, run.id);
87
+ return {
88
+ ...run,
89
+ trace_id: run.trace_id ?? run.id,
90
+ dotted_order,
91
+ session_name: this.projectName,
92
+ };
93
+ });
74
94
  const response = await fetch(`${this.endpoint}/api/v1/runs/batch`, {
75
95
  method: "POST",
76
96
  headers: {
77
97
  "Content-Type": "application/json",
78
98
  "x-api-key": this.apiKey,
79
99
  },
80
- body: JSON.stringify({ runs }),
100
+ body: JSON.stringify({ post }),
81
101
  });
82
102
  if (!response.ok) {
83
103
  throw new Error(`Batch export failed: ${response.statusText}`);
@@ -30,7 +30,7 @@ export class OtelBridge {
30
30
  * Create a NeuroLink span from OpenTelemetry context
31
31
  */
32
32
  createSpanFromOtelContext(spanContext, type, name) {
33
- return SpanSerializer.createSpan(type, name, {}, undefined, spanContext.traceId);
33
+ return SpanSerializer.createSpan(type, name, {}, spanContext.spanId, spanContext.traceId);
34
34
  }
35
35
  /**
36
36
  * Wrap a function with OpenTelemetry tracing that also creates NeuroLink spans
@@ -64,6 +64,13 @@ export type LangfuseExporterConfig = ExporterConfig & {
64
64
  secretKey: string;
65
65
  baseUrl?: string;
66
66
  release?: string;
67
+ /**
68
+ * When true, `input` and `output` fields are omitted from exported spans and
69
+ * generations. Enable in compliance-sensitive deployments where prompt/response
70
+ * content is considered PII or subject to data-minimisation requirements.
71
+ * Defaults to false (input/output are exported).
72
+ */
73
+ redactIO?: boolean;
67
74
  };
68
75
  /**
69
76
  * LangSmith exporter configuration
@@ -105,7 +105,9 @@ export class SpanSerializer {
105
105
  startTime: span.startTime,
106
106
  endTime: span.endTime,
107
107
  // Only pick safe, non-PII attributes for metadata — intentionally excludes
108
- // input, output, error.stack, and other user content for PII safety
108
+ // error.stack and other internal fields. input/output are exported here for
109
+ // Langfuse tracing; use LangfuseExporterConfig.redactIO=true to suppress them
110
+ // in compliance-sensitive deployments.
109
111
  metadata: filterSafeMetadata(span.attributes),
110
112
  level: span.status === SpanStatus.ERROR ? "ERROR" : "DEFAULT",
111
113
  statusMessage: span.statusMessage,
@@ -88,7 +88,7 @@ export class AmazonBedrockProvider extends BaseProvider {
88
88
  return "bedrock";
89
89
  }
90
90
  getDefaultModel() {
91
- return (process.env.BEDROCK_MODEL || "anthropic.claude-3-sonnet-20240229-v1:0");
91
+ return process.env.BEDROCK_MODEL || "anthropic.claude-sonnet-4-6";
92
92
  }
93
93
  /**
94
94
  * Get the default embedding model for Amazon Bedrock
@@ -213,7 +213,7 @@ const getAnthropicApiKey = () => {
213
213
  return validateApiKey(createAnthropicConfig());
214
214
  };
215
215
  const getDefaultAnthropicModel = () => {
216
- return getProviderModel("ANTHROPIC_MODEL", AnthropicModels.CLAUDE_3_5_SONNET);
216
+ return getProviderModel("ANTHROPIC_MODEL", AnthropicModels.CLAUDE_SONNET_4_6);
217
217
  };
218
218
  const streamTracer = trace.getTracer("neurolink.provider.anthropic");
219
219
  /**
@@ -33,8 +33,8 @@
33
33
  * - Current time (ISO): `new Date().toISOString()`
34
34
  */
35
35
  import type { Mem0Config } from "../memory/mem0Initializer.js";
36
- import type { Memory, CustomStorageConfig } from "../memory/hippocampusInitializer.js";
37
- export type { Memory, CustomStorageConfig };
36
+ import type { Memory, StorageConfig } from "../memory/hippocampusInitializer.js";
37
+ export type { Memory, StorageConfig };
38
38
  /**
39
39
  * Configuration for conversation memory feature
40
40
  */
@@ -38,5 +38,5 @@ export * from "./contextTypes.js";
38
38
  export * from "./fileReferenceTypes.js";
39
39
  export * from "./ragTypes.js";
40
40
  export * from "./conversationMemoryInterface.js";
41
- export type { CustomStorageConfig } from "./conversation.js";
41
+ export type { StorageConfig } from "./conversation.js";
42
42
  export * from "./subscriptionTypes.js";
@@ -335,11 +335,14 @@ export type AIProvider = {
335
335
  customTools: Map<string, unknown>;
336
336
  executeTool: (toolName: string, params: unknown) => Promise<unknown>;
337
337
  }, functionTag: string): void;
338
- /** Trace context propagated from NeuroLink SDK for parent-child span hierarchy */
339
- _traceContext?: {
338
+ /**
339
+ * Propagate trace context from NeuroLink SDK for parent-child span hierarchy.
340
+ * Use this method instead of accessing `_traceContext` directly.
341
+ */
342
+ setTraceContext(ctx: {
340
343
  traceId: string;
341
344
  parentSpanId: string;
342
- } | null;
345
+ } | null): void;
343
346
  };
344
347
  /**
345
348
  * Provider attempt result for iteration tracking (converted from interface)
@@ -1,5 +1,5 @@
1
- import { Hippocampus, type HippocampusConfig, type CustomStorageConfig } from "@juspay/hippocampus";
2
- export type { HippocampusConfig, CustomStorageConfig };
1
+ import { Hippocampus, type HippocampusConfig, type StorageConfig } from "@juspay/hippocampus";
2
+ export type { HippocampusConfig, StorageConfig };
3
3
  export type Memory = HippocampusConfig & {
4
4
  enabled?: boolean;
5
5
  };
package/dist/neurolink.js CHANGED
@@ -3503,7 +3503,7 @@ Current user's request: ${currentInput}`;
3503
3503
  this, // Pass SDK instance
3504
3504
  options.region);
3505
3505
  // Propagate trace context for parent-child span hierarchy
3506
- provider._traceContext = this._metricsTraceContext;
3506
+ provider.setTraceContext(this._metricsTraceContext);
3507
3507
  // ADD: Emit connection events for all providers (Bedrock-compatible)
3508
3508
  this.emitter.emit("connected");
3509
3509
  this.emitter.emit("message", `${providerName} provider initialized successfully`);
@@ -3734,7 +3734,7 @@ Current user's request: ${currentInput}`;
3734
3734
  this, // Pass SDK instance
3735
3735
  options.region);
3736
3736
  // Propagate trace context for parent-child span hierarchy
3737
- provider._traceContext = this._metricsTraceContext;
3737
+ provider.setTraceContext(this._metricsTraceContext);
3738
3738
  // ADD: Emit connection events for successful provider creation (Bedrock-compatible)
3739
3739
  this.emitter.emit("connected");
3740
3740
  this.emitter.emit("message", `${providerName} provider initialized successfully`);
@@ -4660,7 +4660,7 @@ Current user's request: ${currentInput}`;
4660
4660
  this, // Pass SDK instance
4661
4661
  options.region);
4662
4662
  // Propagate trace context for parent-child span hierarchy
4663
- provider._traceContext = this._metricsTraceContext;
4663
+ provider.setTraceContext(this._metricsTraceContext);
4664
4664
  // Enable tool execution for the provider using BaseProvider method
4665
4665
  provider.setupToolExecutor({
4666
4666
  customTools: this.getCustomTools(),
@@ -13,6 +13,7 @@ export declare class LangfuseExporter extends BaseExporter {
13
13
  private readonly secretKey;
14
14
  private readonly baseUrl;
15
15
  private readonly release?;
16
+ private readonly redactIO;
16
17
  constructor(config: LangfuseExporterConfig);
17
18
  initialize(): Promise<void>;
18
19
  exportSpan(span: SpanData): Promise<ExportResult>;
@@ -14,12 +14,14 @@ export class LangfuseExporter extends BaseExporter {
14
14
  secretKey;
15
15
  baseUrl;
16
16
  release;
17
+ redactIO;
17
18
  constructor(config) {
18
19
  super("langfuse", config);
19
20
  this.publicKey = config.publicKey;
20
21
  this.secretKey = config.secretKey;
21
22
  this.baseUrl = config.baseUrl ?? "https://cloud.langfuse.com";
22
23
  this.release = config.release;
24
+ this.redactIO = config.redactIO ?? false;
23
25
  }
24
26
  async initialize() {
25
27
  if (this.initialized) {
@@ -135,6 +137,10 @@ export class LangfuseExporter extends BaseExporter {
135
137
  ...langfuseSpan,
136
138
  traceId: span.traceId,
137
139
  };
140
+ if (this.redactIO) {
141
+ delete body["input"];
142
+ delete body["output"];
143
+ }
138
144
  await this.apiCall("/api/public/spans", body);
139
145
  }
140
146
  /**
@@ -155,8 +161,8 @@ export class LangfuseExporter extends BaseExporter {
155
161
  maxTokens: span.attributes["ai.max_tokens"],
156
162
  topP: span.attributes["ai.top_p"],
157
163
  },
158
- input: langfuseSpan.input,
159
- output: langfuseSpan.output,
164
+ input: this.redactIO ? undefined : langfuseSpan.input,
165
+ output: this.redactIO ? undefined : langfuseSpan.output,
160
166
  usage: langfuseSpan.usage,
161
167
  metadata: langfuseSpan.metadata,
162
168
  level: langfuseSpan.level,
@@ -3,6 +3,19 @@
3
3
  * Exports spans to LangSmith observability platform
4
4
  */
5
5
  import { logger } from "../../utils/logger.js";
6
+ /**
7
+ * Build a LangSmith dotted_order value: "{datetime}.{id}"
8
+ * Format: YYYYMMDDTHHmmssSSSSSSZ.<id> (datetime in UTC with microseconds)
9
+ */
10
+ function buildDottedOrder(isoTime, id) {
11
+ const d = new Date(isoTime);
12
+ const pad2 = (n) => String(n).padStart(2, "0");
13
+ const microseconds = String(d.getUTCMilliseconds() * 1000).padStart(6, "0");
14
+ const dt = `${d.getUTCFullYear()}${pad2(d.getUTCMonth() + 1)}${pad2(d.getUTCDate())}T` +
15
+ `${pad2(d.getUTCHours())}${pad2(d.getUTCMinutes())}${pad2(d.getUTCSeconds())}` +
16
+ `${microseconds}Z`;
17
+ return `${dt}.${id}`;
18
+ }
6
19
  import { SpanSerializer } from "../utils/spanSerializer.js";
7
20
  import { BaseExporter } from "./baseExporter.js";
8
21
  /**
@@ -67,17 +80,24 @@ export class LangSmithExporter extends BaseExporter {
67
80
  async exportBatch(spans) {
68
81
  const startTime = Date.now();
69
82
  try {
70
- const runs = spans.map((s) => ({
71
- ...SpanSerializer.toLangSmithFormat(s),
72
- session_name: this.projectName,
73
- }));
83
+ const post = spans.map((s) => {
84
+ const run = SpanSerializer.toLangSmithFormat(s);
85
+ // LangSmith /api/v1/runs/batch requires dotted_order and trace_id on each run
86
+ const dotted_order = buildDottedOrder(run.start_time ?? s.startTime, run.id);
87
+ return {
88
+ ...run,
89
+ trace_id: run.trace_id ?? run.id,
90
+ dotted_order,
91
+ session_name: this.projectName,
92
+ };
93
+ });
74
94
  const response = await fetch(`${this.endpoint}/api/v1/runs/batch`, {
75
95
  method: "POST",
76
96
  headers: {
77
97
  "Content-Type": "application/json",
78
98
  "x-api-key": this.apiKey,
79
99
  },
80
- body: JSON.stringify({ runs }),
100
+ body: JSON.stringify({ post }),
81
101
  });
82
102
  if (!response.ok) {
83
103
  throw new Error(`Batch export failed: ${response.statusText}`);
@@ -30,7 +30,7 @@ export class OtelBridge {
30
30
  * Create a NeuroLink span from OpenTelemetry context
31
31
  */
32
32
  createSpanFromOtelContext(spanContext, type, name) {
33
- return SpanSerializer.createSpan(type, name, {}, undefined, spanContext.traceId);
33
+ return SpanSerializer.createSpan(type, name, {}, spanContext.spanId, spanContext.traceId);
34
34
  }
35
35
  /**
36
36
  * Wrap a function with OpenTelemetry tracing that also creates NeuroLink spans
@@ -64,6 +64,13 @@ export type LangfuseExporterConfig = ExporterConfig & {
64
64
  secretKey: string;
65
65
  baseUrl?: string;
66
66
  release?: string;
67
+ /**
68
+ * When true, `input` and `output` fields are omitted from exported spans and
69
+ * generations. Enable in compliance-sensitive deployments where prompt/response
70
+ * content is considered PII or subject to data-minimisation requirements.
71
+ * Defaults to false (input/output are exported).
72
+ */
73
+ redactIO?: boolean;
67
74
  };
68
75
  /**
69
76
  * LangSmith exporter configuration
@@ -105,7 +105,9 @@ export class SpanSerializer {
105
105
  startTime: span.startTime,
106
106
  endTime: span.endTime,
107
107
  // Only pick safe, non-PII attributes for metadata — intentionally excludes
108
- // input, output, error.stack, and other user content for PII safety
108
+ // error.stack and other internal fields. input/output are exported here for
109
+ // Langfuse tracing; use LangfuseExporterConfig.redactIO=true to suppress them
110
+ // in compliance-sensitive deployments.
109
111
  metadata: filterSafeMetadata(span.attributes),
110
112
  level: span.status === SpanStatus.ERROR ? "ERROR" : "DEFAULT",
111
113
  statusMessage: span.statusMessage,
@@ -88,7 +88,7 @@ export class AmazonBedrockProvider extends BaseProvider {
88
88
  return "bedrock";
89
89
  }
90
90
  getDefaultModel() {
91
- return (process.env.BEDROCK_MODEL || "anthropic.claude-3-sonnet-20240229-v1:0");
91
+ return process.env.BEDROCK_MODEL || "anthropic.claude-sonnet-4-6";
92
92
  }
93
93
  /**
94
94
  * Get the default embedding model for Amazon Bedrock
@@ -213,7 +213,7 @@ const getAnthropicApiKey = () => {
213
213
  return validateApiKey(createAnthropicConfig());
214
214
  };
215
215
  const getDefaultAnthropicModel = () => {
216
- return getProviderModel("ANTHROPIC_MODEL", AnthropicModels.CLAUDE_3_5_SONNET);
216
+ return getProviderModel("ANTHROPIC_MODEL", AnthropicModels.CLAUDE_SONNET_4_6);
217
217
  };
218
218
  const streamTracer = trace.getTracer("neurolink.provider.anthropic");
219
219
  /**
@@ -33,8 +33,8 @@
33
33
  * - Current time (ISO): `new Date().toISOString()`
34
34
  */
35
35
  import type { Mem0Config } from "../memory/mem0Initializer.js";
36
- import type { Memory, CustomStorageConfig } from "../memory/hippocampusInitializer.js";
37
- export type { Memory, CustomStorageConfig };
36
+ import type { Memory, StorageConfig } from "../memory/hippocampusInitializer.js";
37
+ export type { Memory, StorageConfig };
38
38
  /**
39
39
  * Configuration for conversation memory feature
40
40
  */
@@ -38,5 +38,5 @@ export * from "./contextTypes.js";
38
38
  export * from "./fileReferenceTypes.js";
39
39
  export * from "./ragTypes.js";
40
40
  export * from "./conversationMemoryInterface.js";
41
- export type { CustomStorageConfig } from "./conversation.js";
41
+ export type { StorageConfig } from "./conversation.js";
42
42
  export * from "./subscriptionTypes.js";
@@ -335,11 +335,14 @@ export type AIProvider = {
335
335
  customTools: Map<string, unknown>;
336
336
  executeTool: (toolName: string, params: unknown) => Promise<unknown>;
337
337
  }, functionTag: string): void;
338
- /** Trace context propagated from NeuroLink SDK for parent-child span hierarchy */
339
- _traceContext?: {
338
+ /**
339
+ * Propagate trace context from NeuroLink SDK for parent-child span hierarchy.
340
+ * Use this method instead of accessing `_traceContext` directly.
341
+ */
342
+ setTraceContext(ctx: {
340
343
  traceId: string;
341
344
  parentSpanId: string;
342
- } | null;
345
+ } | null): void;
343
346
  };
344
347
  /**
345
348
  * Provider attempt result for iteration tracking (converted from interface)
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@juspay/neurolink",
3
- "version": "9.28.0",
3
+ "version": "9.29.0",
4
4
  "description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 13 providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
5
5
  "author": {
6
6
  "name": "Juspay Technologies",
@@ -194,7 +194,7 @@
194
194
  "@opentelemetry/core": "^2.1.0",
195
195
  "@opentelemetry/exporter-trace-otlp-http": "^0.202.0",
196
196
  "@opentelemetry/resources": "^2.1.0",
197
- "@opentelemetry/sdk-node": "^0.56.0",
197
+ "@opentelemetry/sdk-node": "^0.202.0",
198
198
  "@opentelemetry/semantic-conventions": "^1.30.1",
199
199
  "adm-zip": "^0.5.16",
200
200
  "ai": "4.3.19",
@@ -371,7 +371,9 @@
371
371
  "tmp@<=0.2.3": ">=0.2.4",
372
372
  "axios@<1.8.2": ">=1.8.2",
373
373
  "glob@>=10.3.7 <=11.0.3": ">=11.1.0",
374
- "@semantic-release/npm": "^13.1.2"
374
+ "@semantic-release/npm": "^13.1.2",
375
+ "@opentelemetry/sdk-trace-base": "^2.0.1",
376
+ "@opentelemetry/sdk-trace-node": "^2.0.1"
375
377
  }
376
378
  },
377
379
  "os": [