@juspay/neurolink 9.59.0 → 9.59.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,6 +10,7 @@ import { ErrorCategory, ErrorSeverity, } from "../constants/enums.js";
10
10
  import { BaseProvider } from "../core/baseProvider.js";
11
11
  import { DEFAULT_MAX_STEPS, GLOBAL_LOCATION_MODELS, } from "../core/constants.js";
12
12
  import { ModelConfigurationManager } from "../core/modelConfiguration.js";
13
+ import { markStreamProviderEmittedGenerationEnd, } from "../neurolink.js";
13
14
  import { createProxyFetch } from "../proxy/proxyFetch.js";
14
15
  import { ATTR, tracers, withClientSpan } from "../telemetry/index.js";
15
16
  import { AuthenticationError, InvalidModelError, NetworkError, ProviderError, RateLimitError, } from "../types/index.js";
@@ -1630,8 +1631,12 @@ export class GoogleVertexProvider extends BaseProvider {
1630
1631
  // Emit generation:end so Pipeline B (Langfuse) creates a GENERATION
1631
1632
  // observation. The native @google/genai stream path on Vertex bypasses the
1632
1633
  // Vercel AI SDK so experimental_telemetry is never injected; we emit manually.
1634
+ // Curator P2-4 dedup: flag the per-stream context attached to options
1635
+ // so the orchestration in `runStandardStreamRequest` knows we already
1636
+ // emitted and skips its own emit (preserving exactly-once).
1633
1637
  const vertexStreamEmitter = this.neurolink?.getEventEmitter();
1634
1638
  if (vertexStreamEmitter) {
1639
+ markStreamProviderEmittedGenerationEnd(params.options);
1635
1640
  vertexStreamEmitter.emit("generation:end", {
1636
1641
  provider: this.providerName,
1637
1642
  responseTime,
@@ -57,3 +57,4 @@ export * from "./span.js";
57
57
  export * from "./imageGen.js";
58
58
  export * from "./elicitation.js";
59
59
  export * from "./dynamic.js";
60
+ export * from "./streamDedup.js";
@@ -60,4 +60,6 @@ export * from "./imageGen.js";
60
60
  export * from "./elicitation.js";
61
61
  // Dynamic Arguments types
62
62
  export * from "./dynamic.js";
63
+ // Curator P2-4 dedup: per-stream AsyncLocalStorage context
64
+ export * from "./streamDedup.js";
63
65
  //# sourceMappingURL=index.js.map
@@ -0,0 +1,14 @@
1
+ /**
2
+ * Curator P2-4 dedup (concurrency-safe): per-stream context that lets
3
+ * the orchestration's `runStandardStreamRequest` finally block know
4
+ * whether a *native provider* path within THIS stream's async chain
5
+ * already emitted `generation:end`. Native providers (Vertex / Google
6
+ * AI Studio for Gemini 3, etc.) emit on the shared SDK emitter; without
7
+ * scoping, a concurrent unrelated stream's emit on the same NeuroLink
8
+ * instance would suppress the wrong stream's orchestration emit.
9
+ *
10
+ * A fresh per-call context object attached to each stream's options scopes the flag to that stream.
11
+ */
12
+ export type StreamGenerationEndContext = {
13
+ providerEmitted: boolean;
14
+ };
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=streamDedup.js.map
@@ -5,13 +5,41 @@
5
5
  * Enhanced AI provider system with natural MCP tool access.
6
6
  * Uses real MCP infrastructure for tool discovery and execution.
7
7
  */
8
- import type { CompactionConfig, CompactionResult, SpanData, ObservabilityConfig, MetricsSummary, MCPToolAnnotations, TraceView, AuthenticatedContext, AuthProvider, JsonObject, NeuroLinkEvents, TypedEventEmitter, MCPEnhancementsConfig, NeuroLinkAuthConfig, NeurolinkConstructorConfig, ChatMessage, ExternalMCPOperationResult, ExternalMCPServerInstance, ExternalMCPToolInfo, GenerateOptions, GenerateResult, ProviderStatus, TextGenerationOptions, TextGenerationResult, MCPExecutableTool, MCPServerInfo, MCPStatus, StreamOptions, StreamResult, ToolExecutionContext, ToolExecutionSummary, ToolInfo, ToolRegistrationOptions, BatchOperationResult } from "./types/index.js";
8
+ import type { CompactionConfig, CompactionResult, SpanData, ObservabilityConfig, MetricsSummary, MCPToolAnnotations, TraceView, AuthenticatedContext, AuthProvider, JsonObject, NeuroLinkEvents, TypedEventEmitter, MCPEnhancementsConfig, NeuroLinkAuthConfig, NeurolinkConstructorConfig, ChatMessage, ExternalMCPOperationResult, ExternalMCPServerInstance, ExternalMCPToolInfo, GenerateOptions, GenerateResult, ProviderStatus, TextGenerationOptions, TextGenerationResult, MCPExecutableTool, MCPServerInfo, MCPStatus, StreamOptions, StreamResult, ToolExecutionContext, ToolExecutionSummary, ToolInfo, ToolRegistrationOptions, BatchOperationResult, StreamGenerationEndContext } from "./types/index.js";
9
9
  import { ConversationMemoryManager } from "./core/conversationMemoryManager.js";
10
10
  import type { RedisConversationMemoryManager } from "./core/redisConversationMemoryManager.js";
11
11
  import { ExternalServerManager } from "./mcp/externalServerManager.js";
12
12
  import { MCPToolRegistry } from "./mcp/toolRegistry.js";
13
13
  import type { DynamicOptions } from "./types/index.js";
14
14
  import { TaskManager } from "./tasks/taskManager.js";
15
+ /**
16
+ * Curator P2-4 dedup (concurrency-safe): native providers emit
17
+ * `generation:end` on the shared SDK emitter. We attach a fresh
18
+ * mutable `dedupContext` object directly to the per-call
19
+ * `StreamOptions` (under `_streamDedupContext`) so each stream gets
20
+ * its own instance — concurrent streams have different option objects
21
+ * and therefore different contexts, so they cannot interfere.
22
+ *
23
+ * Native provider emit sites read `options._streamDedupContext` and
24
+ * flip `.providerEmitted = true` before emitting; the orchestration's
25
+ * finally block reads the same closed-over reference and skips its
26
+ * own emit when the flag is set.
27
+ *
28
+ * This avoids the AsyncLocalStorage approach which doesn't reliably
29
+ * propagate through async-generator yield boundaries when iteration
30
+ * happens from outside the original `run()` scope (e.g. when the
31
+ * consumer drives `for await of result.stream` after `sdk.stream(...)`
32
+ * returns).
33
+ */
34
+ export declare const STREAM_DEDUP_CONTEXT_KEY: "_streamDedupContext";
35
+ /**
36
+ * Native providers call this from their `generation:end` emit sites,
37
+ * passing the same `options` object they received. Safe no-op when
38
+ * the field isn't set.
39
+ */
40
+ export declare function markStreamProviderEmittedGenerationEnd(options: {
41
+ _streamDedupContext?: StreamGenerationEndContext;
42
+ } | undefined): void;
15
43
  export declare class NeuroLink {
16
44
  private mcpInitialized;
17
45
  private mcpSkipped;