@visibe.ai/node 0.1.27 → 0.1.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -353,9 +353,12 @@ class Visibe {
353
353
  return {
354
354
  span_id: opts.spanId,
355
355
  type: 'agent_start', // EXACT string — backend validates this
356
- timestamp: new Date().toISOString(),
356
+ timestamp: opts.timestamp ?? new Date().toISOString(),
357
357
  agent_name: opts.agentName,
358
358
  description: `Agent started: ${opts.agentName}`,
359
+ ...(opts.inputText !== undefined ? { input_text: opts.inputText } : {}),
360
+ ...(opts.outputText !== undefined ? { output_text: opts.outputText } : {}),
361
+ ...(opts.durationMs !== undefined ? { duration_ms: opts.durationMs } : {}),
359
362
  };
360
363
  }
361
364
  }
@@ -4,40 +4,73 @@ exports.LangGraphCallback = void 0;
4
4
  exports.patchCompiledStateGraph = patchCompiledStateGraph;
5
5
  const node_crypto_1 = require("node:crypto");
6
6
  const langchain_1 = require("./langchain");
7
- // ---------------------------------------------------------------------------
8
- // LangGraphCallback
9
- // Extends LangChainCallback and adds node-level agent_start spans.
10
- // ---------------------------------------------------------------------------
11
7
  class LangGraphCallback extends langchain_1.LangChainCallback {
12
8
  constructor(options) {
13
9
  super(options);
10
+ this.pendingAgentSpans = new Map();
14
11
  this.nodeNames = new Set(options.nodeNames ?? []);
15
12
  // LangGraphCallback emits its own agent_start spans per node — suppress the
16
13
  // base-class root-chain emission to avoid a spurious 'LangGraph' agent span.
17
14
  this._emitRootAgentStart = false;
18
15
  }
19
- // Override handleChainStart to emit agent_start spans for LangGraph nodes.
16
+ // Buffer the span info on chain start; the span is sent on chain end so we
17
+ // can include both input_text and output_text in a single agent_start span.
20
18
  //
21
19
  // In LangGraph v1.2+, the node key is the 8th `name` parameter (not chain.name).
22
20
  // Internal system nodes (__start__, __end__, LangGraph) are filtered out.
23
21
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
24
22
  async handleChainStart(chain, inputs, runId, parentRunId, tags, metadata, runType, name) {
25
- // Always track the runId so child LLM calls can resolve parent_span_id.
23
+ // Always let the base class track runId → spanId FIRST so child LLM calls
24
+ // (which fire between chain start and chain end) resolve parent_span_id correctly.
26
25
  await super.handleChainStart(chain, inputs, runId, parentRunId, tags, metadata, runType, name);
27
- // The node key is passed as the 8th `name` parameter in LangGraph v1.2+.
28
- // Fall back to chain.name for older versions.
29
26
  const nodeName = name ?? chain?.name ?? '';
30
- if (nodeName && !langchain_1.LANGGRAPH_INTERNAL_NODES.has(nodeName)) {
31
- // Use the spanId already assigned by super for this runId.
32
- const spanId = this.runIdToSpanId.get(runId) ?? this.nextSpanId();
33
- this.runIdToSpanId.set(runId, spanId);
34
- // Register agent name so _findAgentName() resolves it for child llm_call / tool_call spans.
35
- this.runIdToAgentName.set(runId, nodeName);
36
- this.visibe.batcher.add(this.traceId, this.visibe.buildAgentStartSpan({
37
- spanId,
38
- agentName: nodeName,
39
- }));
40
- }
27
+ if (!nodeName || langchain_1.LANGGRAPH_INTERNAL_NODES.has(nodeName))
28
+ return;
29
+ // Ensure the spanId is assigned to this runId before any child spans are emitted.
30
+ const spanId = this.runIdToSpanId.get(runId) ?? this.nextSpanId();
31
+ this.runIdToSpanId.set(runId, spanId);
32
+ // Register agent name so _findAgentName() resolves it for child llm_call / tool_call spans.
33
+ this.runIdToAgentName.set(runId, nodeName);
34
+ // Buffer — will be sent with output_text when handleChainEnd fires.
35
+ this.pendingAgentSpans.set(runId, {
36
+ spanId,
37
+ agentName: nodeName,
38
+ startMs: Date.now(),
39
+ timestamp: new Date().toISOString(),
40
+ inputText: (0, langchain_1.serializeInput)(inputs, 1000),
41
+ });
42
+ }
43
+ // Flush the buffered agent_start span with both input and output text.
44
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
45
+ async handleChainEnd(outputs, runId) {
46
+ const pending = this.pendingAgentSpans.get(runId);
47
+ if (!pending)
48
+ return;
49
+ this.pendingAgentSpans.delete(runId);
50
+ this.visibe.batcher.add(this.traceId, this.visibe.buildAgentStartSpan({
51
+ spanId: pending.spanId,
52
+ agentName: pending.agentName,
53
+ timestamp: pending.timestamp,
54
+ inputText: pending.inputText,
55
+ outputText: (0, langchain_1.serializeInput)(outputs, 1000),
56
+ durationMs: Date.now() - pending.startMs,
57
+ }));
58
+ }
59
+ // Also flush on chain error so the span is never silently dropped.
60
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
61
+ async handleChainError(err, runId) {
62
+ const pending = this.pendingAgentSpans.get(runId);
63
+ if (!pending)
64
+ return;
65
+ this.pendingAgentSpans.delete(runId);
66
+ this.visibe.batcher.add(this.traceId, this.visibe.buildAgentStartSpan({
67
+ spanId: pending.spanId,
68
+ agentName: pending.agentName,
69
+ timestamp: pending.timestamp,
70
+ inputText: pending.inputText,
71
+ outputText: `Error: ${err?.message ?? String(err)}`,
72
+ durationMs: Date.now() - pending.startMs,
73
+ }));
41
74
  }
42
75
  }
43
76
  exports.LangGraphCallback = LangGraphCallback;
@@ -105,12 +138,16 @@ function patchCompiledStateGraph(lgModule, visibe) {
105
138
  return result;
106
139
  };
107
140
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
108
- CompiledStateGraph.prototype.stream = async function* (input, config) {
141
+ CompiledStateGraph.prototype.stream = async function (input, config) {
109
142
  if (langchain_1.activeLangChainStorage.getStore() !== undefined) {
110
- // Pregel.stream() is a regular async function returning a Promise<AsyncIterable>.
111
- // We must await it before yield*.
112
- yield* (await originalStream.call(this, input, config));
113
- return;
143
+ // Pass-through: return the original stream directly with NO generator wrapping.
144
+ //
145
+ // Pregel.stream() is a regular async function returning a Promise<IterableReadableStreamWithAbortSignal>.
146
+ // Wrapping it in an async function* (yield*) was the root cause of ALS context loss
147
+ // for later graph nodes — the extra generator delegation layer breaks AsyncLocalStorage
148
+ // propagation through the eager .then()-pump inside IterableReadableStreamWithAbortSignal.
149
+ // Returning the Promise directly restores the exact original behavior.
150
+ return originalStream.call(this, input, config);
114
151
  }
115
152
  const traceId = (0, node_crypto_1.randomUUID)();
116
153
  const startedAt = new Date().toISOString();
@@ -131,31 +168,38 @@ function patchCompiledStateGraph(lgModule, visibe) {
131
168
  agentName: graphName,
132
169
  nodeNames,
133
170
  });
171
+ // Get the inner stream inside the ALS context so createAndRunLoop() (which runs
172
+ // fire-and-forget inside Pregel.stream()) inherits ALS = cb for all graph nodes.
173
+ const innerStream = await langchain_1.activeLangChainStorage.run(cb, () => originalStream.call(this, input, _mergeCallbacks(config, cb)));
174
+ // Return an async generator that wraps the stream for completion tracking.
175
+ // This path is only reached when the user calls app.stream() directly —
176
+ // not via app.invoke() (which takes the pass-through path above).
177
+ // createAndRunLoop() already runs in ALS = cb context (established above),
178
+ // so the generator wrapping here does NOT affect node ALS propagation.
134
179
  let status = 'completed';
135
- try {
136
- // activeLangChainStorage.run returns the callback's return value synchronously,
137
- // which is a Promise<AsyncIterable> from Pregel.stream(). Await before yield*.
138
- const gen = await langchain_1.activeLangChainStorage.run(cb, () => originalStream.call(this, input, _mergeCallbacks(config, cb)));
139
- yield* gen;
140
- }
141
- catch (err) {
142
- status = 'failed';
143
- throw err;
144
- }
145
- finally {
146
- visibe.batcher.flush();
147
- await visibe.apiClient.completeTrace(traceId, {
148
- status,
149
- ended_at: new Date().toISOString(),
150
- duration_ms: Date.now() - startMs,
151
- llm_call_count: cb.llmCallCount,
152
- total_cost: cb.totalCost,
153
- total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
154
- total_input_tokens: cb.totalInputTokens,
155
- total_output_tokens: cb.totalOutputTokens,
156
- ...(cb.firstModel ? { model: cb.firstModel } : {}),
157
- });
158
- }
180
+ return (async function* () {
181
+ try {
182
+ yield* innerStream;
183
+ }
184
+ catch (err) {
185
+ status = 'failed';
186
+ throw err;
187
+ }
188
+ finally {
189
+ visibe.batcher.flush();
190
+ await visibe.apiClient.completeTrace(traceId, {
191
+ status,
192
+ ended_at: new Date().toISOString(),
193
+ duration_ms: Date.now() - startMs,
194
+ llm_call_count: cb.llmCallCount,
195
+ total_cost: cb.totalCost,
196
+ total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
197
+ total_input_tokens: cb.totalInputTokens,
198
+ total_output_tokens: cb.totalOutputTokens,
199
+ ...(cb.firstModel ? { model: cb.firstModel } : {}),
200
+ });
201
+ }
202
+ })();
159
203
  };
160
204
  return () => {
161
205
  CompiledStateGraph.prototype.invoke = originalInvoke;
@@ -350,9 +350,12 @@ export class Visibe {
350
350
  return {
351
351
  span_id: opts.spanId,
352
352
  type: 'agent_start', // EXACT string — backend validates this
353
- timestamp: new Date().toISOString(),
353
+ timestamp: opts.timestamp ?? new Date().toISOString(),
354
354
  agent_name: opts.agentName,
355
355
  description: `Agent started: ${opts.agentName}`,
356
+ ...(opts.inputText !== undefined ? { input_text: opts.inputText } : {}),
357
+ ...(opts.outputText !== undefined ? { output_text: opts.outputText } : {}),
358
+ ...(opts.durationMs !== undefined ? { duration_ms: opts.durationMs } : {}),
356
359
  };
357
360
  }
358
361
  }
@@ -1,39 +1,72 @@
1
1
  import { randomUUID } from 'node:crypto';
2
2
  import { LangChainCallback, activeLangChainStorage, LANGGRAPH_INTERNAL_NODES, serializeInput } from './langchain.js';
3
- // ---------------------------------------------------------------------------
4
- // LangGraphCallback
5
- // Extends LangChainCallback and adds node-level agent_start spans.
6
- // ---------------------------------------------------------------------------
7
3
  export class LangGraphCallback extends LangChainCallback {
8
4
  constructor(options) {
9
5
  super(options);
6
+ this.pendingAgentSpans = new Map();
10
7
  this.nodeNames = new Set(options.nodeNames ?? []);
11
8
  // LangGraphCallback emits its own agent_start spans per node — suppress the
12
9
  // base-class root-chain emission to avoid a spurious 'LangGraph' agent span.
13
10
  this._emitRootAgentStart = false;
14
11
  }
15
- // Override handleChainStart to emit agent_start spans for LangGraph nodes.
12
+ // Buffer the span info on chain start; the span is sent on chain end so we
13
+ // can include both input_text and output_text in a single agent_start span.
16
14
  //
17
15
  // In LangGraph v1.2+, the node key is the 8th `name` parameter (not chain.name).
18
16
  // Internal system nodes (__start__, __end__, LangGraph) are filtered out.
19
17
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
20
18
  async handleChainStart(chain, inputs, runId, parentRunId, tags, metadata, runType, name) {
21
- // Always track the runId so child LLM calls can resolve parent_span_id.
19
+ // Always let the base class track runId → spanId FIRST so child LLM calls
20
+ // (which fire between chain start and chain end) resolve parent_span_id correctly.
22
21
  await super.handleChainStart(chain, inputs, runId, parentRunId, tags, metadata, runType, name);
23
- // The node key is passed as the 8th `name` parameter in LangGraph v1.2+.
24
- // Fall back to chain.name for older versions.
25
22
  const nodeName = name ?? chain?.name ?? '';
26
- if (nodeName && !LANGGRAPH_INTERNAL_NODES.has(nodeName)) {
27
- // Use the spanId already assigned by super for this runId.
28
- const spanId = this.runIdToSpanId.get(runId) ?? this.nextSpanId();
29
- this.runIdToSpanId.set(runId, spanId);
30
- // Register agent name so _findAgentName() resolves it for child llm_call / tool_call spans.
31
- this.runIdToAgentName.set(runId, nodeName);
32
- this.visibe.batcher.add(this.traceId, this.visibe.buildAgentStartSpan({
33
- spanId,
34
- agentName: nodeName,
35
- }));
36
- }
23
+ if (!nodeName || LANGGRAPH_INTERNAL_NODES.has(nodeName))
24
+ return;
25
+ // Ensure the spanId is assigned to this runId before any child spans are emitted.
26
+ const spanId = this.runIdToSpanId.get(runId) ?? this.nextSpanId();
27
+ this.runIdToSpanId.set(runId, spanId);
28
+ // Register agent name so _findAgentName() resolves it for child llm_call / tool_call spans.
29
+ this.runIdToAgentName.set(runId, nodeName);
30
+ // Buffer — will be sent with output_text when handleChainEnd fires.
31
+ this.pendingAgentSpans.set(runId, {
32
+ spanId,
33
+ agentName: nodeName,
34
+ startMs: Date.now(),
35
+ timestamp: new Date().toISOString(),
36
+ inputText: serializeInput(inputs, 1000),
37
+ });
38
+ }
39
+ // Flush the buffered agent_start span with both input and output text.
40
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
41
+ async handleChainEnd(outputs, runId) {
42
+ const pending = this.pendingAgentSpans.get(runId);
43
+ if (!pending)
44
+ return;
45
+ this.pendingAgentSpans.delete(runId);
46
+ this.visibe.batcher.add(this.traceId, this.visibe.buildAgentStartSpan({
47
+ spanId: pending.spanId,
48
+ agentName: pending.agentName,
49
+ timestamp: pending.timestamp,
50
+ inputText: pending.inputText,
51
+ outputText: serializeInput(outputs, 1000),
52
+ durationMs: Date.now() - pending.startMs,
53
+ }));
54
+ }
55
+ // Also flush on chain error so the span is never silently dropped.
56
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
57
+ async handleChainError(err, runId) {
58
+ const pending = this.pendingAgentSpans.get(runId);
59
+ if (!pending)
60
+ return;
61
+ this.pendingAgentSpans.delete(runId);
62
+ this.visibe.batcher.add(this.traceId, this.visibe.buildAgentStartSpan({
63
+ spanId: pending.spanId,
64
+ agentName: pending.agentName,
65
+ timestamp: pending.timestamp,
66
+ inputText: pending.inputText,
67
+ outputText: `Error: ${err?.message ?? String(err)}`,
68
+ durationMs: Date.now() - pending.startMs,
69
+ }));
37
70
  }
38
71
  }
39
72
  // ---------------------------------------------------------------------------
@@ -100,12 +133,16 @@ export function patchCompiledStateGraph(lgModule, visibe) {
100
133
  return result;
101
134
  };
102
135
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
103
- CompiledStateGraph.prototype.stream = async function* (input, config) {
136
+ CompiledStateGraph.prototype.stream = async function (input, config) {
104
137
  if (activeLangChainStorage.getStore() !== undefined) {
105
- // Pregel.stream() is a regular async function returning a Promise<AsyncIterable>.
106
- // We must await it before yield*.
107
- yield* (await originalStream.call(this, input, config));
108
- return;
138
+ // Pass-through: return the original stream directly with NO generator wrapping.
139
+ //
140
+ // Pregel.stream() is a regular async function returning a Promise<IterableReadableStreamWithAbortSignal>.
141
+ // Wrapping it in an async function* (yield*) was the root cause of ALS context loss
142
+ // for later graph nodes — the extra generator delegation layer breaks AsyncLocalStorage
143
+ // propagation through the eager .then()-pump inside IterableReadableStreamWithAbortSignal.
144
+ // Returning the Promise directly restores the exact original behavior.
145
+ return originalStream.call(this, input, config);
109
146
  }
110
147
  const traceId = randomUUID();
111
148
  const startedAt = new Date().toISOString();
@@ -126,31 +163,38 @@ export function patchCompiledStateGraph(lgModule, visibe) {
126
163
  agentName: graphName,
127
164
  nodeNames,
128
165
  });
166
+ // Get the inner stream inside the ALS context so createAndRunLoop() (which runs
167
+ // fire-and-forget inside Pregel.stream()) inherits ALS = cb for all graph nodes.
168
+ const innerStream = await activeLangChainStorage.run(cb, () => originalStream.call(this, input, _mergeCallbacks(config, cb)));
169
+ // Return an async generator that wraps the stream for completion tracking.
170
+ // This path is only reached when the user calls app.stream() directly —
171
+ // not via app.invoke() (which takes the pass-through path above).
172
+ // createAndRunLoop() already runs in ALS = cb context (established above),
173
+ // so the generator wrapping here does NOT affect node ALS propagation.
129
174
  let status = 'completed';
130
- try {
131
- // activeLangChainStorage.run returns the callback's return value synchronously,
132
- // which is a Promise<AsyncIterable> from Pregel.stream(). Await before yield*.
133
- const gen = await activeLangChainStorage.run(cb, () => originalStream.call(this, input, _mergeCallbacks(config, cb)));
134
- yield* gen;
135
- }
136
- catch (err) {
137
- status = 'failed';
138
- throw err;
139
- }
140
- finally {
141
- visibe.batcher.flush();
142
- await visibe.apiClient.completeTrace(traceId, {
143
- status,
144
- ended_at: new Date().toISOString(),
145
- duration_ms: Date.now() - startMs,
146
- llm_call_count: cb.llmCallCount,
147
- total_cost: cb.totalCost,
148
- total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
149
- total_input_tokens: cb.totalInputTokens,
150
- total_output_tokens: cb.totalOutputTokens,
151
- ...(cb.firstModel ? { model: cb.firstModel } : {}),
152
- });
153
- }
175
+ return (async function* () {
176
+ try {
177
+ yield* innerStream;
178
+ }
179
+ catch (err) {
180
+ status = 'failed';
181
+ throw err;
182
+ }
183
+ finally {
184
+ visibe.batcher.flush();
185
+ await visibe.apiClient.completeTrace(traceId, {
186
+ status,
187
+ ended_at: new Date().toISOString(),
188
+ duration_ms: Date.now() - startMs,
189
+ llm_call_count: cb.llmCallCount,
190
+ total_cost: cb.totalCost,
191
+ total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
192
+ total_input_tokens: cb.totalInputTokens,
193
+ total_output_tokens: cb.totalOutputTokens,
194
+ ...(cb.firstModel ? { model: cb.firstModel } : {}),
195
+ });
196
+ }
197
+ })();
154
198
  };
155
199
  return () => {
156
200
  CompiledStateGraph.prototype.invoke = originalInvoke;
@@ -48,5 +48,9 @@ export declare class Visibe {
48
48
  buildAgentStartSpan(opts: {
49
49
  spanId: string;
50
50
  agentName: string;
51
+ timestamp?: string;
52
+ inputText?: string;
53
+ outputText?: string;
54
+ durationMs?: number;
51
55
  }): object;
52
56
  }
@@ -2,6 +2,7 @@ import { LangChainCallback } from './langchain';
2
2
  import type { Visibe } from '../client';
3
3
  export declare class LangGraphCallback extends LangChainCallback {
4
4
  private readonly nodeNames;
5
+ private readonly pendingAgentSpans;
5
6
  constructor(options: {
6
7
  visibe: Visibe;
7
8
  traceId: string;
@@ -9,5 +10,7 @@ export declare class LangGraphCallback extends LangChainCallback {
9
10
  nodeNames?: string[];
10
11
  });
11
12
  handleChainStart(chain: any, inputs: any, runId: string, parentRunId?: string, tags?: string[], metadata?: any, runType?: string, name?: string): Promise<void>;
13
+ handleChainEnd(outputs: any, runId: string): Promise<void>;
14
+ handleChainError(err: any, runId: string): Promise<void>;
12
15
  }
13
16
  export declare function patchCompiledStateGraph(lgModule: any, visibe: Visibe): () => void;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@visibe.ai/node",
3
- "version": "0.1.27",
3
+ "version": "0.1.29",
4
4
  "description": "AI Agent Observability — Track OpenAI, LangChain, LangGraph, Bedrock, Vercel AI, Anthropic",
5
5
  "main": "dist/cjs/index.js",
6
6
  "module": "dist/esm/index.js",