@visibe.ai/node 0.1.4 → 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -110,8 +110,9 @@ class LangChainCallback
   // Tool events
   // ---------------------------------------------------------------------------
   // eslint-disable-next-line @typescript-eslint/no-explicit-any
-  async handleToolStart(_tool, input, runId) {
-    this.pendingToolCalls.set(runId, { startMs: Date.now(), inputText: input });
+  async handleToolStart(tool, input, runId) {
+    const toolName = tool?.name ?? tool?.id?.[tool.id.length - 1] ?? 'tool';
+    this.pendingToolCalls.set(runId, { startMs: Date.now(), inputText: input, toolName });
   }
   async handleToolEnd(output, runId, parentRunId) {
     const pending = this.pendingToolCalls.get(runId);
@@ -122,7 +123,7 @@ class LangChainCallback
   const span = this.visibe.buildToolSpan({
     spanId,
     parentSpanId,
-    toolName: 'tool',
+    toolName: pending?.toolName ?? 'tool',
     agentName: this.agentName,
     status: 'success',
     durationMs: pending ? Date.now() - pending.startMs : 0,
@@ -208,6 +209,7 @@ function patchRunnableSequence(lcModule, visibe) {
   status,
   ended_at: new Date().toISOString(),
   duration_ms: Date.now() - startMs,
+  llm_call_count: cb.llmCallCount,
   total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
   total_input_tokens: cb.totalInputTokens,
   total_output_tokens: cb.totalOutputTokens,
@@ -218,7 +220,7 @@ function patchRunnableSequence(lcModule, visibe) {
   // eslint-disable-next-line @typescript-eslint/no-explicit-any
   RunnableSequence.prototype.stream = async function* (input, config) {
     if (exports.activeLangChainStorage.getStore() !== undefined) {
-      yield* originalStream.call(this, input, config);
+      yield* (await originalStream.call(this, input, config));
       return;
     }
     const traceId = (0, node_crypto_1.randomUUID)();
@@ -250,6 +252,7 @@ function patchRunnableSequence(lcModule, visibe) {
   status,
   ended_at: new Date().toISOString(),
   duration_ms: Date.now() - startMs,
+  llm_call_count: cb.llmCallCount,
   total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
   total_input_tokens: cb.totalInputTokens,
   total_output_tokens: cb.totalOutputTokens,
@@ -86,6 +86,7 @@ function patchCompiledStateGraph(lgModule, visibe) {
   status,
   ended_at: new Date().toISOString(),
   duration_ms: Date.now() - startMs,
+  llm_call_count: cb.llmCallCount,
   total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
   total_input_tokens: cb.totalInputTokens,
   total_output_tokens: cb.totalOutputTokens,
@@ -136,6 +137,7 @@ function patchCompiledStateGraph(lgModule, visibe) {
   status,
   ended_at: new Date().toISOString(),
   duration_ms: Date.now() - startMs,
+  llm_call_count: cb.llmCallCount,
   total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
   total_input_tokens: cb.totalInputTokens,
   total_output_tokens: cb.totalOutputTokens,
@@ -49,7 +49,7 @@ async function sendLLMTrace(visibe, agentName, model, provider, inputTokens, out
   const cost = (0, utils_1.calculateCost)(model, inputTokens, outputTokens);
   // Build span manually so we can override the provider from Vercel's metadata.
   const span = {
-    span_id: `step_1`,
+    span_id: (0, node_crypto_1.randomUUID)(),
     type: 'llm_call',
     timestamp: new Date().toISOString(),
     agent_name: agentName,
@@ -106,8 +106,9 @@ export class LangChainCallback
   // Tool events
   // ---------------------------------------------------------------------------
   // eslint-disable-next-line @typescript-eslint/no-explicit-any
-  async handleToolStart(_tool, input, runId) {
-    this.pendingToolCalls.set(runId, { startMs: Date.now(), inputText: input });
+  async handleToolStart(tool, input, runId) {
+    const toolName = tool?.name ?? tool?.id?.[tool.id.length - 1] ?? 'tool';
+    this.pendingToolCalls.set(runId, { startMs: Date.now(), inputText: input, toolName });
   }
   async handleToolEnd(output, runId, parentRunId) {
     const pending = this.pendingToolCalls.get(runId);
@@ -118,7 +119,7 @@ export class LangChainCallback
   const span = this.visibe.buildToolSpan({
     spanId,
     parentSpanId,
-    toolName: 'tool',
+    toolName: pending?.toolName ?? 'tool',
     agentName: this.agentName,
     status: 'success',
     durationMs: pending ? Date.now() - pending.startMs : 0,
@@ -203,6 +204,7 @@ export function patchRunnableSequence(lcModule, visibe) {
   status,
   ended_at: new Date().toISOString(),
   duration_ms: Date.now() - startMs,
+  llm_call_count: cb.llmCallCount,
   total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
   total_input_tokens: cb.totalInputTokens,
   total_output_tokens: cb.totalOutputTokens,
@@ -213,7 +215,7 @@ export function patchRunnableSequence(lcModule, visibe) {
   // eslint-disable-next-line @typescript-eslint/no-explicit-any
   RunnableSequence.prototype.stream = async function* (input, config) {
     if (activeLangChainStorage.getStore() !== undefined) {
-      yield* originalStream.call(this, input, config);
+      yield* (await originalStream.call(this, input, config));
       return;
     }
     const traceId = randomUUID();
@@ -245,6 +247,7 @@ export function patchRunnableSequence(lcModule, visibe) {
   status,
   ended_at: new Date().toISOString(),
   duration_ms: Date.now() - startMs,
+  llm_call_count: cb.llmCallCount,
   total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
   total_input_tokens: cb.totalInputTokens,
   total_output_tokens: cb.totalOutputTokens,
@@ -81,6 +81,7 @@ export function patchCompiledStateGraph(lgModule, visibe) {
   status,
   ended_at: new Date().toISOString(),
   duration_ms: Date.now() - startMs,
+  llm_call_count: cb.llmCallCount,
   total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
   total_input_tokens: cb.totalInputTokens,
   total_output_tokens: cb.totalOutputTokens,
@@ -131,6 +132,7 @@ export function patchCompiledStateGraph(lgModule, visibe) {
   status,
   ended_at: new Date().toISOString(),
   duration_ms: Date.now() - startMs,
+  llm_call_count: cb.llmCallCount,
   total_tokens: cb.totalInputTokens + cb.totalOutputTokens,
   total_input_tokens: cb.totalInputTokens,
   total_output_tokens: cb.totalOutputTokens,
@@ -46,7 +46,7 @@ async function sendLLMTrace(visibe, agentName, model, provider, inputTokens, out
   const cost = calculateCost(model, inputTokens, outputTokens);
   // Build span manually so we can override the provider from Vercel's metadata.
   const span = {
-    span_id: `step_1`,
+    span_id: randomUUID(),
     type: 'llm_call',
     timestamp: new Date().toISOString(),
     agent_name: agentName,
@@ -13,6 +13,7 @@ export declare class LangChainCallback {
   protected pendingToolCalls: Map<string, {
     startMs: number;
     inputText: string;
+    toolName?: string;
   }>;
   protected stepCounter: number;
   protected nextSpanId(): string;
@@ -28,7 +29,7 @@ export declare class LangChainCallback {
   handleLLMStart(_llm: any, _messages: any[], runId: string): Promise<void>;
   handleLLMEnd(output: any, runId: string, parentRunId?: string): Promise<void>;
   handleLLMError(err: Error, runId: string): Promise<void>;
-  handleToolStart(_tool: any, input: string, runId: string): Promise<void>;
+  handleToolStart(tool: any, input: string, runId: string): Promise<void>;
   handleToolEnd(output: string, runId: string, parentRunId?: string): Promise<void>;
   handleToolError(err: Error, runId: string): Promise<void>;
   handleChainStart(chain: any, _inputs: any, runId: string, parentRunId?: string): Promise<void>;
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@visibe.ai/node",
-  "version": "0.1.4",
+  "version": "0.1.7",
   "description": "AI Agent Observability — Track OpenAI, LangChain, LangGraph, Bedrock, Vercel AI, Anthropic",
   "main": "dist/cjs/index.js",
   "module": "dist/esm/index.js",