@visibe.ai/node 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +330 -0
- package/dist/cjs/api.js +92 -0
- package/dist/cjs/client.js +242 -0
- package/dist/cjs/index.js +216 -0
- package/dist/cjs/integrations/anthropic.js +277 -0
- package/dist/cjs/integrations/base.js +32 -0
- package/dist/cjs/integrations/bedrock.js +442 -0
- package/dist/cjs/integrations/group-context.js +10 -0
- package/dist/cjs/integrations/langchain.js +274 -0
- package/dist/cjs/integrations/langgraph.js +173 -0
- package/dist/cjs/integrations/openai.js +447 -0
- package/dist/cjs/integrations/vercel-ai.js +261 -0
- package/dist/cjs/types/index.js +5 -0
- package/dist/cjs/utils.js +122 -0
- package/dist/esm/api.js +87 -0
- package/dist/esm/client.js +238 -0
- package/dist/esm/index.js +209 -0
- package/dist/esm/integrations/anthropic.js +272 -0
- package/dist/esm/integrations/base.js +28 -0
- package/dist/esm/integrations/bedrock.js +438 -0
- package/dist/esm/integrations/group-context.js +7 -0
- package/dist/esm/integrations/langchain.js +269 -0
- package/dist/esm/integrations/langgraph.js +168 -0
- package/dist/esm/integrations/openai.js +442 -0
- package/dist/esm/integrations/vercel-ai.js +258 -0
- package/dist/esm/types/index.js +4 -0
- package/dist/esm/utils.js +116 -0
- package/dist/types/api.d.ts +27 -0
- package/dist/types/client.d.ts +50 -0
- package/dist/types/index.d.ts +7 -0
- package/dist/types/integrations/anthropic.d.ts +9 -0
- package/dist/types/integrations/base.d.ts +17 -0
- package/dist/types/integrations/bedrock.d.ts +11 -0
- package/dist/types/integrations/group-context.d.ts +12 -0
- package/dist/types/integrations/langchain.d.ts +40 -0
- package/dist/types/integrations/langgraph.d.ts +13 -0
- package/dist/types/integrations/openai.d.ts +11 -0
- package/dist/types/integrations/vercel-ai.d.ts +2 -0
- package/dist/types/types/index.d.ts +21 -0
- package/dist/types/utils.d.ts +23 -0
- package/package.json +80 -0
|
@@ -0,0 +1,447 @@
|
|
|
1
|
+
"use strict";
// Visibe OpenAI integration (CJS build): patches an OpenAI SDK client so that
// chat.completions.create / responses.create calls are traced as Visibe spans.
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAIIntegration = exports.activeLangChainStorage = void 0;
exports.patchOpenAIClient = patchOpenAIClient;
const node_async_hooks_1 = require("node:async_hooks");
const node_crypto_1 = require("node:crypto");
const base_1 = require("./base");
const group_context_1 = require("./group-context");
const utils_1 = require("../utils");
// ---------------------------------------------------------------------------
// AsyncLocalStorage — used to prevent double-tracing when LangChain/LangGraph
// is the outer trace owner. The OpenAI wrapper checks this store at the start
// of every intercepted call; if it is set, the call is passed through as-is.
// ---------------------------------------------------------------------------
// eslint-disable-next-line @typescript-eslint/no-explicit-any
exports.activeLangChainStorage = new node_async_hooks_1.AsyncLocalStorage();
|
|
17
|
+
// ---------------------------------------------------------------------------
// Helpers — extract text from OpenAI message arrays
// ---------------------------------------------------------------------------
/**
 * Returns the text of the most recent `user`-role message, or '' when none is
 * found. String content is returned as-is; multi-modal content arrays yield
 * the first `text` part's text (or '' when the array has no text part).
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function extractLastUserMessage(messages) {
    if (!Array.isArray(messages)) {
        return '';
    }
    // Scan newest-to-oldest for a user message we can pull text out of.
    for (const msg of [...messages].reverse()) {
        if (msg?.role !== 'user') {
            continue;
        }
        const { content } = msg;
        if (typeof content === 'string') {
            return content;
        }
        // Content can be an array of content parts (vision / multi-modal).
        if (Array.isArray(content)) {
            const textPart = content.find((part) => part.type === 'text');
            return textPart?.text ?? '';
        }
        // Content is null or some other shape — keep scanning older messages.
    }
    return '';
}
|
|
39
|
+
// When the model makes a tool call the message content is null.
// Format all tool calls as "toolName(args); toolName(args)".
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function formatToolCalls(toolCalls) {
    if (!Array.isArray(toolCalls) || toolCalls.length === 0) {
        return '';
    }
    const rendered = [];
    for (const call of toolCalls) {
        const name = call.function?.name ?? 'unknown';
        const args = call.function?.arguments ?? '';
        rendered.push(`${name}(${args})`);
    }
    return rendered.join('; ');
}
|
|
49
|
+
// Extract plain text from a Responses API input value (string or message array).
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function extractResponsesInputText(input) {
    if (typeof input === 'string') {
        return input;
    }
    if (!Array.isArray(input)) {
        return '';
    }
    // Walk from the end of the array to find the latest user message.
    for (let idx = input.length - 1; idx >= 0; idx--) {
        const message = input[idx];
        if (message?.role !== 'user') {
            continue;
        }
        const { content } = message;
        if (typeof content === 'string') {
            return content;
        }
        if (Array.isArray(content)) {
            // The Responses API uses `input_text` parts; accept plain `text` too.
            const hit = content.find((part) => part.type === 'input_text' || part.type === 'text');
            return hit?.text ?? '';
        }
    }
    return '';
}
|
|
69
|
+
// ---------------------------------------------------------------------------
// OpenAIIntegration class
// ---------------------------------------------------------------------------
/**
 * Monkey-patches an OpenAI SDK client so that every
 * `chat.completions.create` (streaming and non-streaming) and
 * `responses.create` call is recorded as a Visibe LLM span. When no group
 * trace is active (see ./group-context), each call also opens and completes
 * its own single-call trace via `this.visibe.apiClient`.
 *
 * Inherited from BaseIntegration (not visible here): `this.visibe` and
 * `this.nextSpanId()` — assumed to provide the client facade and a
 * monotonically increasing span id; confirm against ./base.
 */
class OpenAIIntegration extends base_1.BaseIntegration {
    /**
     * Replaces `client.chat.completions.create` and (when present)
     * `client.responses.create` with tracing wrappers.
     *
     * @param client - an OpenAI SDK client instance (mutated in place).
     * @param agentName - label attached to the trace and every span.
     * @returns a restore function that puts the original methods back.
     */
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    patchClient(client, agentName) {
        // Bind originals so restoring and pass-through calls keep the SDK's `this`.
        const originalCreate = client.chat.completions.create.bind(client.chat.completions);
        const originalResponsesCreate = client.responses?.create?.bind(client.responses);
        // --- chat.completions.create ---
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        client.chat.completions.create = async (params, options) => {
            // If a LangChain/LangGraph trace is active, pass through without wrapping.
            if (exports.activeLangChainStorage.getStore() !== undefined) {
                return originalCreate(params, options);
            }
            if (params.stream) {
                return this._wrapStream(originalCreate, params, options, agentName);
            }
            return this._wrapCreate(originalCreate, params, options, agentName);
        };
        // --- responses.create (Responses API) ---
        // Older SDK versions may not expose `responses`; patch only when present.
        if (originalResponsesCreate) {
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            client.responses.create = async (params, options) => {
                if (exports.activeLangChainStorage.getStore() !== undefined) {
                    return originalResponsesCreate(params, options);
                }
                return this._wrapResponsesCreate(originalResponsesCreate, params, options, agentName);
            };
        }
        // Return restore function
        return () => {
            client.chat.completions.create = originalCreate;
            if (originalResponsesCreate) {
                client.responses.create = originalResponsesCreate;
            }
        };
    }
    // ---------------------------------------------------------------------------
    // chat.completions.create — non-streaming
    // ---------------------------------------------------------------------------
    /**
     * Executes a non-streaming chat completion, records an LLM span (or an
     * error span on throw), and — when not inside a group trace — creates and
     * completes a standalone trace around the call. Rethrows any SDK error
     * after recording it, so callers see the original failure.
     */
    async _wrapCreate(
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    original, 
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    params, 
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    options, agentName) {
        // Reuse the surrounding group trace's id when one is active, otherwise
        // mint a fresh trace id for this single call.
        const groupCtx = group_context_1.activeGroupTraceStorage.getStore();
        const traceId = groupCtx?.traceId ?? (0, node_crypto_1.randomUUID)();
        const startedAt = new Date().toISOString();
        const startMs = Date.now();
        if (!groupCtx) {
            await this.visibe.apiClient.createTrace({
                trace_id: traceId,
                name: agentName,
                framework: 'openai',
                started_at: startedAt,
                ...(this.visibe.sessionId ? { session_id: this.visibe.sessionId } : {}),
            });
        }
        const spanId = this.nextSpanId();
        let response;
        let spanStatus = 'success';
        try {
            response = await original(params, options);
        }
        catch (err) {
            spanStatus = 'failed';
            const errorSpan = this.visibe.buildErrorSpan({
                spanId: this.nextSpanId(),
                errorType: err?.constructor?.name ?? 'Error',
                errorMessage: err?.message ?? String(err),
            });
            this.visibe.batcher.add(traceId, errorSpan);
            if (!groupCtx) {
                // NOTE(review): flush() is not awaited here (nor below) — presumably
                // fire-and-forget by design; confirm against the batcher implementation.
                this.visibe.batcher.flush();
                await this.visibe.apiClient.completeTrace(traceId, {
                    status: 'failed',
                    ended_at: new Date().toISOString(),
                    duration_ms: Date.now() - startMs,
                });
            }
            throw err;
        }
        const model = response.model ?? params.model ?? 'unknown';
        const inputTokens = response.usage?.prompt_tokens ?? 0;
        const outputTokens = response.usage?.completion_tokens ?? 0;
        const cost = (0, utils_1.calculateCost)(model, inputTokens, outputTokens);
        const choice = response.choices?.[0];
        const rawContent = choice?.message?.content;
        const toolCalls = choice?.message?.tool_calls ?? [];
        // Tool-call responses have null content; render the calls as text instead.
        const outputText = rawContent ?? formatToolCalls(toolCalls);
        const inputText = extractLastUserMessage(params.messages ?? []);
        const llmSpan = this.visibe.buildLLMSpan({
            spanId,
            agentName,
            model,
            status: spanStatus,
            inputTokens,
            outputTokens,
            inputText,
            outputText,
            durationMs: Date.now() - startMs,
        });
        this.visibe.batcher.add(traceId, llmSpan);
        // Notify the group tracker (if inside track()) about this LLM span.
        groupCtx?.onLLMSpan(inputTokens, outputTokens, cost);
        if (!groupCtx) {
            this.visibe.batcher.flush();
            const sent = await this.visibe.apiClient.completeTrace(traceId, {
                status: 'completed',
                ended_at: new Date().toISOString(),
                duration_ms: Date.now() - startMs,
                llm_call_count: 1,
                prompt: inputText,
                model,
                total_cost: cost,
                total_tokens: inputTokens + outputTokens,
                total_input_tokens: inputTokens,
                total_output_tokens: outputTokens,
            });
            _printSummary(agentName, model, inputTokens, outputTokens, cost, Date.now() - startMs, sent);
        }
        return response;
    }
    // ---------------------------------------------------------------------------
    // chat.completions.create — streaming (stream: true)
    // ---------------------------------------------------------------------------
    /**
     * Executes a streaming chat completion and returns a wrapped stream that
     * accumulates text deltas and usage while the caller consumes it. The span
     * and (outside a group trace) the trace are finalized exactly once, when
     * the stream is exhausted, returned from early, or throws.
     */
    async _wrapStream(
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    original, 
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    params, 
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    options, agentName) {
        const groupCtx = group_context_1.activeGroupTraceStorage.getStore();
        const traceId = groupCtx?.traceId ?? (0, node_crypto_1.randomUUID)();
        const startedAt = new Date().toISOString();
        const startMs = Date.now();
        if (!groupCtx) {
            await this.visibe.apiClient.createTrace({
                trace_id: traceId,
                name: agentName,
                framework: 'openai',
                started_at: startedAt,
                ...(this.visibe.sessionId ? { session_id: this.visibe.sessionId } : {}),
            });
        }
        // Inject stream_options so the final chunk includes usage data.
        const augmentedParams = {
            ...params,
            stream_options: { ...params.stream_options, include_usage: true },
        };
        const spanId = this.nextSpanId();
        // Accumulators mutated by the wrapped iterator as chunks arrive.
        let outputText = '';
        let inputTokens = 0, outputTokens = 0;
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        let model = params.model ?? 'unknown';
        const originalStream = await original(augmentedParams, options);
        // Wrap the async iterator to accumulate chunks, then finalize on exhaustion.
        // NOTE(review): spreading the stream copies only its own enumerable
        // properties — methods from the SDK stream's prototype (if any) will not
        // be present on wrappedStream; confirm callers only use async iteration.
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const wrappedStream = {
            ...originalStream,
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            [Symbol.asyncIterator]() {
                const iter = originalStream[Symbol.asyncIterator]();
                return {
                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
                    async next() {
                        const result = await iter.next();
                        if (!result.done) {
                            const chunk = result.value;
                            model = chunk.model ?? model;
                            const delta = chunk.choices?.[0]?.delta?.content;
                            if (delta)
                                outputText += delta;
                            // Last chunk carries usage when stream_options.include_usage is set
                            if (chunk.usage) {
                                inputTokens = chunk.usage.prompt_tokens ?? 0;
                                outputTokens = chunk.usage.completion_tokens ?? 0;
                            }
                        }
                        return result;
                    },
                };
            },
        };
        // After the caller consumes the stream, finalize the span.
        // We do this by overriding the stream's return/throw so finalization
        // runs both on normal exhaustion and on early break.
        const finalize = async (streamStatus) => {
            const cost = (0, utils_1.calculateCost)(model, inputTokens, outputTokens);
            const inputText = extractLastUserMessage(params.messages ?? []);
            const llmSpan = this.visibe.buildLLMSpan({
                spanId,
                agentName,
                model,
                status: streamStatus,
                inputTokens,
                outputTokens,
                inputText,
                outputText,
                durationMs: Date.now() - startMs,
            });
            this.visibe.batcher.add(traceId, llmSpan);
            // Notify the group tracker (if inside track()) about this LLM span.
            groupCtx?.onLLMSpan(inputTokens, outputTokens, cost);
            if (!groupCtx) {
                this.visibe.batcher.flush();
                const sent = await this.visibe.apiClient.completeTrace(traceId, {
                    status: streamStatus === 'success' ? 'completed' : 'failed',
                    ended_at: new Date().toISOString(),
                    duration_ms: Date.now() - startMs,
                    llm_call_count: 1,
                    prompt: inputText,
                    model,
                    total_cost: cost,
                    total_tokens: inputTokens + outputTokens,
                    total_input_tokens: inputTokens,
                    total_output_tokens: outputTokens,
                });
                // NOTE(review): _printSummary always prints "status: completed",
                // even when streamStatus is 'failed' — confirm this is intentional.
                _printSummary(agentName, model, inputTokens, outputTokens, cost, Date.now() - startMs, sent);
            }
        };
        // Attach finalize to the iterator methods the OpenAI SDK exposes.
        const iter = wrappedStream[Symbol.asyncIterator].bind(wrappedStream);
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        wrappedStream[Symbol.asyncIterator] = () => {
            const it = iter();
            // `finalized` guards against double-finalization when next() hits done
            // and the consumer also calls return() (e.g. for-await + break).
            let finalized = false;
            const doFinalize = async (status) => {
                if (!finalized) {
                    finalized = true;
                    await finalize(status);
                }
            };
            return {
                // eslint-disable-next-line @typescript-eslint/no-explicit-any
                async next() {
                    try {
                        const result = await it.next();
                        if (result.done)
                            await doFinalize('success');
                        return result;
                    }
                    catch (err) {
                        await doFinalize('failed');
                        throw err;
                    }
                },
                async return(value) {
                    // Early break by the consumer still counts as a successful stream.
                    await doFinalize('success');
                    return it.return ? it.return(value) : { done: true, value };
                },
                async throw(err) {
                    await doFinalize('failed');
                    return it.throw ? it.throw(err) : Promise.reject(err);
                },
            };
        };
        return wrappedStream;
    }
    // ---------------------------------------------------------------------------
    // Responses API — client.responses.create
    // ---------------------------------------------------------------------------
    /**
     * Executes a (non-streaming) Responses API call, records an LLM span built
     * from the response's `output` items, and — when not inside a group trace —
     * creates and completes a standalone trace around the call.
     */
    async _wrapResponsesCreate(
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    original, 
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    params, 
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    options, agentName) {
        const groupCtx = group_context_1.activeGroupTraceStorage.getStore();
        const traceId = groupCtx?.traceId ?? (0, node_crypto_1.randomUUID)();
        const startedAt = new Date().toISOString();
        const startMs = Date.now();
        if (!groupCtx) {
            await this.visibe.apiClient.createTrace({
                trace_id: traceId,
                name: agentName,
                framework: 'openai',
                started_at: startedAt,
                ...(this.visibe.sessionId ? { session_id: this.visibe.sessionId } : {}),
            });
        }
        const spanId = this.nextSpanId();
        let response;
        let spanStatus = 'success';
        try {
            response = await original(params, options);
        }
        catch (err) {
            spanStatus = 'failed';
            this.visibe.batcher.add(traceId, this.visibe.buildErrorSpan({
                spanId: this.nextSpanId(),
                errorType: err?.constructor?.name ?? 'Error',
                errorMessage: err?.message ?? String(err),
            }));
            if (!groupCtx) {
                this.visibe.batcher.flush();
                await this.visibe.apiClient.completeTrace(traceId, {
                    status: 'failed', ended_at: new Date().toISOString(), duration_ms: Date.now() - startMs,
                });
            }
            throw err;
        }
        // Responses API usage fields differ from chat: input_tokens/output_tokens.
        const model = response.model ?? params.model ?? 'unknown';
        const inputTokens = response.usage?.input_tokens ?? 0;
        const outputTokens = response.usage?.output_tokens ?? 0;
        const cost = (0, utils_1.calculateCost)(model, inputTokens, outputTokens);
        // Extract output text from message items; format function/web-search calls.
        const outputItems = response.output ?? [];
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const textParts = outputItems
            .filter((item) => item.type === 'message')
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            .flatMap((item) => item.content ?? [])
            .filter((c) => c.type === 'output_text' || c.type === 'text')
            .map((c) => c.text ?? '');
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const toolItems = outputItems.filter((item) => item.type === 'function_call' || item.type === 'web_search_call');
        const toolText = toolItems
            .map((item) => `${item.name ?? item.type}(${JSON.stringify(item.arguments ?? {})})`)
            .join('; ');
        // Prefer textual output; fall back to rendered tool calls when empty.
        const outputText = textParts.join('') || toolText;
        const inputText = extractResponsesInputText(params.input);
        this.visibe.batcher.add(traceId, this.visibe.buildLLMSpan({
            spanId,
            agentName,
            model,
            status: spanStatus,
            inputTokens,
            outputTokens,
            inputText,
            outputText,
            durationMs: Date.now() - startMs,
        }));
        // Notify the group tracker (if inside track()) about this LLM span.
        groupCtx?.onLLMSpan(inputTokens, outputTokens, cost);
        if (!groupCtx) {
            this.visibe.batcher.flush();
            const sent = await this.visibe.apiClient.completeTrace(traceId, {
                status: 'completed',
                ended_at: new Date().toISOString(),
                duration_ms: Date.now() - startMs,
                llm_call_count: 1,
                prompt: inputText,
                model,
                total_cost: cost,
                total_tokens: inputTokens + outputTokens,
                total_input_tokens: inputTokens,
                total_output_tokens: outputTokens,
            });
            _printSummary(agentName, model, inputTokens, outputTokens, cost, Date.now() - startMs, sent);
        }
        return response;
    }
}
exports.OpenAIIntegration = OpenAIIntegration;
|
|
429
|
+
// ---------------------------------------------------------------------------
// Module-level factory — called by client.ts applyIntegration()
// ---------------------------------------------------------------------------
/**
 * Convenience entry point: builds an OpenAIIntegration bound to `visibe` and
 * patches `client` in place. Returns the restore function from patchClient().
 */
function patchOpenAIClient(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
client, agentName, visibe) {
    return new OpenAIIntegration(visibe).patchClient(client, agentName);
}
|
|
438
|
+
// ---------------------------------------------------------------------------
// Private helpers
// ---------------------------------------------------------------------------
/**
 * Logs a single-line, human-readable summary for a standalone (non-grouped)
 * single-LLM-call trace. `sent` reflects whether completeTrace succeeded.
 */
function _printSummary(name, model, inputTokens, outputTokens, cost, durationMs, sent) {
    const seconds = (durationMs / 1000).toFixed(1);
    const totalTokens = (inputTokens + outputTokens).toLocaleString();
    const formattedCost = `$${cost.toFixed(6)}`;
    const delivery = sent ? 'OK' : 'FAILED';
    console.log(`[Visibe] Trace: ${name} | 1 LLM calls | ${totalTokens} tokens | ${formattedCost} | ${seconds}s | 0 tool calls | status: completed | model: ${model} | sent: ${delivery}`);
}
|