@visibe.ai/node 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +330 -0
- package/dist/cjs/api.js +92 -0
- package/dist/cjs/client.js +242 -0
- package/dist/cjs/index.js +216 -0
- package/dist/cjs/integrations/anthropic.js +277 -0
- package/dist/cjs/integrations/base.js +32 -0
- package/dist/cjs/integrations/bedrock.js +442 -0
- package/dist/cjs/integrations/group-context.js +10 -0
- package/dist/cjs/integrations/langchain.js +274 -0
- package/dist/cjs/integrations/langgraph.js +173 -0
- package/dist/cjs/integrations/openai.js +447 -0
- package/dist/cjs/integrations/vercel-ai.js +261 -0
- package/dist/cjs/types/index.js +5 -0
- package/dist/cjs/utils.js +122 -0
- package/dist/esm/api.js +87 -0
- package/dist/esm/client.js +238 -0
- package/dist/esm/index.js +209 -0
- package/dist/esm/integrations/anthropic.js +272 -0
- package/dist/esm/integrations/base.js +28 -0
- package/dist/esm/integrations/bedrock.js +438 -0
- package/dist/esm/integrations/group-context.js +7 -0
- package/dist/esm/integrations/langchain.js +269 -0
- package/dist/esm/integrations/langgraph.js +168 -0
- package/dist/esm/integrations/openai.js +442 -0
- package/dist/esm/integrations/vercel-ai.js +258 -0
- package/dist/esm/types/index.js +4 -0
- package/dist/esm/utils.js +116 -0
- package/dist/types/api.d.ts +27 -0
- package/dist/types/client.d.ts +50 -0
- package/dist/types/index.d.ts +7 -0
- package/dist/types/integrations/anthropic.d.ts +9 -0
- package/dist/types/integrations/base.d.ts +17 -0
- package/dist/types/integrations/bedrock.d.ts +11 -0
- package/dist/types/integrations/group-context.d.ts +12 -0
- package/dist/types/integrations/langchain.d.ts +40 -0
- package/dist/types/integrations/langgraph.d.ts +13 -0
- package/dist/types/integrations/openai.d.ts +11 -0
- package/dist/types/integrations/vercel-ai.d.ts +2 -0
- package/dist/types/types/index.d.ts +21 -0
- package/dist/types/utils.d.ts +23 -0
- package/package.json +80 -0
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
import { randomUUID } from 'node:crypto';
import { createRequire } from 'node:module';
import { APIClient, SpanBatcher } from './api';
import { activeGroupTraceStorage } from './integrations/group-context';
import { calculateCost, detectProvider, truncate, LLM_CONTENT_LIMIT } from './utils';

// This is the ESM build, where there is no global `require`. Without this,
// every lazy `require('./integrations/...')` call in applyIntegration() throws
// ReferenceError and is silently swallowed, so instrumentation never happens.
const require = createRequire(import.meta.url);

// NOTE(review): relative specifiers above are extensionless ('./api'); Node's
// native ESM resolver requires explicit '.js' extensions — confirm consumers
// load this build through a bundler, or add extensions at build time.
|
|
5
|
+
/**
 * Core SDK client.
 *
 * Responsibilities visible in this file:
 *  - instrument()/uninstrument(): patch a provider SDK client so its calls
 *    emit spans (the actual patching is delegated to applyIntegration()).
 *  - track(): group multiple LLM calls into one named trace using
 *    AsyncLocalStorage (activeGroupTraceStorage).
 *  - build*Span(): construct span payloads handed to the SpanBatcher.
 *
 * Configuration resolution order in the constructor: explicit option →
 * VISIBE_* environment variable → built-in default.
 */
export class Visibe {
    /**
     * @param {object} options - { apiKey?, apiUrl?, debug?, contentLimit?, sessionId? }.
     *   `options` itself is required (no default) — init() passes `{}` when
     *   the caller supplies nothing.
     */
    constructor(options) {
        // Map of instrumented clients to their cleanup functions.
        this._instrumented = new Map();
        // Explicit options win over environment variables.
        const apiKey = options.apiKey ?? process.env['VISIBE_API_KEY'];
        const apiUrl = options.apiUrl ?? process.env['VISIBE_API_URL'];
        // `===` binds tighter than `??`: env only enables debug when set to '1'.
        const debug = options.debug ?? process.env['VISIBE_DEBUG'] === '1';
        // Truncation limit for LLM input/output text: option → env → default.
        // NOTE(review): a non-numeric VISIBE_CONTENT_LIMIT yields NaN here
        // (NaN is not nullish, so it does NOT fall through to the default).
        const limit = options.contentLimit
            ?? (process.env['VISIBE_CONTENT_LIMIT'] ? Number(process.env['VISIBE_CONTENT_LIMIT']) : undefined)
            ?? LLM_CONTENT_LIMIT;
        this.contentLimit = limit;
        this.sessionId = options.sessionId;
        this.debug = debug;
        this.apiClient = new APIClient({ apiKey, apiUrl });
        this.batcher = new SpanBatcher(this.apiClient);
    }
    // ---------------------------------------------------------------------------
    // instrument() — wrap a client so each call creates its own trace.
    // ---------------------------------------------------------------------------
    /**
     * Patch `client` in place. No-op if the client is already instrumented or
     * its type is not recognised by applyIntegration().
     * @param {object} client - A provider SDK client instance.
     * @param {{name?: string}} [options] - Agent name attached to spans.
     */
    instrument(client, options) {
        if (this._instrumented.has(client))
            return; // already patched
        const name = options?.name ?? 'agent';
        // Detect which kind of client this is and apply the right integration.
        // Integrations are resolved lazily here to avoid circular imports at module load.
        const restore = applyIntegration(client, name, this);
        if (restore) {
            this._instrumented.set(client, restore);
        }
    }
    /** Undo instrument(): run the saved restore function, if any. */
    uninstrument(client) {
        const restore = this._instrumented.get(client);
        if (restore) {
            restore();
            this._instrumented.delete(client);
        }
    }
    // ---------------------------------------------------------------------------
    // track() — group multiple LLM calls into a single named trace.
    //
    // Instruments the client if not already done, then runs fn() inside an
    // AsyncLocalStorage context so all integrations automatically route their
    // spans into the shared traceId and report token/cost totals back here.
    // ---------------------------------------------------------------------------
    /**
     * @param {object} client - SDK client whose calls should be grouped.
     * @param {string} name - Trace name (also used as agent name when
     *   temporarily instrumenting).
     * @param {Function} fn - Async workload; its return value is returned.
     * @returns {Promise<*>} Whatever fn() resolves to; rejects if fn() throws
     *   (the trace is still completed with status 'failed').
     */
    async track(client, name, fn) {
        const traceId = randomUUID();
        const startedAt = new Date().toISOString();
        const startMs = Date.now();
        // Create the trace up-front so spans emitted during fn() have a home.
        await this.apiClient.createTrace({
            trace_id: traceId,
            name,
            framework: detectFrameworkName(client),
            started_at: startedAt,
            ...(this.sessionId ? { session_id: this.sessionId } : {}),
        });
        // Accumulators updated by the group context callbacks (called from integrations).
        let llmCallCount = 0;
        let toolCallCount = 0;
        let totalInput = 0;
        let totalOutput = 0;
        let totalCost = 0;
        const groupCtx = {
            traceId,
            onLLMSpan: (inputTokens, outputTokens, cost) => {
                llmCallCount++;
                totalInput += inputTokens;
                totalOutput += outputTokens;
                totalCost += cost;
            },
            onToolSpan: () => { toolCallCount++; },
        };
        // Temporarily instrument client if not already done so that the integration
        // patch is in place for the duration of fn().
        const wasInstrumented = this._instrumented.has(client);
        if (!wasInstrumented) {
            this.instrument(client, { name });
        }
        // Run fn() inside the group ALS context. All instrumented integrations
        // check activeGroupTraceStorage.getStore() — when set they skip their own
        // createTrace/completeTrace calls and route spans into the shared traceId.
        return activeGroupTraceStorage.run(groupCtx, async () => {
            let status = 'completed';
            try {
                return await fn();
            }
            catch (err) {
                // Mark the trace failed but re-throw so the caller still sees
                // the original error; completion happens in `finally`.
                status = 'failed';
                throw err;
            }
            finally {
                if (!wasInstrumented) {
                    this.uninstrument(client);
                }
                const durationMs = Date.now() - startMs;
                // CRITICAL ORDER: flush spans first, then complete the trace.
                // The backend computes all breakdowns from spans at read time.
                // NOTE(review): flush() is not awaited — if SpanBatcher.flush()
                // is async, completeTrace below may race the span upload;
                // confirm flush hands off the batch synchronously.
                this.batcher.flush();
                const sent = await this.apiClient.completeTrace(traceId, {
                    status,
                    ended_at: new Date().toISOString(),
                    duration_ms: durationMs,
                    llm_call_count: llmCallCount,
                    // Round to 6 decimal places for the wire format.
                    total_cost: parseFloat(totalCost.toFixed(6)),
                    total_tokens: totalInput + totalOutput,
                    total_input_tokens: totalInput,
                    total_output_tokens: totalOutput,
                });
                // toolCallCount is only surfaced in this console summary, not
                // in the completeTrace payload above.
                printTraceSummary({
                    name,
                    llmCallCount,
                    toolCallCount,
                    totalTokens: totalInput + totalOutput,
                    totalCost,
                    durationMs,
                    status,
                }, sent);
            }
        });
    }
    // ---------------------------------------------------------------------------
    // flushSpans() — called by shutdown() in index.ts.
    // ---------------------------------------------------------------------------
    flushSpans() {
        this.batcher.flush();
    }
    // ---------------------------------------------------------------------------
    // Internal helpers used by integrations.
    // ---------------------------------------------------------------------------
    /**
     * Build an 'llm_call' span payload. Cost is derived from the model and
     * token counts via calculateCost(); input/output text is truncated to
     * this.contentLimit.
     */
    buildLLMSpan(opts) {
        const cost = calculateCost(opts.model, opts.inputTokens, opts.outputTokens);
        return {
            span_id: opts.spanId,
            ...(opts.parentSpanId ? { parent_span_id: opts.parentSpanId } : {}),
            type: 'llm_call',
            timestamp: new Date().toISOString(),
            agent_name: opts.agentName,
            model: opts.model,
            provider: detectProvider(opts.model),
            status: opts.status,
            description: `LLM Call using ${opts.model}`,
            input_tokens: opts.inputTokens,
            output_tokens: opts.outputTokens,
            cost,
            duration_ms: opts.durationMs,
            input_text: truncate(opts.inputText, this.contentLimit),
            output_text: truncate(opts.outputText, this.contentLimit),
        };
    }
    /**
     * Build a 'tool_call' span payload. Tool I/O is truncated to a fixed 500
     * chars (not this.contentLimit, which applies to LLM text only).
     */
    buildToolSpan(opts) {
        return {
            span_id: opts.spanId,
            ...(opts.parentSpanId ? { parent_span_id: opts.parentSpanId } : {}),
            type: 'tool_call',
            timestamp: new Date().toISOString(),
            tool_name: opts.toolName,
            agent_name: opts.agentName,
            status: opts.status,
            duration_ms: opts.durationMs,
            input_text: truncate(opts.inputText, 500),
            output_text: truncate(opts.outputText, 500),
        };
    }
    /** Build an 'error' span payload; message is truncated to 500 chars. */
    buildErrorSpan(opts) {
        return {
            span_id: opts.spanId,
            type: 'error',
            timestamp: new Date().toISOString(),
            description: `Error: ${opts.errorType}`,
            error_type: opts.errorType,
            error_message: truncate(opts.errorMessage, 500),
            // Falls back to a nominal 10 ms when no duration was measured —
            // TODO confirm whether the backend requires a non-zero duration.
            duration_ms: opts.durationMs ?? 10,
        };
    }
    /** Build an 'agent_start' marker span. */
    buildAgentStartSpan(opts) {
        return {
            span_id: opts.spanId,
            type: 'agent_start', // EXACT string — backend validates this
            timestamp: new Date().toISOString(),
            agent_name: opts.agentName,
            description: `Agent started: ${opts.agentName}`,
        };
    }
}
|
|
188
|
+
// ---------------------------------------------------------------------------
|
|
189
|
+
// Helpers (module-private)
|
|
190
|
+
// ---------------------------------------------------------------------------
|
|
191
|
+
/**
 * Print a single human-readable summary line for a finished trace.
 * @param {object} summary - { name, llmCallCount, toolCallCount, totalTokens,
 *   totalCost, durationMs, status }.
 * @param {boolean} sent - Whether completeTrace() succeeded.
 */
function printTraceSummary(summary, sent) {
    const fields = [
        `Trace: ${summary.name}`,
        `${summary.llmCallCount} LLM calls`,
        `${summary.totalTokens.toLocaleString()} tokens`,
        `$${summary.totalCost.toFixed(6)}`,
        `${(summary.durationMs / 1000).toFixed(1)}s`,
        `${summary.toolCallCount} tool calls`,
        `status: ${summary.status}`,
        `sent: ${sent ? 'OK' : 'FAILED'}`,
    ];
    console.log(`[Visibe] ${fields.join(' | ')}`);
}
|
|
198
|
+
// Detect a human-readable framework name from a client instance, keyed on
// the instance's constructor name. Used in createTrace() payload;
// integrations can refine this later. Anything unrecognised (including
// null/undefined clients) maps to 'unknown'.
function detectFrameworkName(client) {
    const byConstructor = new Map([
        ['OpenAI', 'openai'],
        ['Anthropic', 'anthropic'],
        ['BedrockRuntimeClient', 'bedrock'],
    ]);
    const ctorName = client?.constructor?.name ?? '';
    return byConstructor.get(ctorName) ?? 'unknown';
}
|
|
210
|
+
// Apply the correct integration for a given client instance.
// Returns a restore function, or null if the client type is unrecognised
// or the integration failed to load/patch.
function applyIntegration(client, name, visibe) {
    // Integrations are loaded lazily to avoid import errors when the peer
    // dependency is not installed. Each integration module exports a
    // `patchClient(client, name, visibe)` that returns a restore function.
    const constructorName = client?.constructor?.name ?? '';
    try {
        if (constructorName === 'OpenAI') {
            // eslint-disable-next-line @typescript-eslint/no-require-imports
            const { patchOpenAIClient } = require('./integrations/openai');
            return patchOpenAIClient(client, name, visibe);
        }
        if (constructorName === 'Anthropic') {
            // eslint-disable-next-line @typescript-eslint/no-require-imports
            const { patchAnthropicClient } = require('./integrations/anthropic');
            return patchAnthropicClient(client, name, visibe);
        }
        if (constructorName === 'BedrockRuntimeClient') {
            // eslint-disable-next-line @typescript-eslint/no-require-imports
            const { patchBedrockClient } = require('./integrations/bedrock');
            return patchBedrockClient(client, name, visibe);
        }
    }
    catch (err) {
        // Integration module not available or patch threw — never crash user
        // code. But do not swallow the reason entirely: in debug mode, report
        // it so broken setups (missing peer dep, ESM/require mismatch, patch
        // bug) are diagnosable instead of silently producing no traces.
        if (visibe?.debug) {
            console.error(`[Visibe] failed to instrument ${constructorName || 'client'}:`, err);
        }
    }
    return null;
}
|
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
import { createRequire } from 'node:module';
import { Visibe } from './client';

// This is the ESM build, where there is no global `require`. Without this,
// tryRequire() always returns false and patchFramework()/shutdown() silently
// fail, so auto-instrumentation never happens.
const require = createRequire(import.meta.url);
|
|
2
|
+
// ---------------------------------------------------------------------------
// Global state
// ---------------------------------------------------------------------------
// Singleton client created by init(); null before init() and again after
// shutdown() clears it.
let _globalClient = null;
// Guards one-time registration of SIGTERM/SIGINT/beforeExit handlers, which
// persists across init()/shutdown() cycles.
let _shutdownRegistered = false;
// Saved original constructors so shutdown() can restore them.
// Each is typed as `any` because we need to reassign imported class bindings.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let _originalOpenAI = null;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let _originalBedrockClient = null;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let _originalCompiledStateGraph = null;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let _originalAnthropic = null;
// Restore callback returned by patchVercelAI(); the Vercel AI integration
// supplies its own undo function instead of a saved constructor.
let _vercelAIRestore = null;
|
|
18
|
+
// ---------------------------------------------------------------------------
|
|
19
|
+
// detectFrameworks()
|
|
20
|
+
// ---------------------------------------------------------------------------
|
|
21
|
+
/**
 * Probe whether a package can be loaded via require().
 * @param {string} pkg - Package specifier to probe.
 * @returns {boolean} true if require(pkg) succeeded, false otherwise.
 */
function tryRequire(pkg) {
    let loadable = false;
    try {
        require(pkg);
        loadable = true;
    }
    catch {
        // Not installed (or failed to load) — treat as absent.
    }
    return loadable;
}
|
|
30
|
+
/**
 * Probe which supported frameworks are installed in the host application.
 * crewai and autogen are Python-only — no Node.js equivalent.
 * @returns {Record<string, boolean>} framework-id → installed.
 */
export function detectFrameworks() {
    const probes = [
        ['openai', 'openai'],
        ['langchain', '@langchain/core'],
        ['langgraph', '@langchain/langgraph'],
        ['bedrock', '@aws-sdk/client-bedrock-runtime'],
        ['vercel_ai', 'ai'],
        ['anthropic', '@anthropic-ai/sdk'],
    ];
    return Object.fromEntries(probes.map(([framework, pkg]) => [framework, tryRequire(pkg)]));
}
|
|
41
|
+
// ---------------------------------------------------------------------------
|
|
42
|
+
// patchFramework() — auto-instruments a framework at the constructor level
|
|
43
|
+
// ---------------------------------------------------------------------------
|
|
44
|
+
/**
 * Auto-instrument one framework. For openai/anthropic/bedrock this replaces
 * the module's exported constructor with a subclass that calls
 * client.instrument(this) on construction (the original is saved so
 * shutdown() can restore it). langgraph/langchain/vercel_ai delegate to
 * their integration modules instead.
 *
 * Any failure (package not installed, patch threw) is swallowed so init()
 * never crashes the host application.
 *
 * @param {string} framework - A detectFrameworks() key.
 * @param {Visibe} client - Global client used to instrument new instances.
 */
function patchFramework(framework, client) {
    try {
        switch (framework) {
            case 'openai': {
                const openaiModule = require('openai');
                // Save the original, then swap in a self-instrumenting subclass.
                _originalOpenAI = openaiModule.OpenAI;
                openaiModule.OpenAI = class extends _originalOpenAI {
                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
                    constructor(...args) {
                        super(...args);
                        try {
                            client.instrument(this);
                        }
                        catch { /* never crash new OpenAI() */ }
                    }
                };
                break;
            }
            case 'anthropic': {
                const anthropicModule = require('@anthropic-ai/sdk');
                _originalAnthropic = anthropicModule.Anthropic;
                anthropicModule.Anthropic = class extends _originalAnthropic {
                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
                    constructor(...args) {
                        super(...args);
                        try {
                            client.instrument(this);
                        }
                        catch { /* never crash new Anthropic() */ }
                    }
                };
                break;
            }
            case 'bedrock': {
                const bedrockModule = require('@aws-sdk/client-bedrock-runtime');
                _originalBedrockClient = bedrockModule.BedrockRuntimeClient;
                bedrockModule.BedrockRuntimeClient = class extends _originalBedrockClient {
                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
                    constructor(...args) {
                        super(...args);
                        try {
                            client.instrument(this);
                        }
                        catch { /* never crash new BedrockRuntimeClient() */ }
                    }
                };
                break;
            }
            case 'langgraph': {
                const lgModule = require('@langchain/langgraph');
                _originalCompiledStateGraph = lgModule.CompiledStateGraph;
                // LangGraph instrumentation is applied via LangChainCallback at the class level.
                // The actual patching happens inside the langgraph integration module.
                const { patchCompiledStateGraph } = require('./integrations/langgraph');
                patchCompiledStateGraph(lgModule, client);
                break;
            }
            case 'langchain': {
                // LangChain is instrumented via RunnableSequence constructor patching.
                const { patchRunnableSequence } = require('./integrations/langchain');
                const lcModule = require('@langchain/core/runnables');
                patchRunnableSequence(lcModule, client);
                break;
            }
            case 'vercel_ai': {
                const { patchVercelAI } = require('./integrations/vercel-ai');
                const aiModule = require('ai');
                _vercelAIRestore = patchVercelAI(aiModule, client);
                break;
            }
        }
        // Record which frameworks were successfully patched for the startup log.
        // NOTE(review): this also runs when `framework` matched no case above,
        // so an unrecognised name passed via options.frameworks is still
        // reported as instrumented — confirm whether that is intended.
        // NOTE(review): reassigning a required module's export may silently
        // fail on packages whose exports are read-only; the catch below would
        // hide that.
        _autoPatchedFrameworks.push(framework);
    }
    catch {
        // Package not installed or patch failed — skip silently.
    }
}
|
|
122
|
+
// Frameworks patched during the current init() cycle; read for the startup
// log in init() and reset by shutdown(). Declared after patchFramework(),
// which is safe because the binding is only read at call time.
let _autoPatchedFrameworks = [];
|
|
123
|
+
// ---------------------------------------------------------------------------
|
|
124
|
+
// init()
|
|
125
|
+
// ---------------------------------------------------------------------------
|
|
126
|
+
/**
 * Initialize the global Visibe client and auto-instrument every detected
 * (or explicitly requested) framework. Idempotent: a second call warns and
 * returns the existing client.
 *
 * @param {object} [options] - Visibe constructor options, plus an optional
 *   `frameworks` array that overrides auto-detection.
 * @returns {Visibe} The global client.
 */
export function init(options) {
    if (_globalClient !== null) {
        // NOTE(review): warning type reads 'VisibleSDKWarning' while the
        // product is 'Visibe' — confirm the spelling is intentional.
        process.emitWarning('[Visibe] Already initialized — call shutdown() first to re-init', { type: 'VisibleSDKWarning' });
        return _globalClient;
    }
    _globalClient = new Visibe(options ?? {});
    const detected = detectFrameworks();
    // Explicit `frameworks` option takes precedence over auto-detection.
    const toInstrument = options?.frameworks
        ?? Object.keys(detected).filter(k => detected[k]);
    for (const fw of toInstrument) {
        patchFramework(fw, _globalClient);
    }
    // Register graceful shutdown handlers.
    // NOTE: process.on('exit') fires synchronously — async HTTP requests cannot
    // complete there. SIGTERM is what Docker/Kubernetes send before killing a
    // container; without handling it all buffered spans are lost.
    // We await shutdown() so the batcher's 300 ms window completes before exit.
    if (!_shutdownRegistered) {
        // NOTE(review): graceful() always exits with code 0, even for SIGINT —
        // confirm that masking non-zero exit codes is acceptable.
        const graceful = async () => { await shutdown(); process.exit(0); };
        process.on('SIGTERM', graceful);
        process.on('SIGINT', graceful);
        // Best-effort flush on natural exit; errors deliberately ignored.
        process.on('beforeExit', () => { shutdown().catch(() => { }); });
        _shutdownRegistered = true;
    }
    if (_autoPatchedFrameworks.length > 0) {
        console.log(`[Visibe] Auto-instrumented: ${_autoPatchedFrameworks.join(', ')}`);
    }
    return _globalClient;
}
|
|
155
|
+
// ---------------------------------------------------------------------------
|
|
156
|
+
// shutdown()
|
|
157
|
+
// ---------------------------------------------------------------------------
|
|
158
|
+
/**
 * Tear down the SDK: restore all patched constructors, clear global state,
 * and flush any buffered spans. Safe to call when never initialized.
 * @returns {Promise<void>}
 */
export async function shutdown() {
    if (_globalClient === null)
        return;
    // Capture the client reference and clear global state immediately so that
    // re-init() calls work without needing to await this function.
    const client = _globalClient;
    _globalClient = null;
    _autoPatchedFrameworks = [];
    // Restore patched constructors so the SDK leaves no trace after shutdown.
    // Each step is guarded independently: a package may have been unloaded.
    const attempt = (fn) => {
        try {
            fn();
        }
        catch { /* package may have been unloaded */ }
    };
    attempt(() => {
        if (_originalOpenAI) {
            require('openai').OpenAI = _originalOpenAI;
            _originalOpenAI = null;
        }
    });
    attempt(() => {
        if (_originalAnthropic) {
            require('@anthropic-ai/sdk').Anthropic = _originalAnthropic;
            _originalAnthropic = null;
        }
    });
    attempt(() => {
        if (_originalBedrockClient) {
            require('@aws-sdk/client-bedrock-runtime').BedrockRuntimeClient = _originalBedrockClient;
            _originalBedrockClient = null;
        }
    });
    attempt(() => {
        if (_originalCompiledStateGraph) {
            require('@langchain/langgraph').CompiledStateGraph = _originalCompiledStateGraph;
            _originalCompiledStateGraph = null;
        }
    });
    attempt(() => {
        if (_vercelAIRestore) {
            _vercelAIRestore();
            _vercelAIRestore = null;
        }
    });
    // Flush buffered spans and wait up to 300 ms for in-flight HTTP requests to
    // complete. This prevents spans from being lost on SIGTERM.
    await client.batcher.shutdown();
}
|
|
206
|
+
// ---------------------------------------------------------------------------
|
|
207
|
+
// Re-export public surface
|
|
208
|
+
// ---------------------------------------------------------------------------
|
|
209
|
+
export { Visibe } from './client';
|