@dthink/bloop-sdk 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.d.ts +152 -0
- package/dist/client.js +275 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +11 -0
- package/dist/integrations/anthropic.d.ts +9 -0
- package/dist/integrations/anthropic.js +110 -0
- package/dist/integrations/index.d.ts +2 -0
- package/dist/integrations/index.js +7 -0
- package/dist/integrations/openai.d.ts +9 -0
- package/dist/integrations/openai.js +113 -0
- package/package.json +38 -0
- package/src/client.ts +371 -0
- package/src/index.ts +4 -0
- package/src/integrations/anthropic.ts +139 -0
- package/src/integrations/index.ts +2 -0
- package/src/integrations/openai.ts +142 -0
package/dist/client.d.ts
ADDED
@@ -0,0 +1,152 @@
+/**
+ * Bloop error reporting and LLM tracing client for TypeScript/Node.js.
+ * Zero external dependencies.
+ */
+export interface BloopClientOptions {
+    endpoint: string;
+    projectKey: string;
+    flushInterval?: number;
+    maxBufferSize?: number;
+    environment?: string;
+    release?: string;
+}
+export interface SpanData {
+    id: string;
+    span_type: string;
+    name: string;
+    model?: string;
+    provider?: string;
+    parent_span_id?: string;
+    input_tokens: number;
+    output_tokens: number;
+    cost: number;
+    latency_ms: number;
+    time_to_first_token_ms?: number;
+    status: string;
+    error_message?: string;
+    input?: string;
+    output?: string;
+    metadata?: Record<string, unknown>;
+    started_at: number;
+    ended_at?: number;
+}
+export interface TraceData {
+    id: string;
+    name: string;
+    status: string;
+    session_id?: string;
+    user_id?: string;
+    input?: string;
+    output?: string;
+    metadata?: Record<string, unknown>;
+    prompt_name?: string;
+    prompt_version?: string;
+    started_at: number;
+    ended_at?: number;
+    spans: SpanData[];
+}
+export declare class BloopClient {
+    private endpoint;
+    private projectKey;
+    private flushInterval;
+    private maxBufferSize;
+    private environment;
+    private release;
+    private errorBuffer;
+    private traceBuffer;
+    private timer;
+    private closed;
+    constructor(options: BloopClientOptions);
+    capture(params: {
+        errorType: string;
+        message: string;
+        source?: string;
+        stack?: string;
+        routeOrProcedure?: string;
+        screen?: string;
+        metadata?: Record<string, unknown>;
+    }): void;
+    trace(options?: {
+        name?: string;
+        traceId?: string;
+        sessionId?: string;
+        userId?: string;
+        promptName?: string;
+        promptVersion?: string;
+    }): Trace;
+    /** @internal */
+    _sendTrace(data: TraceData): void;
+    wrapOpenAI<T>(openaiClient: T): T;
+    wrapAnthropic<T>(anthropicClient: T): T;
+    flush(): void;
+    close(): void;
+    private _flushErrors;
+    private _flushTraces;
+    private _post;
+}
+export declare class Trace {
+    readonly id: string;
+    name: string;
+    status: string;
+    sessionId?: string;
+    userId?: string;
+    input?: string;
+    output?: string;
+    metadata?: Record<string, unknown>;
+    promptName?: string;
+    promptVersion?: string;
+    private client;
+    private spans;
+    private startedAt;
+    private endedAt?;
+    constructor(client: BloopClient, options: {
+        name: string;
+        traceId: string;
+        sessionId?: string;
+        userId?: string;
+        promptName?: string;
+        promptVersion?: string;
+    });
+    span(options?: {
+        spanType?: string;
+        name?: string;
+        model?: string;
+        provider?: string;
+        parentSpanId?: string;
+    }): Span;
+    /** @internal */
+    _addSpan(data: SpanData): void;
+    end(): void;
+}
+export declare class Span {
+    readonly id: string;
+    spanType: string;
+    name: string;
+    model?: string;
+    provider?: string;
+    parentSpanId?: string;
+    inputTokens: number;
+    outputTokens: number;
+    cost: number;
+    latencyMs: number;
+    timeToFirstTokenMs?: number;
+    status: string;
+    errorMessage?: string;
+    input?: string;
+    output?: string;
+    metadata?: Record<string, unknown>;
+    private trace;
+    private startedAt;
+    constructor(trace: Trace, options: {
+        spanType: string;
+        name: string;
+        model?: string;
+        provider?: string;
+        parentSpanId?: string;
+    });
+    setTokens(inputTokens: number, outputTokens: number): void;
+    setCost(cost: number): void;
+    setLatency(latencyMs: number, timeToFirstTokenMs?: number): void;
+    setError(message: string): void;
+    end(): void;
+}
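The declarations above are the package's whole public surface: BloopClient for buffered error capture, plus Trace and Span for manual LLM instrumentation. A minimal usage sketch follows; the endpoint, project key, user id, and model name are placeholders for illustration, not values shipped with the package.

import { BloopClient } from "@dthink/bloop-sdk";

// Placeholder configuration: substitute your own ingest endpoint and project key.
const bloop = new BloopClient({
  endpoint: "https://bloop.example.com",
  projectKey: process.env.BLOOP_PROJECT_KEY ?? "",
  environment: "staging",
});

// Error tracking: events are buffered and flushed every flushInterval ms,
// or immediately once maxBufferSize is reached.
bloop.capture({
  errorType: "TypeError",
  message: "Cannot read properties of undefined",
  routeOrProcedure: "POST /api/checkout",
});

// Manual LLM tracing: a Trace groups Spans; span.end() records the span,
// trace.end() enqueues the whole trace for the next flush.
const trace = bloop.trace({ name: "checkout-summary", userId: "user_123" });
const span = trace.span({ spanType: "generation", model: "gpt-4o-mini", provider: "openai" });
span.setTokens(812, 96);
span.end();
trace.end();

// Stop the flush timer and send anything still buffered.
bloop.close();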
package/dist/client.js
ADDED
@@ -0,0 +1,275 @@
+"use strict";
+/**
+ * Bloop error reporting and LLM tracing client for TypeScript/Node.js.
+ * Zero external dependencies.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Span = exports.Trace = exports.BloopClient = void 0;
+const node_crypto_1 = require("node:crypto");
+class BloopClient {
+    endpoint;
+    projectKey;
+    flushInterval;
+    maxBufferSize;
+    environment;
+    release;
+    errorBuffer = [];
+    traceBuffer = [];
+    timer = null;
+    closed = false;
+    constructor(options) {
+        this.endpoint = options.endpoint.replace(/\/+$/, "");
+        this.projectKey = options.projectKey;
+        this.flushInterval = options.flushInterval ?? 5000;
+        this.maxBufferSize = options.maxBufferSize ?? 100;
+        this.environment = options.environment ?? "production";
+        this.release = options.release ?? "";
+        this.timer = setInterval(() => this.flush(), this.flushInterval);
+        if (this.timer.unref)
+            this.timer.unref();
+    }
+    // ── Error Tracking ──
+    capture(params) {
+        this.errorBuffer.push({
+            timestamp: Math.floor(Date.now() / 1000),
+            source: params.source ?? "javascript",
+            environment: this.environment,
+            release: this.release,
+            error_type: params.errorType,
+            message: params.message,
+            stack: params.stack ?? "",
+            route_or_procedure: params.routeOrProcedure ?? "",
+            screen: params.screen ?? "",
+            metadata: params.metadata ?? {},
+        });
+        if (this.errorBuffer.length >= this.maxBufferSize) {
+            this.flush();
+        }
+    }
+    // ── LLM Tracing ──
+    trace(options = {}) {
+        return new Trace(this, {
+            name: options.name ?? "",
+            traceId: options.traceId ?? (0, node_crypto_1.randomUUID)().replace(/-/g, ""),
+            sessionId: options.sessionId,
+            userId: options.userId,
+            promptName: options.promptName,
+            promptVersion: options.promptVersion,
+        });
+    }
+    /** @internal */
+    _sendTrace(data) {
+        this.traceBuffer.push(data);
+        if (this.traceBuffer.length >= 10) {
+            this._flushTraces();
+        }
+    }
+    // ── Auto-Instrumentation ──
+    wrapOpenAI(openaiClient) {
+        const { wrapOpenAI } = require("./integrations/openai");
+        return wrapOpenAI(openaiClient, this);
+    }
+    wrapAnthropic(anthropicClient) {
+        const { wrapAnthropic } = require("./integrations/anthropic");
+        return wrapAnthropic(anthropicClient, this);
+    }
+    // ── Flush & Transport ──
+    flush() {
+        this._flushErrors();
+        this._flushTraces();
+    }
+    close() {
+        this.closed = true;
+        if (this.timer) {
+            clearInterval(this.timer);
+            this.timer = null;
+        }
+        this.flush();
+    }
+    _flushErrors() {
+        if (this.errorBuffer.length === 0)
+            return;
+        const events = [...this.errorBuffer];
+        this.errorBuffer = [];
+        if (events.length === 1) {
+            this._post("/v1/ingest", events[0]);
+        }
+        else {
+            this._post("/v1/ingest/batch", { events });
+        }
+    }
+    _flushTraces() {
+        if (this.traceBuffer.length === 0)
+            return;
+        const traces = [...this.traceBuffer];
+        this.traceBuffer = [];
+        this._post("/v1/traces/batch", { traces }).catch(() => { });
+    }
+    async _post(path, payload) {
+        const body = JSON.stringify(payload);
+        const sig = (0, node_crypto_1.createHmac)("sha256", this.projectKey)
+            .update(body)
+            .digest("hex");
+        try {
+            await fetch(`${this.endpoint}${path}`, {
+                method: "POST",
+                headers: {
+                    "Content-Type": "application/json",
+                    "X-Signature": sig,
+                },
+                body,
+            });
+        }
+        catch {
+            // Silently drop on failure (never crash the app)
+        }
+    }
+}
+exports.BloopClient = BloopClient;
+class Trace {
+    id;
+    name;
+    status = "completed";
+    sessionId;
+    userId;
+    input;
+    output;
+    metadata;
+    promptName;
+    promptVersion;
+    client;
+    spans = [];
+    startedAt;
+    endedAt;
+    constructor(client, options) {
+        this.client = client;
+        this.id = options.traceId;
+        this.name = options.name;
+        this.sessionId = options.sessionId;
+        this.userId = options.userId;
+        this.promptName = options.promptName;
+        this.promptVersion = options.promptVersion;
+        this.startedAt = Date.now();
+    }
+    span(options = {}) {
+        return new Span(this, {
+            spanType: options.spanType ?? "generation",
+            name: options.name ?? "",
+            model: options.model,
+            provider: options.provider,
+            parentSpanId: options.parentSpanId,
+        });
+    }
+    /** @internal */
+    _addSpan(data) {
+        this.spans.push(data);
+    }
+    end() {
+        this.endedAt = Date.now();
+        const data = {
+            id: this.id,
+            name: this.name,
+            status: this.status,
+            started_at: this.startedAt,
+            ended_at: this.endedAt,
+            spans: this.spans,
+        };
+        if (this.sessionId)
+            data.session_id = this.sessionId;
+        if (this.userId)
+            data.user_id = this.userId;
+        if (this.input !== undefined)
+            data.input = this.input;
+        if (this.output !== undefined)
+            data.output = this.output;
+        if (this.metadata)
+            data.metadata = this.metadata;
+        if (this.promptName)
+            data.prompt_name = this.promptName;
+        if (this.promptVersion)
+            data.prompt_version = this.promptVersion;
+        this.client._sendTrace(data);
+    }
+}
+exports.Trace = Trace;
+class Span {
+    id;
+    spanType;
+    name;
+    model;
+    provider;
+    parentSpanId;
+    inputTokens = 0;
+    outputTokens = 0;
+    cost = 0;
+    latencyMs = 0;
+    timeToFirstTokenMs;
+    status = "ok";
+    errorMessage;
+    input;
+    output;
+    metadata;
+    trace;
+    startedAt;
+    constructor(trace, options) {
+        this.trace = trace;
+        this.id = (0, node_crypto_1.randomUUID)().replace(/-/g, "");
+        this.spanType = options.spanType;
+        this.name = options.name;
+        this.model = options.model;
+        this.provider = options.provider;
+        this.parentSpanId = options.parentSpanId;
+        this.startedAt = Date.now();
+    }
+    setTokens(inputTokens, outputTokens) {
+        this.inputTokens = inputTokens;
+        this.outputTokens = outputTokens;
+    }
+    setCost(cost) {
+        this.cost = cost;
+    }
+    setLatency(latencyMs, timeToFirstTokenMs) {
+        this.latencyMs = latencyMs;
+        this.timeToFirstTokenMs = timeToFirstTokenMs;
+    }
+    setError(message) {
+        this.status = "error";
+        this.errorMessage = message;
+    }
+    end() {
+        const endedAt = Date.now();
+        if (this.latencyMs === 0) {
+            this.latencyMs = endedAt - this.startedAt;
+        }
+        const data = {
+            id: this.id,
+            span_type: this.spanType,
+            name: this.name,
+            input_tokens: this.inputTokens,
+            output_tokens: this.outputTokens,
+            cost: this.cost,
+            latency_ms: this.latencyMs,
+            status: this.status,
+            started_at: this.startedAt,
+            ended_at: endedAt,
+        };
+        if (this.model)
+            data.model = this.model;
+        if (this.provider)
+            data.provider = this.provider;
+        if (this.parentSpanId)
+            data.parent_span_id = this.parentSpanId;
+        if (this.timeToFirstTokenMs !== undefined)
+            data.time_to_first_token_ms = this.timeToFirstTokenMs;
+        if (this.errorMessage)
+            data.error_message = this.errorMessage;
+        if (this.input !== undefined)
+            data.input = this.input;
+        if (this.output !== undefined)
+            data.output = this.output;
+        if (this.metadata)
+            data.metadata = this.metadata;
+        this.trace._addSpan(data);
+    }
+}
+exports.Span = Span;
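A transport detail worth noting from _post(): each payload is serialized once, signed with HMAC-SHA256 keyed by the project key, and the hex digest travels in an X-Signature header; network failures are swallowed so instrumentation never crashes the host app. Below is a sketch of how a receiving service could check that header. The verify function is illustrative and not part of this package; it assumes access to the exact raw JSON body that was signed.

import { createHmac, timingSafeEqual } from "node:crypto";

// Recompute the HMAC over the raw body bytes and compare in constant time.
function verifyBloopSignature(rawBody: string, signatureHex: string, projectKey: string): boolean {
  const expected = createHmac("sha256", projectKey).update(rawBody).digest();
  const received = Buffer.from(signatureHex, "hex");
  return received.length === expected.length && timingSafeEqual(received, expected);
}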
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
@@ -0,0 +1,11 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.wrapAnthropic = exports.wrapOpenAI = exports.Span = exports.Trace = exports.BloopClient = void 0;
+var client_1 = require("./client");
+Object.defineProperty(exports, "BloopClient", { enumerable: true, get: function () { return client_1.BloopClient; } });
+Object.defineProperty(exports, "Trace", { enumerable: true, get: function () { return client_1.Trace; } });
+Object.defineProperty(exports, "Span", { enumerable: true, get: function () { return client_1.Span; } });
+var openai_1 = require("./integrations/openai");
+Object.defineProperty(exports, "wrapOpenAI", { enumerable: true, get: function () { return openai_1.wrapOpenAI; } });
+var anthropic_1 = require("./integrations/anthropic");
+Object.defineProperty(exports, "wrapAnthropic", { enumerable: true, get: function () { return anthropic_1.wrapAnthropic; } });
package/dist/integrations/anthropic.d.ts
ADDED
@@ -0,0 +1,9 @@
+/**
+ * Anthropic auto-instrumentation for bloop LLM tracing.
+ *
+ * Wraps `messages.create()` to automatically capture:
+ * - Model, tokens, latency, TTFT (streaming), errors
+ * - Cost is always 0 -- calculated server-side from pricing table
+ */
+import type { BloopClient } from "../client";
+export declare function wrapAnthropic<T>(anthropicClient: T, bloopClient: BloopClient): T;
package/dist/integrations/anthropic.js
ADDED
@@ -0,0 +1,110 @@
+"use strict";
+/**
+ * Anthropic auto-instrumentation for bloop LLM tracing.
+ *
+ * Wraps `messages.create()` to automatically capture:
+ * - Model, tokens, latency, TTFT (streaming), errors
+ * - Cost is always 0 -- calculated server-side from pricing table
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.wrapAnthropic = wrapAnthropic;
+function detectProvider(client) {
+    try {
+        const baseUrl = String(client.baseURL || client._baseURL || "");
+        if (baseUrl.includes("anthropic.com"))
+            return "anthropic";
+        const url = new URL(baseUrl);
+        return url.hostname.split(".")[0] || "anthropic";
+    }
+    catch {
+        return "anthropic";
+    }
+}
+function wrapAnthropic(anthropicClient, bloopClient) {
+    const client = anthropicClient;
+    const provider = detectProvider(client);
+    const originalCreate = client.messages.create.bind(client.messages);
+    client.messages.create = function tracedCreate(...args) {
+        const params = args[0] || {};
+        const model = params.model || "unknown";
+        const stream = params.stream || false;
+        const startMs = Date.now();
+        const trace = bloopClient.trace({ name: `${provider}/${model}` });
+        const span = trace.span({
+            spanType: "generation",
+            name: "messages.create",
+            model,
+            provider,
+        });
+        if (stream) {
+            return handleStreaming(originalCreate, args, trace, span, startMs, model);
+        }
+        return originalCreate(...args).then((response) => {
+            const endMs = Date.now();
+            const usage = response?.usage;
+            if (usage) {
+                span.setTokens(usage.input_tokens || 0, usage.output_tokens || 0);
+            }
+            span.setLatency(endMs - startMs);
+            span.cost = 0; // Server-side pricing
+            span.model = response?.model || model;
+            span.end();
+            trace.end();
+            return response;
+        }, (err) => {
+            const endMs = Date.now();
+            span.setLatency(endMs - startMs);
+            span.setError(err.message);
+            span.end();
+            trace.status = "error";
+            trace.end();
+            throw err;
+        });
+    };
+    return anthropicClient;
+}
+async function* handleStreaming(originalCreate, args, trace, span, startMs, model) {
+    let firstTokenSeen = false;
+    let inputTokens = 0;
+    let outputTokens = 0;
+    let resolvedModel = model;
+    try {
+        const stream = await originalCreate(...args);
+        for await (const event of stream) {
+            const eventType = event?.type || "";
+            if (!firstTokenSeen && eventType === "content_block_delta") {
+                firstTokenSeen = true;
+                span.timeToFirstTokenMs = Date.now() - startMs;
+            }
+            // Track usage from message_start event
+            if (eventType === "message_start" && event.message) {
+                resolvedModel = event.message.model || resolvedModel;
+                if (event.message.usage) {
+                    inputTokens = event.message.usage.input_tokens || 0;
+                }
+            }
+            // Track output tokens from message_delta event
+            if (eventType === "message_delta" && event.usage) {
+                outputTokens = event.usage.output_tokens || 0;
+            }
+            yield event;
+        }
+        const endMs = Date.now();
+        span.setTokens(inputTokens, outputTokens);
+        span.setLatency(endMs - startMs);
+        span.cost = 0;
+        span.model = resolvedModel;
+        span.end();
+        trace.end();
+    }
+    catch (err) {
+        const endMs = Date.now();
+        span.setTokens(inputTokens, outputTokens);
+        span.setLatency(endMs - startMs);
+        span.setError(err.message);
+        span.end();
+        trace.status = "error";
+        trace.end();
+        throw err;
+    }
+}
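A sketch of wiring the Anthropic wrapper into an application; it assumes the official @anthropic-ai/sdk client, and the endpoint, key, model id, and prompt are placeholders. For streaming calls the wrapper records time-to-first-token at the first content_block_delta event, input tokens from message_start, and output tokens from message_delta.

import Anthropic from "@anthropic-ai/sdk";
import { BloopClient } from "@dthink/bloop-sdk";

const bloop = new BloopClient({ endpoint: "https://bloop.example.com", projectKey: "pk_placeholder" });
// Patches messages.create in place and returns the same client instance.
const anthropic = bloop.wrapAnthropic(new Anthropic());

const stream = await anthropic.messages.create({
  model: "claude-sonnet-4-5", // placeholder model id
  max_tokens: 256,
  stream: true,
  messages: [{ role: "user", content: "Summarize this order in one sentence." }],
});
for await (const event of stream) {
  // Consume the stream; the wrapper ends the span and trace after the last event.
}

Note that for stream: true the patched method returns the wrapper's own async generator rather than the SDK's stream object, so only plain iteration (as above) is guaranteed to work on the wrapped call.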
package/dist/integrations/index.js
ADDED
@@ -0,0 +1,7 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.wrapAnthropic = exports.wrapOpenAI = void 0;
+var openai_1 = require("./openai");
+Object.defineProperty(exports, "wrapOpenAI", { enumerable: true, get: function () { return openai_1.wrapOpenAI; } });
+var anthropic_1 = require("./anthropic");
+Object.defineProperty(exports, "wrapAnthropic", { enumerable: true, get: function () { return anthropic_1.wrapAnthropic; } });
package/dist/integrations/openai.d.ts
ADDED
@@ -0,0 +1,9 @@
+/**
+ * OpenAI auto-instrumentation for bloop LLM tracing.
+ *
+ * Wraps `chat.completions.create()` to automatically capture:
+ * - Model, tokens, latency, TTFT (streaming), errors
+ * - Cost is always 0 -- calculated server-side from pricing table
+ */
+import type { BloopClient } from "../client";
+export declare function wrapOpenAI<T>(openaiClient: T, bloopClient: BloopClient): T;
package/dist/integrations/openai.js
ADDED
@@ -0,0 +1,113 @@
+"use strict";
+/**
+ * OpenAI auto-instrumentation for bloop LLM tracing.
+ *
+ * Wraps `chat.completions.create()` to automatically capture:
+ * - Model, tokens, latency, TTFT (streaming), errors
+ * - Cost is always 0 -- calculated server-side from pricing table
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.wrapOpenAI = wrapOpenAI;
+const PROVIDER_MAP = {
+    "api.openai.com": "openai",
+    "api.minimax.io": "minimax",
+    "api.moonshot.ai": "kimi",
+    "generativelanguage.googleapis.com": "google",
+};
+function detectProvider(client) {
+    try {
+        const baseUrl = String(client.baseURL || "");
+        for (const [domain, provider] of Object.entries(PROVIDER_MAP)) {
+            if (baseUrl.includes(domain))
+                return provider;
+        }
+        const url = new URL(baseUrl);
+        return url.hostname.split(".")[0] || "openai";
+    }
+    catch {
+        return "openai";
+    }
+}
+function wrapOpenAI(openaiClient, bloopClient) {
+    const client = openaiClient;
+    const provider = detectProvider(client);
+    const originalCreate = client.chat.completions.create.bind(client.chat.completions);
+    client.chat.completions.create = function tracedCreate(...args) {
+        const params = args[0] || {};
+        const model = params.model || "unknown";
+        const stream = params.stream || false;
+        const startMs = Date.now();
+        const trace = bloopClient.trace({ name: `${provider}/${model}` });
+        const span = trace.span({
+            spanType: "generation",
+            name: "chat.completions.create",
+            model,
+            provider,
+        });
+        if (stream) {
+            return handleStreaming(originalCreate, args, trace, span, startMs, model);
+        }
+        return originalCreate(...args).then((response) => {
+            const endMs = Date.now();
+            const usage = response?.usage;
+            if (usage) {
+                span.setTokens(usage.prompt_tokens || 0, usage.completion_tokens || 0);
+            }
+            span.setLatency(endMs - startMs);
+            span.cost = 0; // Server-side pricing
+            span.model = response?.model || model;
+            span.end();
+            trace.end();
+            return response;
+        }, (err) => {
+            const endMs = Date.now();
+            span.setLatency(endMs - startMs);
+            span.setError(err.message);
+            span.end();
+            trace.status = "error";
+            trace.end();
+            throw err;
+        });
+    };
+    return openaiClient;
+}
+async function* handleStreaming(originalCreate, args, trace, span, startMs, model) {
+    let firstTokenSeen = false;
+    let inputTokens = 0;
+    let outputTokens = 0;
+    let resolvedModel = model;
+    try {
+        const stream = await originalCreate(...args);
+        for await (const chunk of stream) {
+            if (!firstTokenSeen) {
+                firstTokenSeen = true;
+                span.timeToFirstTokenMs = Date.now() - startMs;
+            }
+            if (chunk.model)
+                resolvedModel = chunk.model;
+            // Track usage from final chunk (OpenAI includes it with stream_options)
+            if (chunk.usage) {
+                inputTokens = chunk.usage.prompt_tokens || 0;
+                outputTokens = chunk.usage.completion_tokens || 0;
+            }
+            yield chunk;
+        }
+        const endMs = Date.now();
+        span.setTokens(inputTokens, outputTokens);
+        span.setLatency(endMs - startMs);
+        span.cost = 0;
+        span.model = resolvedModel;
+        span.end();
+        trace.end();
+    }
+    catch (err) {
+        const endMs = Date.now();
+        span.setTokens(inputTokens, outputTokens);
+        span.setLatency(endMs - startMs);
+        span.setError(err.message);
+        span.end();
+        trace.status = "error";
+        trace.end();
+        throw err;
+    }
+}
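The OpenAI wrapper follows the same pattern. Since the chat completions API only attaches usage to a streamed response when the caller opts in via stream_options, the final-chunk usage tracking above only fires if that option is set. A hedged usage sketch; the endpoint, key, model id, and prompt are placeholders:

import OpenAI from "openai";
import { BloopClient } from "@dthink/bloop-sdk";

const bloop = new BloopClient({ endpoint: "https://bloop.example.com", projectKey: "pk_placeholder" });
const openai = bloop.wrapOpenAI(new OpenAI());

const stream = await openai.chat.completions.create({
  model: "gpt-4o-mini", // placeholder model id
  stream: true,
  stream_options: { include_usage: true }, // final chunk then carries prompt/completion token counts
  messages: [{ role: "user", content: "Write a haiku about logging." }],
});
for await (const chunk of stream) {
  // Consume the stream; the wrapper records TTFT on the first chunk and token
  // usage from the last usage-bearing chunk before ending the span and trace.
}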