@lelemondev/sdk 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +124 -73
- package/dist/index.d.mts +50 -278
- package/dist/index.d.ts +50 -278
- package/dist/index.js +724 -524
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +723 -520
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/index.mjs
CHANGED
|
@@ -3,7 +3,7 @@ var __defProp = Object.defineProperty;
|
|
|
3
3
|
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
|
4
4
|
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
|
|
5
5
|
|
|
6
|
-
// src/transport.ts
|
|
6
|
+
// src/core/transport.ts
|
|
7
7
|
var DEFAULT_BATCH_SIZE = 10;
|
|
8
8
|
var DEFAULT_FLUSH_INTERVAL_MS = 1e3;
|
|
9
9
|
var DEFAULT_REQUEST_TIMEOUT_MS = 1e4;
|
|
@@ -13,8 +13,6 @@ var Transport = class {
|
|
|
13
13
|
__publicField(this, "queue", []);
|
|
14
14
|
__publicField(this, "flushPromise", null);
|
|
15
15
|
__publicField(this, "flushTimer", null);
|
|
16
|
-
__publicField(this, "pendingResolvers", /* @__PURE__ */ new Map());
|
|
17
|
-
__publicField(this, "idCounter", 0);
|
|
18
16
|
this.config = {
|
|
19
17
|
apiKey: config.apiKey,
|
|
20
18
|
endpoint: config.endpoint,
|
|
@@ -32,30 +30,21 @@ var Transport = class {
|
|
|
32
30
|
return !this.config.disabled && !!this.config.apiKey;
|
|
33
31
|
}
|
|
34
32
|
/**
|
|
35
|
-
* Enqueue trace
|
|
33
|
+
* Enqueue a trace for sending
|
|
34
|
+
* Fire-and-forget - never blocks
|
|
36
35
|
*/
|
|
37
|
-
|
|
38
|
-
if (this.config.disabled)
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
this.
|
|
44
|
-
this.enqueue({ type: "create", tempId, data });
|
|
45
|
-
});
|
|
46
|
-
}
|
|
47
|
-
/**
|
|
48
|
-
* Enqueue trace completion (fire-and-forget)
|
|
49
|
-
*/
|
|
50
|
-
enqueueComplete(traceId, data) {
|
|
51
|
-
if (this.config.disabled || !traceId) {
|
|
52
|
-
return;
|
|
36
|
+
enqueue(trace) {
|
|
37
|
+
if (this.config.disabled) return;
|
|
38
|
+
this.queue.push(trace);
|
|
39
|
+
if (this.queue.length >= this.config.batchSize) {
|
|
40
|
+
this.flush();
|
|
41
|
+
} else {
|
|
42
|
+
this.scheduleFlush();
|
|
53
43
|
}
|
|
54
|
-
this.enqueue({ type: "complete", traceId, data });
|
|
55
44
|
}
|
|
56
45
|
/**
|
|
57
|
-
* Flush all pending
|
|
58
|
-
* Safe to call multiple times
|
|
46
|
+
* Flush all pending traces
|
|
47
|
+
* Safe to call multiple times
|
|
59
48
|
*/
|
|
60
49
|
async flush() {
|
|
61
50
|
if (this.flushPromise) {
|
|
@@ -73,29 +62,16 @@ var Transport = class {
|
|
|
73
62
|
return this.flushPromise;
|
|
74
63
|
}
|
|
75
64
|
/**
|
|
76
|
-
* Get pending
|
|
65
|
+
* Get pending count (for debugging)
|
|
77
66
|
*/
|
|
78
67
|
getPendingCount() {
|
|
79
68
|
return this.queue.length;
|
|
80
69
|
}
|
|
81
70
|
// ─────────────────────────────────────────────────────────────
|
|
82
|
-
// Private
|
|
71
|
+
// Private Methods
|
|
83
72
|
// ─────────────────────────────────────────────────────────────
|
|
84
|
-
generateTempId() {
|
|
85
|
-
return `temp_${++this.idCounter}_${Date.now()}`;
|
|
86
|
-
}
|
|
87
|
-
enqueue(item) {
|
|
88
|
-
this.queue.push(item);
|
|
89
|
-
if (this.queue.length >= this.config.batchSize) {
|
|
90
|
-
this.flush();
|
|
91
|
-
} else {
|
|
92
|
-
this.scheduleFlush();
|
|
93
|
-
}
|
|
94
|
-
}
|
|
95
73
|
scheduleFlush() {
|
|
96
|
-
if (this.flushTimer !== null)
|
|
97
|
-
return;
|
|
98
|
-
}
|
|
74
|
+
if (this.flushTimer !== null) return;
|
|
99
75
|
this.flushTimer = setTimeout(() => {
|
|
100
76
|
this.flushTimer = null;
|
|
101
77
|
this.flush();
|
|
@@ -108,53 +84,12 @@ var Transport = class {
|
|
|
108
84
|
}
|
|
109
85
|
}
|
|
110
86
|
async sendBatch(items) {
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
completes: []
|
|
114
|
-
};
|
|
115
|
-
for (const item of items) {
|
|
116
|
-
if (item.type === "create") {
|
|
117
|
-
payload.creates.push({ tempId: item.tempId, data: item.data });
|
|
118
|
-
} else {
|
|
119
|
-
payload.completes.push({ traceId: item.traceId, data: item.data });
|
|
120
|
-
}
|
|
121
|
-
}
|
|
122
|
-
if (payload.creates.length === 0 && payload.completes.length === 0) {
|
|
123
|
-
return;
|
|
124
|
-
}
|
|
125
|
-
this.log("Sending batch", {
|
|
126
|
-
creates: payload.creates.length,
|
|
127
|
-
completes: payload.completes.length
|
|
128
|
-
});
|
|
87
|
+
if (items.length === 0) return;
|
|
88
|
+
this.log(`Sending batch of ${items.length} traces`);
|
|
129
89
|
try {
|
|
130
|
-
|
|
131
|
-
"POST",
|
|
132
|
-
"/api/v1/traces/batch",
|
|
133
|
-
payload
|
|
134
|
-
);
|
|
135
|
-
if (response.created) {
|
|
136
|
-
for (const [tempId, realId] of Object.entries(response.created)) {
|
|
137
|
-
const resolver = this.pendingResolvers.get(tempId);
|
|
138
|
-
if (resolver) {
|
|
139
|
-
resolver(realId);
|
|
140
|
-
this.pendingResolvers.delete(tempId);
|
|
141
|
-
}
|
|
142
|
-
}
|
|
143
|
-
}
|
|
144
|
-
if (response.errors?.length && this.config.debug) {
|
|
145
|
-
console.warn("[Lelemon] Batch errors:", response.errors);
|
|
146
|
-
}
|
|
90
|
+
await this.request("POST", "/api/v1/traces/batch", { traces: items });
|
|
147
91
|
} catch (error) {
|
|
148
|
-
|
|
149
|
-
if (item.type === "create") {
|
|
150
|
-
const resolver = this.pendingResolvers.get(item.tempId);
|
|
151
|
-
if (resolver) {
|
|
152
|
-
resolver(null);
|
|
153
|
-
this.pendingResolvers.delete(item.tempId);
|
|
154
|
-
}
|
|
155
|
-
}
|
|
156
|
-
}
|
|
157
|
-
this.log("Batch failed", error);
|
|
92
|
+
this.log("Batch send failed", error);
|
|
158
93
|
}
|
|
159
94
|
}
|
|
160
95
|
async request(method, path, body) {
|
|
@@ -199,487 +134,755 @@ var Transport = class {
|
|
|
199
134
|
}
|
|
200
135
|
};
|
|
201
136
|
|
|
202
|
-
// src/
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
return
|
|
212
|
-
}
|
|
213
|
-
function
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
const result = {
|
|
220
|
-
llmCalls: [],
|
|
221
|
-
toolCalls: [],
|
|
222
|
-
totalInputTokens: 0,
|
|
223
|
-
totalOutputTokens: 0,
|
|
224
|
-
models: [],
|
|
225
|
-
provider: "openai"
|
|
226
|
-
};
|
|
227
|
-
for (const msg of messages) {
|
|
228
|
-
if (msg.role === "system") {
|
|
229
|
-
result.systemPrompt = msg.content ?? void 0;
|
|
230
|
-
} else if (msg.role === "user" && !result.userInput) {
|
|
231
|
-
result.userInput = msg.content ?? void 0;
|
|
232
|
-
} else if (msg.role === "assistant") {
|
|
233
|
-
const llmCall = {
|
|
234
|
-
provider: "openai",
|
|
235
|
-
output: msg.content
|
|
236
|
-
};
|
|
237
|
-
if (msg.tool_calls && msg.tool_calls.length > 0) {
|
|
238
|
-
llmCall.toolCalls = msg.tool_calls.map((tc) => ({
|
|
239
|
-
name: tc.function.name,
|
|
240
|
-
input: safeParseJSON(tc.function.arguments)
|
|
241
|
-
}));
|
|
242
|
-
for (const tc of msg.tool_calls) {
|
|
243
|
-
result.toolCalls.push({
|
|
244
|
-
name: tc.function.name,
|
|
245
|
-
input: safeParseJSON(tc.function.arguments)
|
|
246
|
-
});
|
|
247
|
-
}
|
|
248
|
-
}
|
|
249
|
-
result.llmCalls.push(llmCall);
|
|
250
|
-
if (msg.content) {
|
|
251
|
-
result.output = msg.content;
|
|
252
|
-
}
|
|
253
|
-
} else if (msg.role === "tool") {
|
|
254
|
-
const lastToolCall = result.toolCalls[result.toolCalls.length - 1];
|
|
255
|
-
if (lastToolCall) {
|
|
256
|
-
lastToolCall.output = safeParseJSON(msg.content ?? "");
|
|
257
|
-
}
|
|
258
|
-
}
|
|
137
|
+
// src/core/config.ts
|
|
138
|
+
var globalConfig = {};
|
|
139
|
+
var globalTransport = null;
|
|
140
|
+
var DEFAULT_ENDPOINT = "https://api.lelemon.dev";
|
|
141
|
+
function init(config = {}) {
|
|
142
|
+
globalConfig = config;
|
|
143
|
+
globalTransport = createTransport(config);
|
|
144
|
+
}
|
|
145
|
+
function getConfig() {
|
|
146
|
+
return globalConfig;
|
|
147
|
+
}
|
|
148
|
+
function isEnabled() {
|
|
149
|
+
return getTransport().isEnabled();
|
|
150
|
+
}
|
|
151
|
+
function getTransport() {
|
|
152
|
+
if (!globalTransport) {
|
|
153
|
+
globalTransport = createTransport(globalConfig);
|
|
259
154
|
}
|
|
260
|
-
return
|
|
155
|
+
return globalTransport;
|
|
261
156
|
}
|
|
262
|
-
function
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
toolCalls: [],
|
|
266
|
-
totalInputTokens: 0,
|
|
267
|
-
totalOutputTokens: 0,
|
|
268
|
-
models: [],
|
|
269
|
-
provider: "anthropic"
|
|
270
|
-
};
|
|
271
|
-
for (const msg of messages) {
|
|
272
|
-
if (msg.role === "user") {
|
|
273
|
-
if (!result.userInput) {
|
|
274
|
-
if (typeof msg.content === "string") {
|
|
275
|
-
result.userInput = msg.content;
|
|
276
|
-
} else if (Array.isArray(msg.content)) {
|
|
277
|
-
const textContent = msg.content.find(
|
|
278
|
-
(c) => c.type === "text"
|
|
279
|
-
);
|
|
280
|
-
if (textContent && "text" in textContent) {
|
|
281
|
-
result.userInput = textContent.text;
|
|
282
|
-
}
|
|
283
|
-
}
|
|
284
|
-
}
|
|
285
|
-
if (Array.isArray(msg.content)) {
|
|
286
|
-
for (const block of msg.content) {
|
|
287
|
-
if (block.type === "tool_result" && block.tool_use_id) {
|
|
288
|
-
const toolCall = result.toolCalls.find(
|
|
289
|
-
(tc) => tc.id === block.tool_use_id
|
|
290
|
-
);
|
|
291
|
-
if (toolCall) {
|
|
292
|
-
toolCall.output = block.content;
|
|
293
|
-
}
|
|
294
|
-
}
|
|
295
|
-
}
|
|
296
|
-
}
|
|
297
|
-
} else if (msg.role === "assistant") {
|
|
298
|
-
const llmCall = {
|
|
299
|
-
provider: "anthropic"
|
|
300
|
-
};
|
|
301
|
-
if (typeof msg.content === "string") {
|
|
302
|
-
llmCall.output = msg.content;
|
|
303
|
-
result.output = msg.content;
|
|
304
|
-
} else if (Array.isArray(msg.content)) {
|
|
305
|
-
const outputs = [];
|
|
306
|
-
const toolCalls = [];
|
|
307
|
-
for (const block of msg.content) {
|
|
308
|
-
if (block.type === "text" && block.text) {
|
|
309
|
-
outputs.push(block.text);
|
|
310
|
-
} else if (block.type === "tool_use" && block.name) {
|
|
311
|
-
const tc = {
|
|
312
|
-
name: block.name,
|
|
313
|
-
input: block.input
|
|
314
|
-
};
|
|
315
|
-
if (block.id) {
|
|
316
|
-
tc.id = block.id;
|
|
317
|
-
}
|
|
318
|
-
toolCalls.push(tc);
|
|
319
|
-
result.toolCalls.push(tc);
|
|
320
|
-
}
|
|
321
|
-
}
|
|
322
|
-
if (outputs.length) {
|
|
323
|
-
llmCall.output = outputs.join("\n");
|
|
324
|
-
result.output = outputs.join("\n");
|
|
325
|
-
}
|
|
326
|
-
if (toolCalls.length) {
|
|
327
|
-
llmCall.toolCalls = toolCalls;
|
|
328
|
-
}
|
|
329
|
-
}
|
|
330
|
-
result.llmCalls.push(llmCall);
|
|
331
|
-
}
|
|
157
|
+
async function flush() {
|
|
158
|
+
if (globalTransport) {
|
|
159
|
+
await globalTransport.flush();
|
|
332
160
|
}
|
|
333
|
-
return result;
|
|
334
161
|
}
|
|
335
|
-
function
|
|
336
|
-
const
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
input: part.functionCall.args
|
|
375
|
-
};
|
|
376
|
-
toolCalls.push(tc);
|
|
377
|
-
result.toolCalls.push(tc);
|
|
378
|
-
}
|
|
379
|
-
}
|
|
380
|
-
if (outputs.length) {
|
|
381
|
-
llmCall.output = outputs.join("\n");
|
|
382
|
-
result.output = outputs.join("\n");
|
|
383
|
-
}
|
|
384
|
-
if (toolCalls.length) {
|
|
385
|
-
llmCall.toolCalls = toolCalls;
|
|
162
|
+
function createTransport(config) {
|
|
163
|
+
const apiKey = config.apiKey ?? getEnvVar("LELEMON_API_KEY");
|
|
164
|
+
if (!apiKey && !config.disabled) {
|
|
165
|
+
console.warn(
|
|
166
|
+
"[Lelemon] No API key provided. Set apiKey in init() or LELEMON_API_KEY env var. Tracing disabled."
|
|
167
|
+
);
|
|
168
|
+
}
|
|
169
|
+
return new Transport({
|
|
170
|
+
apiKey: apiKey ?? "",
|
|
171
|
+
endpoint: config.endpoint ?? DEFAULT_ENDPOINT,
|
|
172
|
+
debug: config.debug ?? false,
|
|
173
|
+
disabled: config.disabled ?? !apiKey,
|
|
174
|
+
batchSize: config.batchSize,
|
|
175
|
+
flushIntervalMs: config.flushIntervalMs,
|
|
176
|
+
requestTimeoutMs: config.requestTimeoutMs
|
|
177
|
+
});
|
|
178
|
+
}
|
|
179
|
+
function getEnvVar(name) {
|
|
180
|
+
if (typeof process !== "undefined" && process.env) {
|
|
181
|
+
return process.env[name];
|
|
182
|
+
}
|
|
183
|
+
return void 0;
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
// src/providers/base.ts
|
|
187
|
+
function safeExtract(fn, fallback) {
|
|
188
|
+
try {
|
|
189
|
+
return fn() ?? fallback;
|
|
190
|
+
} catch {
|
|
191
|
+
return fallback;
|
|
192
|
+
}
|
|
193
|
+
}
|
|
194
|
+
function getNestedValue(obj, path) {
|
|
195
|
+
try {
|
|
196
|
+
const parts = path.split(".");
|
|
197
|
+
let current = obj;
|
|
198
|
+
for (const part of parts) {
|
|
199
|
+
if (current == null || typeof current !== "object") {
|
|
200
|
+
return void 0;
|
|
386
201
|
}
|
|
387
|
-
|
|
202
|
+
current = current[part];
|
|
388
203
|
}
|
|
204
|
+
return current;
|
|
205
|
+
} catch {
|
|
206
|
+
return void 0;
|
|
389
207
|
}
|
|
390
|
-
return result;
|
|
391
208
|
}
|
|
392
|
-
function
|
|
209
|
+
function isValidNumber(value) {
|
|
210
|
+
return typeof value === "number" && !isNaN(value) && isFinite(value);
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
// src/core/capture.ts
|
|
214
|
+
var globalContext = {};
|
|
215
|
+
function setGlobalContext(options) {
|
|
216
|
+
globalContext = options;
|
|
217
|
+
}
|
|
218
|
+
function getGlobalContext() {
|
|
219
|
+
return globalContext;
|
|
220
|
+
}
|
|
221
|
+
function captureTrace(params) {
|
|
393
222
|
try {
|
|
394
|
-
|
|
223
|
+
const transport = getTransport();
|
|
224
|
+
if (!transport.isEnabled()) return;
|
|
225
|
+
const context = getGlobalContext();
|
|
226
|
+
const request = {
|
|
227
|
+
provider: params.provider,
|
|
228
|
+
model: params.model,
|
|
229
|
+
input: sanitizeInput(params.input),
|
|
230
|
+
output: sanitizeOutput(params.output),
|
|
231
|
+
inputTokens: params.inputTokens,
|
|
232
|
+
outputTokens: params.outputTokens,
|
|
233
|
+
durationMs: params.durationMs,
|
|
234
|
+
status: params.status,
|
|
235
|
+
streaming: params.streaming,
|
|
236
|
+
sessionId: context.sessionId,
|
|
237
|
+
userId: context.userId,
|
|
238
|
+
metadata: { ...context.metadata, ...params.metadata },
|
|
239
|
+
tags: context.tags
|
|
240
|
+
};
|
|
241
|
+
transport.enqueue(request);
|
|
395
242
|
} catch {
|
|
396
|
-
return str;
|
|
397
243
|
}
|
|
398
244
|
}
|
|
399
|
-
function
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
245
|
+
function captureError(params) {
|
|
246
|
+
try {
|
|
247
|
+
const transport = getTransport();
|
|
248
|
+
if (!transport.isEnabled()) return;
|
|
249
|
+
const context = getGlobalContext();
|
|
250
|
+
const request = {
|
|
251
|
+
provider: params.provider,
|
|
252
|
+
model: params.model,
|
|
253
|
+
input: sanitizeInput(params.input),
|
|
254
|
+
output: null,
|
|
255
|
+
inputTokens: 0,
|
|
256
|
+
outputTokens: 0,
|
|
257
|
+
durationMs: params.durationMs,
|
|
258
|
+
status: "error",
|
|
259
|
+
errorMessage: params.error.message,
|
|
260
|
+
errorStack: params.error.stack,
|
|
261
|
+
streaming: params.streaming,
|
|
262
|
+
sessionId: context.sessionId,
|
|
263
|
+
userId: context.userId,
|
|
264
|
+
metadata: { ...context.metadata, ...params.metadata },
|
|
265
|
+
tags: context.tags
|
|
408
266
|
};
|
|
267
|
+
transport.enqueue(request);
|
|
268
|
+
} catch {
|
|
409
269
|
}
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
270
|
+
}
|
|
271
|
+
var MAX_STRING_LENGTH = 1e5;
|
|
272
|
+
var SENSITIVE_KEYS = ["api_key", "apikey", "password", "secret", "token", "authorization"];
|
|
273
|
+
function sanitizeInput(input) {
|
|
274
|
+
return sanitize(input, 0);
|
|
275
|
+
}
|
|
276
|
+
function sanitizeOutput(output) {
|
|
277
|
+
return sanitize(output, 0);
|
|
278
|
+
}
|
|
279
|
+
function sanitize(value, depth) {
|
|
280
|
+
if (depth > 10) return "[max depth exceeded]";
|
|
281
|
+
if (value === null || value === void 0) return value;
|
|
282
|
+
if (typeof value === "string") {
|
|
283
|
+
return value.length > MAX_STRING_LENGTH ? value.slice(0, MAX_STRING_LENGTH) + "...[truncated]" : value;
|
|
284
|
+
}
|
|
285
|
+
if (typeof value === "number" || typeof value === "boolean") {
|
|
286
|
+
return value;
|
|
287
|
+
}
|
|
288
|
+
if (Array.isArray(value)) {
|
|
289
|
+
return value.map((item) => sanitize(item, depth + 1));
|
|
290
|
+
}
|
|
291
|
+
if (typeof value === "object") {
|
|
292
|
+
const sanitized = {};
|
|
293
|
+
for (const [key, val] of Object.entries(value)) {
|
|
294
|
+
if (SENSITIVE_KEYS.some((k) => key.toLowerCase().includes(k))) {
|
|
295
|
+
sanitized[key] = "[REDACTED]";
|
|
296
|
+
} else {
|
|
297
|
+
sanitized[key] = sanitize(val, depth + 1);
|
|
298
|
+
}
|
|
299
|
+
}
|
|
300
|
+
return sanitized;
|
|
420
301
|
}
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
302
|
+
return String(value);
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
// src/providers/openai.ts
|
|
306
|
+
var PROVIDER_NAME = "openai";
|
|
307
|
+
function canHandle(client) {
|
|
308
|
+
if (!client || typeof client !== "object") return false;
|
|
309
|
+
const constructorName = client.constructor?.name;
|
|
310
|
+
if (constructorName === "OpenAI") return true;
|
|
311
|
+
const c = client;
|
|
312
|
+
return !!(c.chat && c.completions);
|
|
313
|
+
}
|
|
314
|
+
function wrapChatCreate(originalFn) {
|
|
315
|
+
return async function wrappedChatCreate(...args) {
|
|
316
|
+
const startTime = Date.now();
|
|
317
|
+
const request = args[0] || {};
|
|
318
|
+
const isStreaming = request.stream === true;
|
|
319
|
+
try {
|
|
320
|
+
const response = await originalFn(...args);
|
|
321
|
+
if (isStreaming && isAsyncIterable(response)) {
|
|
322
|
+
return wrapStream(response, request, startTime);
|
|
323
|
+
}
|
|
324
|
+
const durationMs = Date.now() - startTime;
|
|
325
|
+
const extracted = extractChatCompletion(response);
|
|
326
|
+
captureTrace({
|
|
327
|
+
provider: PROVIDER_NAME,
|
|
328
|
+
model: request.model || extracted.model || "unknown",
|
|
329
|
+
input: request.messages,
|
|
330
|
+
output: extracted.output,
|
|
331
|
+
inputTokens: extracted.tokens?.inputTokens || 0,
|
|
332
|
+
outputTokens: extracted.tokens?.outputTokens || 0,
|
|
333
|
+
durationMs,
|
|
334
|
+
status: "success",
|
|
335
|
+
streaming: false
|
|
336
|
+
});
|
|
337
|
+
return response;
|
|
338
|
+
} catch (error) {
|
|
339
|
+
const durationMs = Date.now() - startTime;
|
|
340
|
+
captureError({
|
|
341
|
+
provider: PROVIDER_NAME,
|
|
342
|
+
model: request.model || "unknown",
|
|
343
|
+
input: request.messages,
|
|
344
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
345
|
+
durationMs,
|
|
346
|
+
streaming: isStreaming
|
|
347
|
+
});
|
|
348
|
+
throw error;
|
|
349
|
+
}
|
|
435
350
|
};
|
|
436
351
|
}
|
|
437
|
-
function
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
352
|
+
function wrapCompletionCreate(originalFn) {
|
|
353
|
+
return async function wrappedCompletionCreate(...args) {
|
|
354
|
+
const startTime = Date.now();
|
|
355
|
+
const request = args[0] || {};
|
|
356
|
+
try {
|
|
357
|
+
const response = await originalFn(...args);
|
|
358
|
+
const durationMs = Date.now() - startTime;
|
|
359
|
+
const extracted = extractLegacyCompletion(response);
|
|
360
|
+
captureTrace({
|
|
361
|
+
provider: PROVIDER_NAME,
|
|
362
|
+
model: request.model || extracted.model || "unknown",
|
|
363
|
+
input: request.prompt,
|
|
364
|
+
output: extracted.output,
|
|
365
|
+
inputTokens: extracted.tokens?.inputTokens || 0,
|
|
366
|
+
outputTokens: extracted.tokens?.outputTokens || 0,
|
|
367
|
+
durationMs,
|
|
368
|
+
status: "success",
|
|
369
|
+
streaming: false
|
|
370
|
+
});
|
|
371
|
+
return response;
|
|
372
|
+
} catch (error) {
|
|
373
|
+
const durationMs = Date.now() - startTime;
|
|
374
|
+
captureError({
|
|
375
|
+
provider: PROVIDER_NAME,
|
|
376
|
+
model: request.model || "unknown",
|
|
377
|
+
input: request.prompt,
|
|
378
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
379
|
+
durationMs,
|
|
380
|
+
streaming: false
|
|
381
|
+
});
|
|
382
|
+
throw error;
|
|
383
|
+
}
|
|
384
|
+
};
|
|
385
|
+
}
|
|
386
|
+
function wrapEmbeddingsCreate(originalFn) {
|
|
387
|
+
return async function wrappedEmbeddingsCreate(...args) {
|
|
388
|
+
const startTime = Date.now();
|
|
389
|
+
const request = args[0] || {};
|
|
390
|
+
try {
|
|
391
|
+
const response = await originalFn(...args);
|
|
392
|
+
const durationMs = Date.now() - startTime;
|
|
393
|
+
const tokens = extractEmbeddingTokens(response);
|
|
394
|
+
captureTrace({
|
|
395
|
+
provider: PROVIDER_NAME,
|
|
396
|
+
model: request.model || "unknown",
|
|
397
|
+
input: request.input,
|
|
398
|
+
output: "[embedding vectors]",
|
|
399
|
+
inputTokens: tokens?.inputTokens || 0,
|
|
400
|
+
outputTokens: 0,
|
|
401
|
+
durationMs,
|
|
402
|
+
status: "success",
|
|
403
|
+
streaming: false
|
|
404
|
+
});
|
|
405
|
+
return response;
|
|
406
|
+
} catch (error) {
|
|
407
|
+
const durationMs = Date.now() - startTime;
|
|
408
|
+
captureError({
|
|
409
|
+
provider: PROVIDER_NAME,
|
|
410
|
+
model: request.model || "unknown",
|
|
411
|
+
input: request.input,
|
|
412
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
413
|
+
durationMs,
|
|
414
|
+
streaming: false
|
|
415
|
+
});
|
|
416
|
+
throw error;
|
|
417
|
+
}
|
|
418
|
+
};
|
|
419
|
+
}
|
|
420
|
+
function isAsyncIterable(value) {
|
|
421
|
+
return value != null && typeof value[Symbol.asyncIterator] === "function";
|
|
422
|
+
}
|
|
423
|
+
async function* wrapStream(stream, request, startTime) {
|
|
424
|
+
const chunks = [];
|
|
425
|
+
let tokens = null;
|
|
426
|
+
let error = null;
|
|
427
|
+
try {
|
|
428
|
+
for await (const chunk of stream) {
|
|
429
|
+
const content = extractStreamChunkContent(chunk);
|
|
430
|
+
if (content) {
|
|
431
|
+
chunks.push(content);
|
|
432
|
+
}
|
|
433
|
+
const chunkTokens = extractStreamChunkTokens(chunk);
|
|
434
|
+
if (chunkTokens) {
|
|
435
|
+
tokens = chunkTokens;
|
|
436
|
+
}
|
|
437
|
+
yield chunk;
|
|
438
|
+
}
|
|
439
|
+
} catch (err) {
|
|
440
|
+
error = err instanceof Error ? err : new Error(String(err));
|
|
441
|
+
throw err;
|
|
442
|
+
} finally {
|
|
443
|
+
const durationMs = Date.now() - startTime;
|
|
444
|
+
const output = chunks.join("");
|
|
445
|
+
if (error) {
|
|
446
|
+
captureError({
|
|
447
|
+
provider: PROVIDER_NAME,
|
|
448
|
+
model: request.model || "unknown",
|
|
449
|
+
input: request.messages,
|
|
450
|
+
error,
|
|
451
|
+
durationMs,
|
|
452
|
+
streaming: true
|
|
453
|
+
});
|
|
454
|
+
} else {
|
|
455
|
+
captureTrace({
|
|
456
|
+
provider: PROVIDER_NAME,
|
|
457
|
+
model: request.model || "unknown",
|
|
458
|
+
input: request.messages,
|
|
459
|
+
output,
|
|
460
|
+
inputTokens: tokens?.inputTokens || 0,
|
|
461
|
+
outputTokens: tokens?.outputTokens || 0,
|
|
462
|
+
durationMs,
|
|
463
|
+
status: "success",
|
|
464
|
+
streaming: true
|
|
465
|
+
});
|
|
466
466
|
}
|
|
467
467
|
}
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
468
|
+
}
|
|
469
|
+
function extractChatCompletion(response) {
|
|
470
|
+
const model = safeExtract(() => getNestedValue(response, "model"), null);
|
|
471
|
+
const output = safeExtract(
|
|
472
|
+
() => getNestedValue(response, "choices.0.message.content"),
|
|
473
|
+
null
|
|
474
|
+
);
|
|
475
|
+
const tokens = extractTokens(response);
|
|
476
|
+
return { model, output, tokens };
|
|
477
|
+
}
|
|
478
|
+
function extractLegacyCompletion(response) {
|
|
479
|
+
const model = safeExtract(() => getNestedValue(response, "model"), null);
|
|
480
|
+
const output = safeExtract(
|
|
481
|
+
() => getNestedValue(response, "choices.0.text"),
|
|
482
|
+
null
|
|
483
|
+
);
|
|
484
|
+
const tokens = extractTokens(response);
|
|
485
|
+
return { model, output, tokens };
|
|
486
|
+
}
|
|
487
|
+
function extractTokens(response) {
|
|
488
|
+
try {
|
|
489
|
+
const usage = getNestedValue(response, "usage");
|
|
490
|
+
if (!usage || typeof usage !== "object") return null;
|
|
491
|
+
const u = usage;
|
|
492
|
+
const promptTokens = u.prompt_tokens;
|
|
493
|
+
const completionTokens = u.completion_tokens;
|
|
494
|
+
const totalTokens = u.total_tokens;
|
|
495
|
+
if (!isValidNumber(promptTokens) && !isValidNumber(completionTokens)) {
|
|
496
|
+
return null;
|
|
477
497
|
}
|
|
498
|
+
return {
|
|
499
|
+
inputTokens: isValidNumber(promptTokens) ? promptTokens : 0,
|
|
500
|
+
outputTokens: isValidNumber(completionTokens) ? completionTokens : 0,
|
|
501
|
+
totalTokens: isValidNumber(totalTokens) ? totalTokens : 0
|
|
502
|
+
};
|
|
503
|
+
} catch {
|
|
504
|
+
return null;
|
|
478
505
|
}
|
|
479
|
-
return result;
|
|
480
506
|
}
|
|
481
|
-
function
|
|
482
|
-
|
|
483
|
-
|
|
507
|
+
function extractEmbeddingTokens(response) {
|
|
508
|
+
try {
|
|
509
|
+
const usage = getNestedValue(response, "usage");
|
|
510
|
+
if (!usage || typeof usage !== "object") return null;
|
|
511
|
+
const u = usage;
|
|
512
|
+
const promptTokens = u.prompt_tokens;
|
|
513
|
+
const totalTokens = u.total_tokens;
|
|
514
|
+
return {
|
|
515
|
+
inputTokens: isValidNumber(promptTokens) ? promptTokens : 0,
|
|
516
|
+
outputTokens: 0,
|
|
517
|
+
totalTokens: isValidNumber(totalTokens) ? totalTokens : 0
|
|
518
|
+
};
|
|
519
|
+
} catch {
|
|
520
|
+
return null;
|
|
484
521
|
}
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
522
|
+
}
|
|
523
|
+
function extractStreamChunkContent(chunk) {
|
|
524
|
+
try {
|
|
525
|
+
return chunk?.choices?.[0]?.delta?.content ?? null;
|
|
526
|
+
} catch {
|
|
527
|
+
return null;
|
|
491
528
|
}
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
529
|
+
}
|
|
530
|
+
function extractStreamChunkTokens(chunk) {
|
|
531
|
+
try {
|
|
532
|
+
const usage = chunk?.usage;
|
|
533
|
+
if (!usage) return null;
|
|
534
|
+
return {
|
|
535
|
+
inputTokens: isValidNumber(usage.prompt_tokens) ? usage.prompt_tokens : 0,
|
|
536
|
+
outputTokens: isValidNumber(usage.completion_tokens) ? usage.completion_tokens : 0,
|
|
537
|
+
totalTokens: 0
|
|
538
|
+
};
|
|
539
|
+
} catch {
|
|
540
|
+
return null;
|
|
496
541
|
}
|
|
497
|
-
return result;
|
|
498
542
|
}
|
|
499
543
|
|
|
500
|
-
// src/
|
|
501
|
-
var
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
544
|
+
// src/providers/anthropic.ts
|
|
545
|
+
var PROVIDER_NAME2 = "anthropic";
|
|
546
|
+
function canHandle2(client) {
|
|
547
|
+
if (!client || typeof client !== "object") return false;
|
|
548
|
+
const constructorName = client.constructor?.name;
|
|
549
|
+
if (constructorName === "Anthropic") return true;
|
|
550
|
+
const c = client;
|
|
551
|
+
return !!(c.messages && typeof c.messages === "object");
|
|
507
552
|
}
|
|
508
|
-
function
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
553
|
+
function wrapMessagesCreate(originalFn) {
|
|
554
|
+
return async function wrappedMessagesCreate(...args) {
|
|
555
|
+
const startTime = Date.now();
|
|
556
|
+
const request = args[0] || {};
|
|
557
|
+
const isStreaming = request.stream === true;
|
|
558
|
+
try {
|
|
559
|
+
const response = await originalFn(...args);
|
|
560
|
+
if (isStreaming && isAsyncIterable2(response)) {
|
|
561
|
+
return wrapStream2(response, request, startTime);
|
|
562
|
+
}
|
|
563
|
+
const durationMs = Date.now() - startTime;
|
|
564
|
+
const extracted = extractMessageResponse(response);
|
|
565
|
+
captureTrace({
|
|
566
|
+
provider: PROVIDER_NAME2,
|
|
567
|
+
model: request.model || extracted.model || "unknown",
|
|
568
|
+
input: { system: request.system, messages: request.messages },
|
|
569
|
+
output: extracted.output,
|
|
570
|
+
inputTokens: extracted.tokens?.inputTokens || 0,
|
|
571
|
+
outputTokens: extracted.tokens?.outputTokens || 0,
|
|
572
|
+
durationMs,
|
|
573
|
+
status: "success",
|
|
574
|
+
streaming: false
|
|
575
|
+
});
|
|
576
|
+
return response;
|
|
577
|
+
} catch (error) {
|
|
578
|
+
const durationMs = Date.now() - startTime;
|
|
579
|
+
captureError({
|
|
580
|
+
provider: PROVIDER_NAME2,
|
|
581
|
+
model: request.model || "unknown",
|
|
582
|
+
input: { system: request.system, messages: request.messages },
|
|
583
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
584
|
+
durationMs,
|
|
585
|
+
streaming: isStreaming
|
|
586
|
+
});
|
|
587
|
+
throw error;
|
|
588
|
+
}
|
|
589
|
+
};
|
|
513
590
|
}
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
591
|
+
function wrapMessagesStream(originalFn) {
|
|
592
|
+
return function wrappedMessagesStream(...args) {
|
|
593
|
+
const startTime = Date.now();
|
|
594
|
+
const request = args[0] || {};
|
|
595
|
+
try {
|
|
596
|
+
const stream = originalFn(...args);
|
|
597
|
+
if (stream && typeof stream === "object") {
|
|
598
|
+
return wrapAnthropicStream(stream, request, startTime);
|
|
599
|
+
}
|
|
600
|
+
return stream;
|
|
601
|
+
} catch (error) {
|
|
602
|
+
const durationMs = Date.now() - startTime;
|
|
603
|
+
captureError({
|
|
604
|
+
provider: PROVIDER_NAME2,
|
|
605
|
+
model: request.model || "unknown",
|
|
606
|
+
input: { system: request.system, messages: request.messages },
|
|
607
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
608
|
+
durationMs,
|
|
609
|
+
streaming: true
|
|
610
|
+
});
|
|
611
|
+
throw error;
|
|
612
|
+
}
|
|
613
|
+
};
|
|
518
614
|
}
|
|
519
|
-
function
|
|
520
|
-
return
|
|
615
|
+
function isAsyncIterable2(value) {
|
|
616
|
+
return value != null && typeof value[Symbol.asyncIterator] === "function";
|
|
521
617
|
}
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
618
|
+
// Wraps an Anthropic-style event stream in a Proxy so that consuming it
// records exactly one trace (on completion) or one error (on throw), while
// every event is passed through to the caller unchanged. Non-iterable inputs
// are returned as-is.
function wrapAnthropicStream(stream, request, startTime) {
  const originalStream = stream;
  // Nothing to observe if the value cannot be async-iterated.
  if (!originalStream[Symbol.asyncIterator]) {
    return stream;
  }
  // Text deltas accumulated across the stream; joined into `output` at the end.
  const chunks = [];
  let inputTokens = 0;
  let outputTokens = 0;
  // Prefer the model reported by the stream itself; fall back to the request.
  let model = request.model || "unknown";
  // Once-only guard so the catch and finally paths never both report.
  let captured = false;
  // NOTE(review): chunks/captured are shared by every iterator the Proxy
  // creates — iterating the wrapped stream twice would re-append text.
  // Presumably streams are consumed once; confirm with callers.
  const wrappedIterator = async function* () {
    try {
      for await (const event of originalStream) {
        // "message_start" carries the model name and prompt-token usage.
        if (event.type === "message_start" && event.message) {
          model = event.message.model || model;
          if (event.message.usage) {
            inputTokens = event.message.usage.input_tokens || 0;
          }
        }
        // "content_block_delta" carries incremental output text.
        if (event.type === "content_block_delta" && event.delta?.text) {
          chunks.push(event.delta.text);
        }
        // "message_delta" carries the running completion-token count.
        if (event.type === "message_delta" && event.usage) {
          outputTokens = event.usage.output_tokens || 0;
        }
        yield event;
      }
    } catch (error) {
      // Report the failure once, then let the error propagate to the consumer.
      if (!captured) {
        captured = true;
        const durationMs = Date.now() - startTime;
        captureError({
          provider: PROVIDER_NAME2,
          model,
          input: { system: request.system, messages: request.messages },
          error: error instanceof Error ? error : new Error(String(error)),
          durationMs,
          streaming: true
        });
      }
      throw error;
    } finally {
      // Success path (also runs on early consumer break); skipped if the
      // catch block already reported.
      if (!captured) {
        captured = true;
        const durationMs = Date.now() - startTime;
        captureTrace({
          provider: PROVIDER_NAME2,
          model,
          input: { system: request.system, messages: request.messages },
          output: chunks.join(""),
          inputTokens,
          outputTokens,
          durationMs,
          status: "success",
          streaming: true
        });
      }
    }
  };
  // Intercept only async iteration; every other property (e.g. SDK helper
  // methods on the stream object) is forwarded untouched.
  return new Proxy(stream, {
    get(target, prop, receiver) {
      if (prop === Symbol.asyncIterator) {
        return () => wrappedIterator()[Symbol.asyncIterator]();
      }
      return Reflect.get(target, prop, receiver);
    }
  });
}
|
|
686
|
+
// Tees an Anthropic-style event stream: yields every event unchanged while
// collecting model name, token usage and output text, then reports exactly
// one trace (success) or one error capture when iteration ends.
async function* wrapStream2(stream, request, startTime) {
  const textParts = [];
  let promptTokens = 0;
  let completionTokens = 0;
  // Model reported by the stream wins over the one in the request.
  let observedModel = request.model || "unknown";
  let failure = null;
  try {
    for await (const event of stream) {
      switch (event.type) {
        case "message_start":
          // Carries the model name and the prompt-token usage.
          if (event.message) {
            observedModel = event.message.model || observedModel;
            if (event.message.usage) {
              promptTokens = event.message.usage.input_tokens || 0;
            }
          }
          break;
        case "content_block_delta":
          // Incremental output text.
          if (event.delta?.text) {
            textParts.push(event.delta.text);
          }
          break;
        case "message_delta":
          // Running completion-token count.
          if (event.usage) {
            completionTokens = event.usage.output_tokens || 0;
          }
          break;
      }
      yield event;
    }
  } catch (err) {
    failure = err instanceof Error ? err : new Error(String(err));
    throw err;
  } finally {
    // Runs on normal completion, thrown error, or early consumer break.
    const durationMs = Date.now() - startTime;
    const input = { system: request.system, messages: request.messages };
    if (failure) {
      captureError({
        provider: PROVIDER_NAME2,
        model: observedModel,
        input,
        error: failure,
        durationMs,
        streaming: true
      });
    } else {
      captureTrace({
        provider: PROVIDER_NAME2,
        model: observedModel,
        input,
        output: textParts.join(""),
        inputTokens: promptTokens,
        outputTokens: completionTokens,
        durationMs,
        status: "success",
        streaming: true
      });
    }
  }
}
|
|
737
|
+
// Pulls the model name, concatenated text output and token usage out of a
// non-streaming message response; each field is null when absent or when
// extraction throws (via safeExtract / extractTokens2).
function extractMessageResponse(response) {
  const model = safeExtract(() => response.model ?? null, null);
  const output = safeExtract(() => {
    const content = response.content;
    if (!content || !Array.isArray(content)) {
      return null;
    }
    // Join only the text blocks; an all-empty result collapses to null.
    const joined = content
      .filter((block) => block.type === "text" && block.text)
      .map((block) => block.text)
      .join("");
    return joined || null;
  }, null);
  const tokens = extractTokens2(response);
  return { model, output, tokens };
}
|
|
747
|
+
// Reads input/output token counts from `response.usage`. Returns null when
// usage is missing or neither count is a valid number; otherwise invalid
// counts are normalized to 0 and a total is included. Never throws.
function extractTokens2(response) {
  try {
    const usage = response.usage;
    if (!usage) {
      return null;
    }
    const rawInput = usage.input_tokens;
    const rawOutput = usage.output_tokens;
    const inputOk = isValidNumber(rawInput);
    const outputOk = isValidNumber(rawOutput);
    if (!inputOk && !outputOk) {
      return null;
    }
    const inputTokens = inputOk ? rawInput : 0;
    const outputTokens = outputOk ? rawOutput : 0;
    return {
      inputTokens,
      outputTokens,
      totalTokens: inputTokens + outputTokens
    };
  } catch {
    // Defensive: malformed responses must never break the caller.
    return null;
  }
}
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
|
|
662
|
-
|
|
663
|
-
|
|
664
|
-
|
|
765
|
+
|
|
766
|
+
// src/observe.ts
|
|
767
|
+
// Entry point: wraps a supported LLM client (OpenAI or Anthropic) with
// tracing proxies. Unknown client types — and any client when tracing is
// disabled — are returned unmodified.
function observe(client, options) {
  if (options) {
    setGlobalContext(options);
  }
  const config = getConfig();
  if (config.disabled) {
    return client;
  }
  // Log wrap decisions only when debug mode is on.
  const debugLog = (message) => {
    if (config.debug) {
      console.log(message);
    }
  };
  if (canHandle(client)) {
    debugLog("[Lelemon] Wrapping OpenAI client");
    return wrapOpenAI(client);
  }
  if (canHandle2(client)) {
    debugLog("[Lelemon] Wrapping Anthropic client");
    return wrapAnthropic(client);
  }
  console.warn(
    "[Lelemon] Unknown client type. Tracing not enabled. Supported: OpenAI, Anthropic"
  );
  return client;
}
|
|
792
|
+
// Proxies an OpenAI client so that its chat/completions/embeddings
// sub-clients are replaced with traced wrappers on access; all other
// properties pass through untouched.
function wrapOpenAI(client) {
  // Map (not a plain object) so inherited keys like "constructor" never match.
  const subClientWrappers = new Map([
    ["chat", wrapOpenAIChat],
    ["completions", wrapOpenAICompletions],
    ["embeddings", wrapOpenAIEmbeddings]
  ]);
  return new Proxy(client, {
    get(target, prop, receiver) {
      const value = Reflect.get(target, prop, receiver);
      const wrap = subClientWrappers.get(prop);
      if (wrap && value && typeof value === "object") {
        return wrap(value);
      }
      return value;
    }
  });
}
|
|
676
|
-
function
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
810
|
+
// Proxies the `chat` sub-client so that `chat.completions` comes back as a
// traced wrapper; everything else is forwarded as-is.
function wrapOpenAIChat(chat) {
  const handler = {
    get(target, prop, receiver) {
      const value = Reflect.get(target, prop, receiver);
      const isCompletionsApi =
        prop === "completions" && typeof value === "object" && value !== null;
      return isCompletionsApi ? wrapOpenAIChatCompletions(value) : value;
    }
  };
  return new Proxy(chat, handler);
}
|
|
821
|
+
// Proxies chat.completions so that `create` is replaced with a traced
// version (bound to the real object so internal `this` still works).
function wrapOpenAIChatCompletions(completions) {
  return new Proxy(completions, {
    get(target, prop, receiver) {
      const value = Reflect.get(target, prop, receiver);
      if (prop !== "create" || typeof value !== "function") {
        return value;
      }
      return wrapChatCreate(value.bind(target));
    }
  });
}
|
|
832
|
+
// Proxies the legacy completions sub-client, swapping `create` for a traced
// version bound to the original object.
function wrapOpenAICompletions(completions) {
  return new Proxy(completions, {
    get(target, prop, receiver) {
      const value = Reflect.get(target, prop, receiver);
      const isCreate = prop === "create" && typeof value === "function";
      return isCreate ? wrapCompletionCreate(value.bind(target)) : value;
    }
  });
}
|
|
843
|
+
// Proxies the embeddings sub-client, swapping `create` for a traced version
// bound to the original object.
function wrapOpenAIEmbeddings(embeddings) {
  return new Proxy(embeddings, {
    get(target, prop, receiver) {
      const value = Reflect.get(target, prop, receiver);
      if (prop === "create" && typeof value === "function") {
        return wrapEmbeddingsCreate(value.bind(target));
      }
      return value;
    }
  });
}
|
|
854
|
+
// Proxies an Anthropic client so that its `messages` sub-client comes back
// as a traced wrapper; all other properties pass through untouched.
function wrapAnthropic(client) {
  return new Proxy(client, {
    get(target, prop, receiver) {
      const value = Reflect.get(target, prop, receiver);
      const isMessagesApi =
        prop === "messages" && typeof value === "object" && value !== null;
      return isMessagesApi ? wrapAnthropicMessages(value) : value;
    }
  });
}
|
|
866
|
+
// Proxies the Anthropic `messages` sub-client, replacing both `create` and
// `stream` with traced versions bound to the original object.
function wrapAnthropicMessages(messages) {
  return new Proxy(messages, {
    get(target, prop, receiver) {
      const value = Reflect.get(target, prop, receiver);
      if (typeof value === "function") {
        if (prop === "create") {
          return wrapMessagesCreate(value.bind(target));
        }
        if (prop === "stream") {
          return wrapMessagesStream(value.bind(target));
        }
      }
      return value;
    }
  });
}
|
|
880
|
+
// Builds an `observe` variant pre-seeded with default options; per-call
// options are merged on top and win on key conflicts.
function createObserve(defaultOptions) {
  const scopedObserve = (client, options) =>
    observe(client, { ...defaultOptions, ...options });
  return scopedObserve;
}
|
|
682
885
|
|
|
683
|
-
export {
|
|
886
|
+
export { createObserve, flush, init, isEnabled, observe };
|
|
684
887
|
//# sourceMappingURL=index.mjs.map
|
|
685
888
|
//# sourceMappingURL=index.mjs.map
|