@lelemondev/sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +151 -0
- package/dist/index.d.mts +248 -0
- package/dist/index.d.ts +248 -0
- package/dist/index.js +551 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +544 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +72 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,544 @@
|
|
|
1
|
+
/* @lelemon/sdk - LLM Observability */
// Bundler-emitted helpers for public class-field initialization.
// __publicField defines `obj[key] = value`; when the key already exists it
// goes through defineProperty with enumerable/configurable/writable all true,
// which is observably the same as plain assignment.
var __defProp = Object.defineProperty;
var __defNormalProp = (obj, key, value) => {
  if (key in obj) {
    return __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value });
  }
  return obj[key] = value;
};
var __publicField = (obj, key, value) => {
  // Non-symbol keys are coerced to strings; symbols pass through untouched.
  const normalizedKey = typeof key !== "symbol" ? key + "" : key;
  return __defNormalProp(obj, normalizedKey, value);
};
|
|
5
|
+
|
|
6
|
+
// src/transport.ts
// Thin HTTP client for the Lelemon API. Every method is a no-op (resolving
// to {}) when the transport was built with `disabled: true`.
var Transport = class {
  /**
   * @param config - resolved SDK config: { apiKey, endpoint, debug, disabled }
   */
  constructor(config) {
    this.config = config;
  }
  /**
   * Whether this transport can actually send data: not disabled AND an
   * API key is present.
   */
  isEnabled() {
    return !this.config.disabled && !!this.config.apiKey;
  }
  /**
   * Register a new trace on the server; resolves with the parsed response
   * body (the server-assigned trace record).
   */
  async createTrace(data) {
    return this.request("POST", "/api/v1/traces", data);
  }
  /**
   * Finalize a trace (success or error payload). The response body is
   * intentionally discarded.
   */
  async completeTrace(traceId, data) {
    await this.request("PATCH", `/api/v1/traces/${traceId}`, data);
  }
  /**
   * Shared HTTP helper. Throws on non-2xx responses; returns {} for empty
   * response bodies, otherwise the parsed JSON body. Errors are re-thrown
   * after optional debug logging.
   */
  async request(method, path, body) {
    // Disabled transports never touch the network.
    if (this.config.disabled) return {};
    const url = `${this.config.endpoint}${path}`;
    if (this.config.debug) {
      console.log(`[Lelemon] ${method} ${url}`, body);
    }
    try {
      const response = await fetch(url, {
        method,
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${this.config.apiKey}`
        },
        body: body ? JSON.stringify(body) : void 0
      });
      if (!response.ok) {
        const detail = await response.text();
        throw new Error(`Lelemon API error: ${response.status} ${detail}`);
      }
      // Some endpoints (e.g. PATCH) may return an empty body.
      const raw = await response.text();
      return raw ? JSON.parse(raw) : {};
    } catch (error) {
      if (this.config.debug) {
        console.error("[Lelemon] Request failed:", error);
      }
      throw error;
    }
  }
};
|
|
67
|
+
|
|
68
|
+
// src/parser.ts
// Heuristic: OpenAI chat format — first element is an object whose `role`
// is one of the OpenAI roles.
function isOpenAIFormat(messages) {
  if (!messages.length) return false;
  const head = messages[0];
  if (typeof head !== "object" || head === null) return false;
  if (!("role" in head)) return false;
  return ["system", "user", "assistant", "tool"].includes(head.role);
}
|
|
74
|
+
// Heuristic: Anthropic messages format — role is user/assistant and the
// content is either a string or an array of content blocks.
function isAnthropicFormat(messages) {
  if (!messages.length) return false;
  const head = messages[0];
  const isMessageObject = typeof head === "object" && head !== null && "role" in head;
  if (!isMessageObject) return false;
  const roleMatches = ["user", "assistant"].includes(head.role);
  const contentMatches = typeof head.content === "string" || Array.isArray(head.content);
  return roleMatches && contentMatches;
}
|
|
79
|
+
// Heuristic: Gemini contents format — role is user/model and the message
// carries a `parts` array.
function isGeminiFormat(messages) {
  if (!messages.length) return false;
  const head = messages[0];
  if (typeof head !== "object" || head === null || !("role" in head)) {
    return false;
  }
  return ["user", "model"].includes(head.role) && "parts" in head && Array.isArray(head.parts);
}
|
|
84
|
+
function parseOpenAI(messages) {
|
|
85
|
+
const result = {
|
|
86
|
+
llmCalls: [],
|
|
87
|
+
toolCalls: [],
|
|
88
|
+
totalInputTokens: 0,
|
|
89
|
+
totalOutputTokens: 0,
|
|
90
|
+
models: [],
|
|
91
|
+
provider: "openai"
|
|
92
|
+
};
|
|
93
|
+
for (const msg of messages) {
|
|
94
|
+
if (msg.role === "system") {
|
|
95
|
+
result.systemPrompt = msg.content ?? void 0;
|
|
96
|
+
} else if (msg.role === "user" && !result.userInput) {
|
|
97
|
+
result.userInput = msg.content ?? void 0;
|
|
98
|
+
} else if (msg.role === "assistant") {
|
|
99
|
+
const llmCall = {
|
|
100
|
+
provider: "openai",
|
|
101
|
+
output: msg.content
|
|
102
|
+
};
|
|
103
|
+
if (msg.tool_calls && msg.tool_calls.length > 0) {
|
|
104
|
+
llmCall.toolCalls = msg.tool_calls.map((tc) => ({
|
|
105
|
+
name: tc.function.name,
|
|
106
|
+
input: safeParseJSON(tc.function.arguments)
|
|
107
|
+
}));
|
|
108
|
+
for (const tc of msg.tool_calls) {
|
|
109
|
+
result.toolCalls.push({
|
|
110
|
+
name: tc.function.name,
|
|
111
|
+
input: safeParseJSON(tc.function.arguments)
|
|
112
|
+
});
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
result.llmCalls.push(llmCall);
|
|
116
|
+
if (msg.content) {
|
|
117
|
+
result.output = msg.content;
|
|
118
|
+
}
|
|
119
|
+
} else if (msg.role === "tool") {
|
|
120
|
+
const lastToolCall = result.toolCalls[result.toolCalls.length - 1];
|
|
121
|
+
if (lastToolCall) {
|
|
122
|
+
lastToolCall.output = safeParseJSON(msg.content ?? "");
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
return result;
|
|
127
|
+
}
|
|
128
|
+
/**
 * Parse an Anthropic messages-format history into the trace result shape.
 *
 * - First user message (string content, or first text block) -> userInput.
 * - tool_result blocks inside user messages attach their content as the
 *   output of the recorded tool call with the matching tool_use id.
 * - Each assistant message becomes one llmCall; text blocks are joined with
 *   "\n" and the last assistant text also becomes the trace-level output;
 *   tool_use blocks are recorded both on the call and at trace level
 *   (same object: a later tool_result mutates both views).
 *
 * Token totals stay 0 here — messages carry no usage info; usage comes
 * from Trace.log()/parseResponse().
 */
function parseAnthropic(messages) {
  const result = {
    llmCalls: [],
    toolCalls: [],
    totalInputTokens: 0,
    totalOutputTokens: 0,
    models: [],
    provider: "anthropic"
  };
  for (const msg of messages) {
    if (msg.role === "user") {
      // Only the first user message populates userInput.
      if (!result.userInput) {
        if (typeof msg.content === "string") {
          result.userInput = msg.content;
        } else if (Array.isArray(msg.content)) {
          const textContent = msg.content.find(
            (c) => c.type === "text"
          );
          if (textContent && "text" in textContent) {
            result.userInput = textContent.text;
          }
        }
      }
      // Tool results come back in user turns; match by tool_use_id against
      // the ids recorded from earlier assistant tool_use blocks.
      if (Array.isArray(msg.content)) {
        for (const block of msg.content) {
          if (block.type === "tool_result" && block.tool_use_id) {
            const toolCall = result.toolCalls.find(
              (tc) => tc.id === block.tool_use_id
            );
            if (toolCall) {
              toolCall.output = block.content;
            }
          }
        }
      }
    } else if (msg.role === "assistant") {
      const llmCall = {
        provider: "anthropic"
      };
      if (typeof msg.content === "string") {
        llmCall.output = msg.content;
        result.output = msg.content;
      } else if (Array.isArray(msg.content)) {
        const outputs = [];
        const toolCalls = [];
        for (const block of msg.content) {
          if (block.type === "text" && block.text) {
            outputs.push(block.text);
          } else if (block.type === "tool_use" && block.name) {
            const tc = {
              name: block.name,
              input: block.input
            };
            if (block.id) {
              tc.id = block.id;
            }
            // Same object pushed into both lists on purpose (see docstring).
            toolCalls.push(tc);
            result.toolCalls.push(tc);
          }
        }
        if (outputs.length) {
          // Multiple text blocks in one turn are newline-joined; the most
          // recent assistant text wins as the trace output.
          llmCall.output = outputs.join("\n");
          result.output = outputs.join("\n");
        }
        if (toolCalls.length) {
          llmCall.toolCalls = toolCalls;
        }
      }
      result.llmCalls.push(llmCall);
    }
  }
  return result;
}
|
|
201
|
+
/**
 * Parse a Gemini contents-format history ({ role: "user"|"model", parts })
 * into the trace result shape.
 *
 * - First user text part -> userInput.
 * - functionResponse parts (user turns) attach outputs to recorded calls.
 * - Each model turn becomes one llmCall; text parts are newline-joined and
 *   the most recent model text also becomes the trace-level output;
 *   functionCall parts are recorded both on the call and at trace level
 *   (same object, so a later functionResponse mutates both views).
 *
 * Token totals stay 0 here — usage comes from Trace.log()/parseResponse().
 */
function parseGemini(messages) {
  const result = {
    llmCalls: [],
    toolCalls: [],
    totalInputTokens: 0,
    totalOutputTokens: 0,
    models: [],
    provider: "gemini"
  };
  for (const msg of messages) {
    if (msg.role === "user") {
      // Only the first user text part populates userInput.
      if (!result.userInput) {
        const textPart = msg.parts.find((p) => p.text);
        if (textPart?.text) {
          result.userInput = textPart.text;
        }
      }
      for (const part of msg.parts) {
        if (part.functionResponse) {
          // Gemini carries no call id, so results are matched by function
          // name. NOTE(review): this finds the FIRST call with that name —
          // repeated calls to the same function may get the wrong output;
          // confirm this is acceptable.
          const toolCall = result.toolCalls.find(
            (tc) => tc.name === part.functionResponse.name
          );
          if (toolCall) {
            toolCall.output = part.functionResponse.response;
          }
        }
      }
    } else if (msg.role === "model") {
      const llmCall = {
        provider: "gemini"
      };
      const outputs = [];
      const toolCalls = [];
      for (const part of msg.parts) {
        if (part.text) {
          outputs.push(part.text);
        } else if (part.functionCall) {
          const tc = {
            name: part.functionCall.name,
            input: part.functionCall.args
          };
          // Same object pushed into both lists on purpose (see docstring).
          toolCalls.push(tc);
          result.toolCalls.push(tc);
        }
      }
      if (outputs.length) {
        llmCall.output = outputs.join("\n");
        result.output = outputs.join("\n");
      }
      if (toolCalls.length) {
        llmCall.toolCalls = toolCalls;
      }
      result.llmCalls.push(llmCall);
    }
  }
  return result;
}
|
|
258
|
+
// Parse a JSON string, falling back to the raw input when it is not
// valid JSON (tool arguments/results are not always well-formed).
function safeParseJSON(str) {
  let parsed;
  try {
    parsed = JSON.parse(str);
  } catch {
    parsed = str;
  }
  return parsed;
}
|
|
265
|
+
function parseMessages(messages) {
|
|
266
|
+
if (!messages) {
|
|
267
|
+
return {
|
|
268
|
+
llmCalls: [],
|
|
269
|
+
toolCalls: [],
|
|
270
|
+
totalInputTokens: 0,
|
|
271
|
+
totalOutputTokens: 0,
|
|
272
|
+
models: [],
|
|
273
|
+
provider: "unknown"
|
|
274
|
+
};
|
|
275
|
+
}
|
|
276
|
+
if (!Array.isArray(messages)) {
|
|
277
|
+
return {
|
|
278
|
+
llmCalls: [],
|
|
279
|
+
toolCalls: [],
|
|
280
|
+
totalInputTokens: 0,
|
|
281
|
+
totalOutputTokens: 0,
|
|
282
|
+
models: [],
|
|
283
|
+
provider: "unknown",
|
|
284
|
+
output: typeof messages === "string" ? messages : JSON.stringify(messages)
|
|
285
|
+
};
|
|
286
|
+
}
|
|
287
|
+
if (isGeminiFormat(messages)) {
|
|
288
|
+
return parseGemini(messages);
|
|
289
|
+
} else if (isOpenAIFormat(messages)) {
|
|
290
|
+
return parseOpenAI(messages);
|
|
291
|
+
} else if (isAnthropicFormat(messages)) {
|
|
292
|
+
return parseAnthropic(messages);
|
|
293
|
+
}
|
|
294
|
+
return {
|
|
295
|
+
llmCalls: [],
|
|
296
|
+
toolCalls: [],
|
|
297
|
+
totalInputTokens: 0,
|
|
298
|
+
totalOutputTokens: 0,
|
|
299
|
+
models: [],
|
|
300
|
+
provider: "unknown"
|
|
301
|
+
};
|
|
302
|
+
}
|
|
303
|
+
function parseResponse(response) {
|
|
304
|
+
if (!response || typeof response !== "object") {
|
|
305
|
+
return {};
|
|
306
|
+
}
|
|
307
|
+
const res = response;
|
|
308
|
+
const result = {};
|
|
309
|
+
if ("model" in res) {
|
|
310
|
+
result.model = res.model;
|
|
311
|
+
}
|
|
312
|
+
if ("modelId" in res) {
|
|
313
|
+
result.model = res.modelId;
|
|
314
|
+
}
|
|
315
|
+
if ("usage" in res && typeof res.usage === "object" && res.usage !== null) {
|
|
316
|
+
const usage = res.usage;
|
|
317
|
+
result.inputTokens = usage.prompt_tokens ?? usage.input_tokens;
|
|
318
|
+
result.outputTokens = usage.completion_tokens ?? usage.output_tokens;
|
|
319
|
+
}
|
|
320
|
+
if ("$metadata" in res && "usage" in res) {
|
|
321
|
+
result.provider = "bedrock";
|
|
322
|
+
}
|
|
323
|
+
if ("anthropic_version" in res || "amazon-bedrock-invocationMetrics" in res) {
|
|
324
|
+
result.provider = "bedrock";
|
|
325
|
+
}
|
|
326
|
+
if ("candidates" in res || "promptFeedback" in res) {
|
|
327
|
+
result.provider = "gemini";
|
|
328
|
+
if ("usageMetadata" in res && typeof res.usageMetadata === "object" && res.usageMetadata !== null) {
|
|
329
|
+
const usage = res.usageMetadata;
|
|
330
|
+
result.inputTokens = usage.promptTokenCount;
|
|
331
|
+
result.outputTokens = usage.candidatesTokenCount;
|
|
332
|
+
}
|
|
333
|
+
}
|
|
334
|
+
if (result.model && !result.provider) {
|
|
335
|
+
if (result.model.startsWith("gpt") || result.model.startsWith("o1")) {
|
|
336
|
+
result.provider = "openai";
|
|
337
|
+
} else if (result.model.startsWith("claude")) {
|
|
338
|
+
result.provider = "anthropic";
|
|
339
|
+
} else if (result.model.startsWith("gemini")) {
|
|
340
|
+
result.provider = "gemini";
|
|
341
|
+
} else if (result.model.startsWith("anthropic.") || result.model.startsWith("amazon.") || result.model.startsWith("meta.") || result.model.startsWith("cohere.") || result.model.startsWith("mistral.") || result.model.includes(":")) {
|
|
342
|
+
result.provider = "bedrock";
|
|
343
|
+
}
|
|
344
|
+
}
|
|
345
|
+
return result;
|
|
346
|
+
}
|
|
347
|
+
function parseBedrockResponse(body) {
|
|
348
|
+
if (!body || typeof body !== "object") {
|
|
349
|
+
return {};
|
|
350
|
+
}
|
|
351
|
+
const res = body;
|
|
352
|
+
const result = { provider: "bedrock" };
|
|
353
|
+
if ("usage" in res && typeof res.usage === "object" && res.usage !== null) {
|
|
354
|
+
const usage = res.usage;
|
|
355
|
+
result.inputTokens = usage.input_tokens;
|
|
356
|
+
result.outputTokens = usage.output_tokens;
|
|
357
|
+
}
|
|
358
|
+
if ("amazon-bedrock-invocationMetrics" in res) {
|
|
359
|
+
const metrics = res["amazon-bedrock-invocationMetrics"];
|
|
360
|
+
if (metrics.inputTokenCount) result.inputTokens = metrics.inputTokenCount;
|
|
361
|
+
if (metrics.outputTokenCount) result.outputTokens = metrics.outputTokenCount;
|
|
362
|
+
}
|
|
363
|
+
return result;
|
|
364
|
+
}
|
|
365
|
+
|
|
366
|
+
// src/tracer.ts
// Fallback API endpoint used when config.endpoint is not provided.
var DEFAULT_ENDPOINT = "https://api.lelemon.dev";
// Module-level SDK state: set by init(), read lazily by getTransport()
// and trace(). globalTransport stays null until first use.
var globalConfig = {};
var globalTransport = null;
|
|
370
|
+
/**
 * Configure the SDK once at startup. Stores the config for later trace()
 * calls and eagerly (re)builds the transport singleton, replacing any
 * previously created one.
 */
function init(config = {}) {
  globalConfig = config;
  globalTransport = createTransport(config);
}
|
|
374
|
+
/**
 * Build a Transport from user config, falling back to the LELEMON_API_KEY
 * environment variable for the key. Warns once when no key is available
 * (unless tracing is explicitly disabled); a missing key also disables the
 * transport by default.
 */
function createTransport(config) {
  const apiKey = config.apiKey ?? getEnvVar("LELEMON_API_KEY");
  const missingKey = !apiKey;
  if (missingKey && !config.disabled) {
    console.warn(
      "[Lelemon] No API key provided. Set apiKey in config or LELEMON_API_KEY env var. Tracing disabled."
    );
  }
  const transportConfig = {
    apiKey: apiKey ?? "",
    endpoint: config.endpoint ?? DEFAULT_ENDPOINT,
    debug: config.debug ?? false,
    // Explicit config.disabled wins; otherwise disabled iff the key is missing.
    disabled: config.disabled ?? missingKey
  };
  return new Transport(transportConfig);
}
|
|
388
|
+
/**
 * Return the transport singleton, lazily creating it from the stored
 * global config so trace() works even when init() was never called.
 */
function getTransport() {
  globalTransport = globalTransport ?? createTransport(globalConfig);
  return globalTransport;
}
|
|
394
|
+
/**
 * Read an environment variable, returning undefined in runtimes that have
 * no `process.env` (e.g. browsers/edge).
 */
function getEnvVar(name) {
  const hasProcessEnv = typeof process !== "undefined" && process.env;
  return hasProcessEnv ? process.env[name] : void 0;
}
|
|
400
|
+
/**
 * A single trace lifecycle: created locally, registered on the server via
 * init(), optionally fed per-call usage via log(), and finalized exactly
 * once via success() or error(). All server failures are swallowed
 * (logged only in debug mode) so tracing never breaks the host app.
 *
 * NOTE(review): init() is fired without awaiting by trace(); if success()
 * or error() runs before init() has resolved, `this.id` is still null and
 * the completion is silently dropped — confirm this race is intended.
 */
var Trace = class {
  constructor(options, transport, debug, disabled) {
    // id is assigned by the server in init(); null until then.
    __publicField(this, "id", null);
    __publicField(this, "transport");
    __publicField(this, "options");
    __publicField(this, "startTime");
    // Guards against double completion (success() and/or error() twice).
    __publicField(this, "completed", false);
    __publicField(this, "debug");
    __publicField(this, "disabled");
    // Per-call usage records accumulated via log().
    __publicField(this, "llmCalls", []);
    this.options = options;
    this.transport = transport;
    this.startTime = Date.now();
    this.debug = debug;
    this.disabled = disabled;
  }
  /**
   * Initialize trace on server (called internally). Stores the
   * server-assigned id; on failure the trace stays id-less and later
   * completions become no-ops.
   */
  async init() {
    if (this.disabled) return;
    try {
      const result = await this.transport.createTrace({
        name: this.options.name,
        sessionId: this.options.sessionId,
        userId: this.options.userId,
        input: this.options.input,
        metadata: this.options.metadata,
        tags: this.options.tags
      });
      this.id = result.id;
    } catch (error) {
      if (this.debug) {
        console.error("[Lelemon] Failed to create trace:", error);
      }
    }
  }
  /**
   * Log an LLM response (optional - for tracking individual calls).
   * Use this if you want to track tokens per call, not just at the end.
   * Responses yielding neither a model nor any token counts are ignored.
   */
  log(response) {
    const parsed = parseResponse(response);
    if (parsed.model || parsed.inputTokens || parsed.outputTokens) {
      this.llmCalls.push(parsed);
    }
  }
  /**
   * Complete trace successfully. Idempotent: only the first success()/error()
   * call has any effect.
   * @param messages - The full message history (OpenAI/Anthropic format)
   */
  async success(messages) {
    if (this.completed) return;
    this.completed = true;
    if (this.disabled || !this.id) return;
    const durationMs = Date.now() - this.startTime;
    const parsed = parseMessages(messages);
    // Merge per-call usage (log()) with whatever the history parse found.
    const allLLMCalls = [...this.llmCalls, ...parsed.llmCalls];
    let totalInputTokens = 0;
    let totalOutputTokens = 0;
    const models = /* @__PURE__ */ new Set();
    for (const call of allLLMCalls) {
      if (call.inputTokens) totalInputTokens += call.inputTokens;
      if (call.outputTokens) totalOutputTokens += call.outputTokens;
      if (call.model) models.add(call.model);
    }
    try {
      await this.transport.completeTrace(this.id, {
        status: "completed",
        output: parsed.output,
        systemPrompt: parsed.systemPrompt,
        llmCalls: allLLMCalls,
        toolCalls: parsed.toolCalls,
        models: Array.from(models),
        totalInputTokens,
        totalOutputTokens,
        durationMs
      });
    } catch (err) {
      if (this.debug) {
        console.error("[Lelemon] Failed to complete trace:", err);
      }
    }
  }
  /**
   * Complete trace with error. Idempotent like success().
   * @param error - The error that occurred (non-Error values are wrapped)
   * @param messages - The message history up to the failure (optional)
   */
  async error(error, messages) {
    if (this.completed) return;
    this.completed = true;
    if (this.disabled || !this.id) return;
    const durationMs = Date.now() - this.startTime;
    const parsed = messages ? parseMessages(messages) : null;
    const errorObj = error instanceof Error ? error : new Error(String(error));
    const allLLMCalls = parsed ? [...this.llmCalls, ...parsed.llmCalls] : this.llmCalls;
    let totalInputTokens = 0;
    let totalOutputTokens = 0;
    const models = /* @__PURE__ */ new Set();
    for (const call of allLLMCalls) {
      if (call.inputTokens) totalInputTokens += call.inputTokens;
      if (call.outputTokens) totalOutputTokens += call.outputTokens;
      if (call.model) models.add(call.model);
    }
    const request = {
      status: "error",
      errorMessage: errorObj.message,
      errorStack: errorObj.stack,
      durationMs,
      totalInputTokens,
      totalOutputTokens,
      models: Array.from(models)
    };
    // Message-derived fields are only attached when a history was provided.
    if (parsed) {
      request.output = parsed.output;
      request.systemPrompt = parsed.systemPrompt;
      request.llmCalls = allLLMCalls;
      request.toolCalls = parsed.toolCalls;
    }
    try {
      await this.transport.completeTrace(this.id, request);
    } catch (err) {
      if (this.debug) {
        console.error("[Lelemon] Failed to complete trace:", err);
      }
    }
  }
};
|
|
529
|
+
/**
 * Start a new trace. Server registration is fire-and-forget: the Trace is
 * returned immediately and init() runs in the background with its rejection
 * handled, so callers never see an unhandled promise.
 */
function trace(options) {
  const transport = getTransport();
  const debug = globalConfig.debug ?? false;
  // Explicit global disable wins; otherwise follow the transport's state.
  const disabled = globalConfig.disabled ?? !transport.isEnabled();
  const activeTrace = new Trace(options, transport, debug, disabled);
  activeTrace.init().catch((err) => {
    if (debug) {
      console.error("[Lelemon] Trace init failed:", err);
    }
  });
  return activeTrace;
}
|
|
541
|
+
|
|
542
|
+
export { Trace, init, parseBedrockResponse, parseMessages, parseResponse, trace };
|
|
543
|
+
//# sourceMappingURL=index.mjs.map
|
|
544
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/transport.ts","../src/parser.ts","../src/tracer.ts"],"names":[],"mappings":";;;;;;AAaO,IAAM,YAAN,MAAgB;AAAA,EAGrB,YAAY,MAAA,EAAyB;AAFrC,IAAA,aAAA,CAAA,IAAA,EAAQ,QAAA,CAAA;AAGN,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA,EAKA,SAAA,GAAqB;AACnB,IAAA,OAAO,CAAC,IAAA,CAAK,MAAA,CAAO,YAAY,CAAC,CAAC,KAAK,MAAA,CAAO,MAAA;AAAA,EAChD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,IAAA,EAAmD;AACnE,IAAA,OAAO,IAAA,CAAK,OAAA,CAAwB,MAAA,EAAQ,gBAAA,EAAkB,IAAI,CAAA;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAA,CAAc,OAAA,EAAiB,IAAA,EAA2C;AAC9E,IAAA,MAAM,KAAK,OAAA,CAAQ,OAAA,EAAS,CAAA,eAAA,EAAkB,OAAO,IAAI,IAAI,CAAA;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,OAAA,CAAW,MAAA,EAAgB,IAAA,EAAc,IAAA,EAA4B;AACjF,IAAA,IAAI,IAAA,CAAK,OAAO,QAAA,EAAU;AACxB,MAAA,OAAO,EAAC;AAAA,IACV;AAEA,IAAA,MAAM,MAAM,CAAA,EAAG,IAAA,CAAK,MAAA,CAAO,QAAQ,GAAG,IAAI,CAAA,CAAA;AAE1C,IAAA,IAAI,IAAA,CAAK,OAAO,KAAA,EAAO;AACrB,MAAA,OAAA,CAAQ,IAAI,CAAA,UAAA,EAAa,MAAM,CAAA,CAAA,EAAI,GAAG,IAAI,IAAI,CAAA;AAAA,IAChD;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,QAAA,GAAW,MAAM,KAAA,CAAM,GAAA,EAAK;AAAA,QAChC,MAAA;AAAA,QACA,OAAA,EAAS;AAAA,UACP,cAAA,EAAgB,kBAAA;AAAA,UAChB,aAAA,EAAe,CAAA,OAAA,EAAU,IAAA,CAAK,MAAA,CAAO,MAAM,CAAA;AAAA,SAC7C;AAAA,QACA,IAAA,EAAM,IAAA,GAAO,IAAA,CAAK,SAAA,CAAU,IAAI,CAAA,GAAI,KAAA;AAAA,OACrC,CAAA;AAED,MAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,QAAA,MAAM,KAAA,GAAQ,MAAM,QAAA,CAAS,IAAA,EAAK;AAClC,QAAA,MAAM,IAAI,KAAA,CAAM,CAAA,mBAAA,EAAsB,SAAS,MAAM,CAAA,CAAA,EAAI,KAAK,CAAA,CAAE,CAAA;AAAA,MAClE;AAGA,MAAA,MAAM,IAAA,GAAO,MAAM,QAAA,CAAS,IAAA,EAAK;AACjC,MAAA,IAAI,CAAC,IAAA,EAAM;AACT,QAAA,OAAO,EAAC;AAAA,MACV;AAEA,MAAA,OAAO,IAAA,CAAK,MAAM,IAAI,CAAA;AAAA,IACxB,SAAS,KAAA,EAAO;AACd,MAAA,IAAI,IAAA,CAAK,OAAO,KAAA,EAAO;AACrB,QAAA,OAAA,CAAQ,KAAA,CAAM,6BAA6B,KAAK,CAAA;AAAA,MAClD;AACA,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AACF,CAAA;;;AC5CA,SAAS,eAAe,QAAA,EAAkD;AACxE,EAAA,IAAI,CAAC,QAAA,CAAS,MAAA,EAAQ,OAAO,KAAA;AAC7B,EAAA,MAAM,KAAA,GAAQ,SAAS,CAAC,CAAA;AAExB,EAAA,OACE,OAAO,KAAA,KAAU,QAAA,IACjB,KAAA,KAAU,QACV,MAAA,IAAU,KAAA,
IACV,CAAC,QAAA,EAAU,QAAQ,WAAA,EAAa,MAAM,CAAA,CAAE,QAAA,CAAS,MAAM,IAAc,CAAA;AAEzE;AAKA,SAAS,kBAAkB,QAAA,EAAqD;AAC9E,EAAA,IAAI,CAAC,QAAA,CAAS,MAAA,EAAQ,OAAO,KAAA;AAC7B,EAAA,MAAM,KAAA,GAAQ,SAAS,CAAC,CAAA;AAExB,EAAA,OACE,OAAO,UAAU,QAAA,IACjB,KAAA,KAAU,QACV,MAAA,IAAU,KAAA,IACV,CAAC,MAAA,EAAQ,WAAW,CAAA,CAAE,SAAS,KAAA,CAAM,IAAc,MAClD,OAAO,KAAA,CAAM,YAAY,QAAA,IAAY,KAAA,CAAM,OAAA,CAAQ,KAAA,CAAM,OAAO,CAAA,CAAA;AAErE;AAKA,SAAS,eAAe,QAAA,EAAkD;AACxE,EAAA,IAAI,CAAC,QAAA,CAAS,MAAA,EAAQ,OAAO,KAAA;AAC7B,EAAA,MAAM,KAAA,GAAQ,SAAS,CAAC,CAAA;AAExB,EAAA,OACE,OAAO,UAAU,QAAA,IACjB,KAAA,KAAU,QACV,MAAA,IAAU,KAAA,IACV,CAAC,MAAA,EAAQ,OAAO,EAAE,QAAA,CAAS,KAAA,CAAM,IAAc,CAAA,IAC/C,OAAA,IAAW,SACX,KAAA,CAAM,OAAA,CAAQ,MAAM,KAAK,CAAA;AAE7B;AAKA,SAAS,YAAY,QAAA,EAAwC;AAC3D,EAAA,MAAM,MAAA,GAAsB;AAAA,IAC1B,UAAU,EAAC;AAAA,IACX,WAAW,EAAC;AAAA,IACZ,gBAAA,EAAkB,CAAA;AAAA,IAClB,iBAAA,EAAmB,CAAA;AAAA,IACnB,QAAQ,EAAC;AAAA,IACT,QAAA,EAAU;AAAA,GACZ;AAEA,EAAA,KAAA,MAAW,OAAO,QAAA,EAAU;AAC1B,IAAA,IAAI,GAAA,CAAI,SAAS,QAAA,EAAU;AACzB,MAAA,MAAA,CAAO,YAAA,GAAe,IAAI,OAAA,IAAW,MAAA;AAAA,IACvC,WAAW,GAAA,CAAI,IAAA,KAAS,MAAA,IAAU,CAAC,OAAO,SAAA,EAAW;AACnD,MAAA,MAAA,CAAO,SAAA,GAAY,IAAI,OAAA,IAAW,MAAA;AAAA,IACpC,CAAA,MAAA,IAAW,GAAA,CAAI,IAAA,KAAS,WAAA,EAAa;AAEnC,MAAA,MAAM,OAAA,GAAyB;AAAA,QAC7B,QAAA,EAAU,QAAA;AAAA,QACV,QAAQ,GAAA,CAAI;AAAA,OACd;AAGA,MAAA,IAAI,GAAA,CAAI,UAAA,IAAc,GAAA,CAAI,UAAA,CAAW,SAAS,CAAA,EAAG;AAC/C,QAAA,OAAA,CAAQ,SAAA,GAAY,GAAA,CAAI,UAAA,CAAW,GAAA,CAAI,CAAC,EAAA,MAAwB;AAAA,UAC9D,IAAA,EAAM,GAAG,QAAA,CAAS,IAAA;AAAA,UAClB,KAAA,EAAO,aAAA,CAAc,EAAA,CAAG,QAAA,CAAS,SAAS;AAAA,SAC5C,CAAE,CAAA;AAGF,QAAA,KAAA,MAAW,EAAA,IAAM,IAAI,UAAA,EAAY;AAC/B,UAAA,MAAA,CAAO,UAAU,IAAA,CAAK;AAAA,YACpB,IAAA,EAAM,GAAG,QAAA,CAAS,IAAA;AAAA,YAClB,KAAA,EAAO,aAAA,CAAc,EAAA,CAAG,QAAA,CAAS,SAAS;AAAA,WAC3C,CAAA;AAAA,QACH;AAAA,MACF;AAEA,MAAA,MAAA,CAAO,QAAA,CAAS,KAAK,OAAO,CAAA;AAG5B,MAAA,IAAI,IAAI,OAAA,EAAS;AACf,QAAA,MAAA,CAAO,SAAS,GAAA,CAAI,OAAA;AAAA,MACtB;AAAA,IACF,CAAA,MAAA,IAAW,GAAA,CAAI,IAAA,KAAS,MAAA,EAAQ;AAE9B,MAAA,MAAM,eAAe,MAAA,CAAO,SAA
A,CAAU,MAAA,CAAO,SAAA,CAAU,SAAS,CAAC,CAAA;AACjE,MAAA,IAAI,YAAA,EAAc;AAChB,QAAA,YAAA,CAAa,MAAA,GAAS,aAAA,CAAc,GAAA,CAAI,OAAA,IAAW,EAAE,CAAA;AAAA,MACvD;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,eAAe,QAAA,EAA2C;AACjE,EAAA,MAAM,MAAA,GAAsB;AAAA,IAC1B,UAAU,EAAC;AAAA,IACX,WAAW,EAAC;AAAA,IACZ,gBAAA,EAAkB,CAAA;AAAA,IAClB,iBAAA,EAAmB,CAAA;AAAA,IACnB,QAAQ,EAAC;AAAA,IACT,QAAA,EAAU;AAAA,GACZ;AAEA,EAAA,KAAA,MAAW,OAAO,QAAA,EAAU;AAC1B,IAAA,IAAI,GAAA,CAAI,SAAS,MAAA,EAAQ;AAEvB,MAAA,IAAI,CAAC,OAAO,SAAA,EAAW;AACrB,QAAA,IAAI,OAAO,GAAA,CAAI,OAAA,KAAY,QAAA,EAAU;AACnC,UAAA,MAAA,CAAO,YAAY,GAAA,CAAI,OAAA;AAAA,QACzB,CAAA,MAAA,IAAW,KAAA,CAAM,OAAA,CAAQ,GAAA,CAAI,OAAO,CAAA,EAAG;AACrC,UAAA,MAAM,WAAA,GAAc,IAAI,OAAA,CAAQ,IAAA;AAAA,YAC9B,CAAC,CAAA,KAAwB,CAAA,CAAE,IAAA,KAAS;AAAA,WACtC;AACA,UAAA,IAAI,WAAA,IAAe,UAAU,WAAA,EAAa;AACxC,YAAA,MAAA,CAAO,YAAY,WAAA,CAAY,IAAA;AAAA,UACjC;AAAA,QACF;AAAA,MACF;AAGA,MAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,GAAA,CAAI,OAAO,CAAA,EAAG;AAC9B,QAAA,KAAA,MAAW,KAAA,IAAS,IAAI,OAAA,EAA+B;AACrD,UAAA,IAAI,KAAA,CAAM,IAAA,KAAS,aAAA,IAAiB,KAAA,CAAM,WAAA,EAAa;AAErD,YAAA,MAAM,QAAA,GAAW,OAAO,SAAA,CAAU,IAAA;AAAA,cAChC,CAAC,EAAA,KAAQ,EAAA,CAAkC,EAAA,KAAO,KAAA,CAAM;AAAA,aAC1D;AACA,YAAA,IAAI,QAAA,EAAU;AACZ,cAAA,QAAA,CAAS,SAAS,KAAA,CAAM,OAAA;AAAA,YAC1B;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAA,MAAA,IAAW,GAAA,CAAI,IAAA,KAAS,WAAA,EAAa;AACnC,MAAA,MAAM,OAAA,GAAyB;AAAA,QAC7B,QAAA,EAAU;AAAA,OACZ;AAEA,MAAA,IAAI,OAAO,GAAA,CAAI,OAAA,KAAY,QAAA,EAAU;AACnC,QAAA,OAAA,CAAQ,SAAS,GAAA,CAAI,OAAA;AACrB,QAAA,MAAA,CAAO,SAAS,GAAA,CAAI,OAAA;AAAA,MACtB,CAAA,MAAA,IAAW,KAAA,CAAM,OAAA,CAAQ,GAAA,CAAI,OAAO,CAAA,EAAG;AACrC,QAAA,MAAM,UAAoB,EAAC;AAC3B,QAAA,MAAM,YAA8B,EAAC;AAErC,QAAA,KAAA,MAAW,KAAA,IAAS,IAAI,OAAA,EAA+B;AACrD,UAAA,IAAI,KAAA,CAAM,IAAA,KAAS,MAAA,IAAU,KAAA,CAAM,IAAA,EAAM;AACvC,YAAA,OAAA,CAAQ,IAAA,CAAK,MAAM,IAAI,CAAA;AAAA,UACzB,CAAA,MAAA,IAAW,KAAA,CAAM,IAAA,KAAS,UAAA,IAAc,MAAM,IAAA,EAAM;AAClD,YAAA,MAAM,EAAA,GAAuC;AAAA,cAC3C,MAAM,KAAA,CAAM,IAAA;AAAA,cACZ,OAAO,KAAA,CAAM;AAAA,aACf;AACA,YAAA,IAAI,MAAM,EAAA,EA
AI;AACZ,cAAA,EAAA,CAAG,KAAK,KAAA,CAAM,EAAA;AAAA,YAChB;AACA,YAAA,SAAA,CAAU,KAAK,EAAE,CAAA;AACjB,YAAA,MAAA,CAAO,SAAA,CAAU,KAAK,EAAE,CAAA;AAAA,UAC1B;AAAA,QACF;AAEA,QAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,UAAA,OAAA,CAAQ,MAAA,GAAS,OAAA,CAAQ,IAAA,CAAK,IAAI,CAAA;AAClC,UAAA,MAAA,CAAO,MAAA,GAAS,OAAA,CAAQ,IAAA,CAAK,IAAI,CAAA;AAAA,QACnC;AACA,QAAA,IAAI,UAAU,MAAA,EAAQ;AACpB,UAAA,OAAA,CAAQ,SAAA,GAAY,SAAA;AAAA,QACtB;AAAA,MACF;AAEA,MAAA,MAAA,CAAO,QAAA,CAAS,KAAK,OAAO,CAAA;AAAA,IAC9B;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,YAAY,QAAA,EAAwC;AAC3D,EAAA,MAAM,MAAA,GAAsB;AAAA,IAC1B,UAAU,EAAC;AAAA,IACX,WAAW,EAAC;AAAA,IACZ,gBAAA,EAAkB,CAAA;AAAA,IAClB,iBAAA,EAAmB,CAAA;AAAA,IACnB,QAAQ,EAAC;AAAA,IACT,QAAA,EAAU;AAAA,GACZ;AAEA,EAAA,KAAA,MAAW,OAAO,QAAA,EAAU;AAC1B,IAAA,IAAI,GAAA,CAAI,SAAS,MAAA,EAAQ;AAEvB,MAAA,IAAI,CAAC,OAAO,SAAA,EAAW;AACrB,QAAA,MAAM,WAAW,GAAA,CAAI,KAAA,CAAM,KAAK,CAAC,CAAA,KAAM,EAAE,IAAI,CAAA;AAC7C,QAAA,IAAI,UAAU,IAAA,EAAM;AAClB,UAAA,MAAA,CAAO,YAAY,QAAA,CAAS,IAAA;AAAA,QAC9B;AAAA,MACF;AAGA,MAAA,KAAA,MAAW,IAAA,IAAQ,IAAI,KAAA,EAAO;AAC5B,QAAA,IAAI,KAAK,gBAAA,EAAkB;AACzB,UAAA,MAAM,QAAA,GAAW,OAAO,SAAA,CAAU,IAAA;AAAA,YAChC,CAAC,EAAA,KAAO,EAAA,CAAG,IAAA,KAAS,KAAK,gBAAA,CAAkB;AAAA,WAC7C;AACA,UAAA,IAAI,QAAA,EAAU;AACZ,YAAA,QAAA,CAAS,MAAA,GAAS,KAAK,gBAAA,CAAiB,QAAA;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAA,MAAA,IAAW,GAAA,CAAI,IAAA,KAAS,OAAA,EAAS;AAC/B,MAAA,MAAM,OAAA,GAAyB;AAAA,QAC7B,QAAA,EAAU;AAAA,OACZ;AAEA,MAAA,MAAM,UAAoB,EAAC;AAC3B,MAAA,MAAM,YAA8B,EAAC;AAErC,MAAA,KAAA,MAAW,IAAA,IAAQ,IAAI,KAAA,EAAO;AAC5B,QAAA,IAAI,KAAK,IAAA,EAAM;AACb,UAAA,OAAA,CAAQ,IAAA,CAAK,KAAK,IAAI,CAAA;AAAA,QACxB,CAAA,MAAA,IAAW,KAAK,YAAA,EAAc;AAC5B,UAAA,MAAM,EAAA,GAAqB;AAAA,YACzB,IAAA,EAAM,KAAK,YAAA,CAAa,IAAA;AAAA,YACxB,KAAA,EAAO,KAAK,YAAA,CAAa;AAAA,WAC3B;AACA,UAAA,SAAA,CAAU,KAAK,EAAE,CAAA;AACjB,UAAA,MAAA,CAAO,SAAA,CAAU,KAAK,EAAE,CAAA;AAAA,QAC1B;AAAA,MACF;AAEA,MAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,QAAA,OAAA,CAAQ,MAAA,GAAS,OAAA,CAAQ,IAAA,CAAK,IAAI,CAAA;AAClC,QAAA,MAAA,CAAO,MAAA,GAAS,OAAA,CAAQ,IAAA,CAAK,IAAI,CAAA;AAAA,MACnC;AACA,MA
AA,IAAI,UAAU,MAAA,EAAQ;AACpB,QAAA,OAAA,CAAQ,SAAA,GAAY,SAAA;AAAA,MACtB;AAEA,MAAA,MAAA,CAAO,QAAA,CAAS,KAAK,OAAO,CAAA;AAAA,IAC9B;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,cAAc,GAAA,EAAsB;AAC3C,EAAA,IAAI;AACF,IAAA,OAAO,IAAA,CAAK,MAAM,GAAG,CAAA;AAAA,EACvB,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,GAAA;AAAA,EACT;AACF;AAKO,SAAS,cAAc,QAAA,EAAgC;AAE5D,EAAA,IAAI,CAAC,QAAA,EAAU;AACb,IAAA,OAAO;AAAA,MACL,UAAU,EAAC;AAAA,MACX,WAAW,EAAC;AAAA,MACZ,gBAAA,EAAkB,CAAA;AAAA,MAClB,iBAAA,EAAmB,CAAA;AAAA,MACnB,QAAQ,EAAC;AAAA,MACT,QAAA,EAAU;AAAA,KACZ;AAAA,EACF;AAGA,EAAA,IAAI,CAAC,KAAA,CAAM,OAAA,CAAQ,QAAQ,CAAA,EAAG;AAC5B,IAAA,OAAO;AAAA,MACL,UAAU,EAAC;AAAA,MACX,WAAW,EAAC;AAAA,MACZ,gBAAA,EAAkB,CAAA;AAAA,MAClB,iBAAA,EAAmB,CAAA;AAAA,MACnB,QAAQ,EAAC;AAAA,MACT,QAAA,EAAU,SAAA;AAAA,MACV,QAAQ,OAAO,QAAA,KAAa,WAAW,QAAA,GAAW,IAAA,CAAK,UAAU,QAAQ;AAAA,KAC3E;AAAA,EACF;AAGA,EAAA,IAAI,cAAA,CAAe,QAAQ,CAAA,EAAG;AAC5B,IAAA,OAAO,YAAY,QAAQ,CAAA;AAAA,EAC7B,CAAA,MAAA,IAAW,cAAA,CAAe,QAAQ,CAAA,EAAG;AACnC,IAAA,OAAO,YAAY,QAAQ,CAAA;AAAA,EAC7B,CAAA,MAAA,IAAW,iBAAA,CAAkB,QAAQ,CAAA,EAAG;AACtC,IAAA,OAAO,eAAe,QAAQ,CAAA;AAAA,EAChC;AAGA,EAAA,OAAO;AAAA,IACL,UAAU,EAAC;AAAA,IACX,WAAW,EAAC;AAAA,IACZ,gBAAA,EAAkB,CAAA;AAAA,IAClB,iBAAA,EAAmB,CAAA;AAAA,IACnB,QAAQ,EAAC;AAAA,IACT,QAAA,EAAU;AAAA,GACZ;AACF;AAMO,SAAS,cAAc,QAAA,EAA2C;AACvE,EAAA,IAAI,CAAC,QAAA,IAAY,OAAO,QAAA,KAAa,QAAA,EAAU;AAC7C,IAAA,OAAO,EAAC;AAAA,EACV;AAEA,EAAA,MAAM,GAAA,GAAM,QAAA;AACZ,EAAA,MAAM,SAAiC,EAAC;AAGxC,EAAA,IAAI,WAAW,GAAA,EAAK;AAClB,IAAA,MAAA,CAAO,QAAQ,GAAA,CAAI,KAAA;AAAA,EACrB;AAEA,EAAA,IAAI,aAAa,GAAA,EAAK;AACpB,IAAA,MAAA,CAAO,QAAQ,GAAA,CAAI,OAAA;AAAA,EACrB;AAGA,EAAA,IAAI,OAAA,IAAW,OAAO,OAAO,GAAA,CAAI,UAAU,QAAA,IAAY,GAAA,CAAI,UAAU,IAAA,EAAM;AACzE,IAAA,MAAM,QAAQ,GAAA,CAAI,KAAA;AAGlB,IAAA,MAAA,CAAO,WAAA,GAAc,KAAA,CAAM,aAAA,IAAiB,KAAA,CAAM,YAAA;AAClD,IAAA,MAAA,CAAO,YAAA,GAAe,KAAA,CAAM,iBAAA,IAAqB,KAAA,CAAM,aAAA;AAAA,EACzD;AAGA,EAAA,IAAI,WAAA,IAAe,GAAA,IAAO,OAAA,IAAW,GAAA,EAAK;AACxC,IAAA,MAAA,CAAO,QAAA,GAAW,SAAA;AAAA,EACpB;AAGA,EAAA,IAAI,mBAAA,IAAuB,GAAA,IAAO,kCAAA,IAAsC,GAAA
,EAAK;AAC3E,IAAA,MAAA,CAAO,QAAA,GAAW,SAAA;AAAA,EACpB;AAGA,EAAA,IAAI,YAAA,IAAgB,GAAA,IAAO,gBAAA,IAAoB,GAAA,EAAK;AAClD,IAAA,MAAA,CAAO,QAAA,GAAW,QAAA;AAElB,IAAA,IAAI,eAAA,IAAmB,OAAO,OAAO,GAAA,CAAI,kBAAkB,QAAA,IAAY,GAAA,CAAI,kBAAkB,IAAA,EAAM;AACjG,MAAA,MAAM,QAAQ,GAAA,CAAI,aAAA;AAClB,MAAA,MAAA,CAAO,cAAc,KAAA,CAAM,gBAAA;AAC3B,MAAA,MAAA,CAAO,eAAe,KAAA,CAAM,oBAAA;AAAA,IAC9B;AAAA,EACF;AAGA,EAAA,IAAI,MAAA,CAAO,KAAA,IAAS,CAAC,MAAA,CAAO,QAAA,EAAU;AACpC,IAAA,IAAI,MAAA,CAAO,MAAM,UAAA,CAAW,KAAK,KAAK,MAAA,CAAO,KAAA,CAAM,UAAA,CAAW,IAAI,CAAA,EAAG;AACnE,MAAA,MAAA,CAAO,QAAA,GAAW,QAAA;AAAA,IACpB,CAAA,MAAA,IAAW,MAAA,CAAO,KAAA,CAAM,UAAA,CAAW,QAAQ,CAAA,EAAG;AAC5C,MAAA,MAAA,CAAO,QAAA,GAAW,WAAA;AAAA,IACpB,CAAA,MAAA,IAAW,MAAA,CAAO,KAAA,CAAM,UAAA,CAAW,QAAQ,CAAA,EAAG;AAC5C,MAAA,MAAA,CAAO,QAAA,GAAW,QAAA;AAAA,IACpB,CAAA,MAAA,IACE,MAAA,CAAO,KAAA,CAAM,UAAA,CAAW,YAAY,CAAA,IACpC,MAAA,CAAO,KAAA,CAAM,UAAA,CAAW,SAAS,CAAA,IACjC,MAAA,CAAO,MAAM,UAAA,CAAW,OAAO,CAAA,IAC/B,MAAA,CAAO,KAAA,CAAM,UAAA,CAAW,SAAS,CAAA,IACjC,MAAA,CAAO,KAAA,CAAM,UAAA,CAAW,UAAU,CAAA,IAClC,MAAA,CAAO,KAAA,CAAM,QAAA,CAAS,GAAG,CAAA,EACzB;AACA,MAAA,MAAA,CAAO,QAAA,GAAW,SAAA;AAAA,IACpB;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAMO,SAAS,qBAAqB,IAAA,EAAuC;AAC1E,EAAA,IAAI,CAAC,IAAA,IAAQ,OAAO,IAAA,KAAS,QAAA,EAAU;AACrC,IAAA,OAAO,EAAC;AAAA,EACV;AAEA,EAAA,MAAM,GAAA,GAAM,IAAA;AACZ,EAAA,MAAM,MAAA,GAAiC,EAAE,QAAA,EAAU,SAAA,EAAU;AAG7D,EAAA,IAAI,OAAA,IAAW,OAAO,OAAO,GAAA,CAAI,UAAU,QAAA,IAAY,GAAA,CAAI,UAAU,IAAA,EAAM;AACzE,IAAA,MAAM,QAAQ,GAAA,CAAI,KAAA;AAClB,IAAA,MAAA,CAAO,cAAc,KAAA,CAAM,YAAA;AAC3B,IAAA,MAAA,CAAO,eAAe,KAAA,CAAM,aAAA;AAAA,EAC9B;AAGA,EAAA,IAAI,sCAAsC,GAAA,EAAK;AAC7C,IAAA,MAAM,OAAA,GAAU,IAAI,kCAAkC,CAAA;AACtD,IAAA,IAAI,OAAA,CAAQ,eAAA,EAAiB,MAAA,CAAO,WAAA,GAAc,OAAA,CAAQ,eAAA;AAC1D,IAAA,IAAI,OAAA,CAAQ,gBAAA,EAAkB,MAAA,CAAO,YAAA,GAAe,OAAA,CAAQ,gBAAA;AAAA,EAC9D;AAEA,EAAA,OAAO,MAAA;AACT;;;ACxbA,IAAM,gBAAA,GAAmB,yBAAA;AAGzB,IAAI,eAA8B,EAAC;AACnC,IAAI,eAAA,GAAoC,IAAA;AAMjC,SAAS,IAAA,CAAK,MAAA,GAAwB,EAAC,EAAS;AACrD,EAAA,YAAA,GAAe,MAAA;AACf,EAAA,eAAA,GAAkB,
gBAAgB,MAAM,CAAA;AAC1C;AAKA,SAAS,gBAAgB,MAAA,EAAkC;AACzD,EAAA,MAAM,MAAA,GAAS,MAAA,CAAO,MAAA,IAAU,SAAA,CAAU,iBAAiB,CAAA;AAE3D,EAAA,IAAI,CAAC,MAAA,IAAU,CAAC,MAAA,CAAO,QAAA,EAAU;AAC/B,IAAA,OAAA,CAAQ,IAAA;AAAA,MACN;AAAA,KACF;AAAA,EACF;AAEA,EAAA,OAAO,IAAI,SAAA,CAAU;AAAA,IACnB,QAAQ,MAAA,IAAU,EAAA;AAAA,IAClB,QAAA,EAAU,OAAO,QAAA,IAAY,gBAAA;AAAA,IAC7B,KAAA,EAAO,OAAO,KAAA,IAAS,KAAA;AAAA,IACvB,QAAA,EAAU,MAAA,CAAO,QAAA,IAAY,CAAC;AAAA,GAC/B,CAAA;AACH;AAKA,SAAS,YAAA,GAA0B;AACjC,EAAA,IAAI,CAAC,eAAA,EAAiB;AACpB,IAAA,eAAA,GAAkB,gBAAgB,YAAY,CAAA;AAAA,EAChD;AACA,EAAA,OAAO,eAAA;AACT;AAKA,SAAS,UAAU,IAAA,EAAkC;AACnD,EAAA,IAAI,OAAO,OAAA,KAAY,WAAA,IAAe,OAAA,CAAQ,GAAA,EAAK;AACjD,IAAA,OAAO,OAAA,CAAQ,IAAI,IAAI,CAAA;AAAA,EACzB;AACA,EAAA,OAAO,MAAA;AACT;AAKO,IAAM,QAAN,MAAY;AAAA,EAUjB,WAAA,CAAY,OAAA,EAAuB,SAAA,EAAsB,KAAA,EAAgB,QAAA,EAAmB;AAT5F,IAAA,aAAA,CAAA,IAAA,EAAQ,IAAA,EAAoB,IAAA,CAAA;AAC5B,IAAA,aAAA,CAAA,IAAA,EAAQ,WAAA,CAAA;AACR,IAAA,aAAA,CAAA,IAAA,EAAQ,SAAA,CAAA;AACR,IAAA,aAAA,CAAA,IAAA,EAAQ,WAAA,CAAA;AACR,IAAA,aAAA,CAAA,IAAA,EAAQ,WAAA,EAAY,KAAA,CAAA;AACpB,IAAA,aAAA,CAAA,IAAA,EAAQ,OAAA,CAAA;AACR,IAAA,aAAA,CAAA,IAAA,EAAQ,UAAA,CAAA;AACR,IAAA,aAAA,CAAA,IAAA,EAAQ,YAA4B,EAAC,CAAA;AAGnC,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AACjB,IAAA,IAAA,CAAK,SAAA,GAAY,KAAK,GAAA,EAAI;AAC1B,IAAA,IAAA,CAAK,KAAA,GAAQ,KAAA;AACb,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAA,GAAsB;AAC1B,IAAA,IAAI,KAAK,QAAA,EAAU;AAEnB,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,SAAA,CAAU,WAAA,CAAY;AAAA,QAC9C,IAAA,EAAM,KAAK,OAAA,CAAQ,IAAA;AAAA,QACnB,SAAA,EAAW,KAAK,OAAA,CAAQ,SAAA;AAAA,QACxB,MAAA,EAAQ,KAAK,OAAA,CAAQ,MAAA;AAAA,QACrB,KAAA,EAAO,KAAK,OAAA,CAAQ,KAAA;AAAA,QACpB,QAAA,EAAU,KAAK,OAAA,CAAQ,QAAA;AAAA,QACvB,IAAA,EAAM,KAAK,OAAA,CAAQ;AAAA,OACpB,CAAA;AACD,MAAA,IAAA,CAAK,KAAK,MAAA,CAAO,EAAA;AAAA,IACnB,SAAS,KAAA,EAAO;AACd,MAAA,IAAI,KAAK,KAAA,EAAO;AACd,QAAA,OAAA,CAAQ,KAAA,CAAM,qCAAqC,KAAK,CAAA;AAAA,MAC1D;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,IAAI,QAAA,EAAyB;AAC3B,I
AAA,MAAM,MAAA,GAAS,cAAc,QAAQ,CAAA;AACrC,IAAA,IAAI,MAAA,CAAO,KAAA,IAAS,MAAA,CAAO,WAAA,IAAe,OAAO,YAAA,EAAc;AAC7D,MAAA,IAAA,CAAK,QAAA,CAAS,KAAK,MAAM,CAAA;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QAAQ,QAAA,EAAkC;AAC9C,IAAA,IAAI,KAAK,SAAA,EAAW;AACpB,IAAA,IAAA,CAAK,SAAA,GAAY,IAAA;AAEjB,IAAA,IAAI,IAAA,CAAK,QAAA,IAAY,CAAC,IAAA,CAAK,EAAA,EAAI;AAE/B,IAAA,MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,IAAA,CAAK,SAAA;AACrC,IAAA,MAAM,MAAA,GAAS,cAAc,QAAQ,CAAA;AAGrC,IAAA,MAAM,cAAc,CAAC,GAAG,KAAK,QAAA,EAAU,GAAG,OAAO,QAAQ,CAAA;AAGzD,IAAA,IAAI,gBAAA,GAAmB,CAAA;AACvB,IAAA,IAAI,iBAAA,GAAoB,CAAA;AACxB,IAAA,MAAM,MAAA,uBAAa,GAAA,EAAY;AAE/B,IAAA,KAAA,MAAW,QAAQ,WAAA,EAAa;AAC9B,MAAA,IAAI,IAAA,CAAK,WAAA,EAAa,gBAAA,IAAoB,IAAA,CAAK,WAAA;AAC/C,MAAA,IAAI,IAAA,CAAK,YAAA,EAAc,iBAAA,IAAqB,IAAA,CAAK,YAAA;AACjD,MAAA,IAAI,IAAA,CAAK,KAAA,EAAO,MAAA,CAAO,GAAA,CAAI,KAAK,KAAK,CAAA;AAAA,IACvC;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,CAAK,SAAA,CAAU,aAAA,CAAc,IAAA,CAAK,EAAA,EAAI;AAAA,QAC1C,MAAA,EAAQ,WAAA;AAAA,QACR,QAAQ,MAAA,CAAO,MAAA;AAAA,QACf,cAAc,MAAA,CAAO,YAAA;AAAA,QACrB,QAAA,EAAU,WAAA;AAAA,QACV,WAAW,MAAA,CAAO,SAAA;AAAA,QAClB,MAAA,EAAQ,KAAA,CAAM,IAAA,CAAK,MAAM,CAAA;AAAA,QACzB,gBAAA;AAAA,QACA,iBAAA;AAAA,QACA;AAAA,OACD,CAAA;AAAA,IACH,SAAS,GAAA,EAAK;AACZ,MAAA,IAAI,KAAK,KAAA,EAAO;AACd,QAAA,OAAA,CAAQ,KAAA,CAAM,uCAAuC,GAAG,CAAA;AAAA,MAC1D;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,KAAA,CAAM,KAAA,EAAwB,QAAA,EAAmC;AACrE,IAAA,IAAI,KAAK,SAAA,EAAW;AACpB,IAAA,IAAA,CAAK,SAAA,GAAY,IAAA;AAEjB,IAAA,IAAI,IAAA,CAAK,QAAA,IAAY,CAAC,IAAA,CAAK,EAAA,EAAI;AAE/B,IAAA,MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,EAAI,GAAI,IAAA,CAAK,SAAA;AACrC,IAAA,MAAM,MAAA,GAAS,QAAA,GAAW,aAAA,CAAc,QAAQ,CAAA,GAAI,IAAA;AAEpD,IAAA,MAAM,QAAA,GAAW,iBAAiB,KAAA,GAAQ,KAAA,GAAQ,IAAI,KAAA,CAAM,MAAA,CAAO,KAAK,CAAC,CAAA;AAGzE,IAAA,MAAM,WAAA,GAAc,MAAA,GAChB,CAAC,GAAG,IAAA,CAAK,UAAU,GAAG,MAAA,CAAO,QAAQ,CAAA,GACrC,IAAA,CAAK,QAAA;AAGT,IAAA,IAAI,gBAAA,GAAmB,CAAA;AACvB,IAAA,IAAI,iBAAA,GAAoB,CAAA;AACxB,IAAA,MAAM,MAAA,uBAAa,GAAA,EAAY;AAE/B,IAAA,KAAA,MAAW,QAAQ,WAAA
,EAAa;AAC9B,MAAA,IAAI,IAAA,CAAK,WAAA,EAAa,gBAAA,IAAoB,IAAA,CAAK,WAAA;AAC/C,MAAA,IAAI,IAAA,CAAK,YAAA,EAAc,iBAAA,IAAqB,IAAA,CAAK,YAAA;AACjD,MAAA,IAAI,IAAA,CAAK,KAAA,EAAO,MAAA,CAAO,GAAA,CAAI,KAAK,KAAK,CAAA;AAAA,IACvC;AAEA,IAAA,MAAM,OAAA,GAAgC;AAAA,MACpC,MAAA,EAAQ,OAAA;AAAA,MACR,cAAc,QAAA,CAAS,OAAA;AAAA,MACvB,YAAY,QAAA,CAAS,KAAA;AAAA,MACrB,UAAA;AAAA,MACA,gBAAA;AAAA,MACA,iBAAA;AAAA,MACA,MAAA,EAAQ,KAAA,CAAM,IAAA,CAAK,MAAM;AAAA,KAC3B;AAEA,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,OAAA,CAAQ,SAAS,MAAA,CAAO,MAAA;AACxB,MAAA,OAAA,CAAQ,eAAe,MAAA,CAAO,YAAA;AAC9B,MAAA,OAAA,CAAQ,QAAA,GAAW,WAAA;AACnB,MAAA,OAAA,CAAQ,YAAY,MAAA,CAAO,SAAA;AAAA,IAC7B;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,CAAK,SAAA,CAAU,aAAA,CAAc,IAAA,CAAK,IAAI,OAAO,CAAA;AAAA,IACrD,SAAS,GAAA,EAAK;AACZ,MAAA,IAAI,KAAK,KAAA,EAAO;AACd,QAAA,OAAA,CAAQ,KAAA,CAAM,uCAAuC,GAAG,CAAA;AAAA,MAC1D;AAAA,IACF;AAAA,EACF;AACF;AAgBO,SAAS,MAAM,OAAA,EAA8B;AAClD,EAAA,MAAM,YAAY,YAAA,EAAa;AAC/B,EAAA,MAAM,KAAA,GAAQ,aAAa,KAAA,IAAS,KAAA;AACpC,EAAA,MAAM,QAAA,GAAW,YAAA,CAAa,QAAA,IAAY,CAAC,UAAU,SAAA,EAAU;AAE/D,EAAA,MAAM,IAAI,IAAI,KAAA,CAAM,OAAA,EAAS,SAAA,EAAW,OAAO,QAAQ,CAAA;AAGvD,EAAA,CAAA,CAAE,IAAA,EAAK,CAAE,KAAA,CAAM,CAAC,GAAA,KAAQ;AACtB,IAAA,IAAI,KAAA,EAAO;AACT,MAAA,OAAA,CAAQ,KAAA,CAAM,gCAAgC,GAAG,CAAA;AAAA,IACnD;AAAA,EACF,CAAC,CAAA;AAED,EAAA,OAAO,CAAA;AACT","file":"index.mjs","sourcesContent":["/**\r\n * Transport layer for sending data to Lelemon API\r\n */\r\n\r\nimport type { CreateTraceRequest, CompleteTraceRequest } from './types';\r\n\r\ninterface TransportConfig {\r\n apiKey: string;\r\n endpoint: string;\r\n debug: boolean;\r\n disabled: boolean;\r\n}\r\n\r\nexport class Transport {\r\n private config: TransportConfig;\r\n\r\n constructor(config: TransportConfig) {\r\n this.config = config;\r\n }\r\n\r\n /**\r\n * Check if transport is enabled\r\n */\r\n isEnabled(): boolean {\r\n return !this.config.disabled && !!this.config.apiKey;\r\n }\r\n\r\n /**\r\n * Create a new trace\r\n */\r\n async createTrace(data: CreateTraceRequest): Promise<{ id: string }> 
{\r\n return this.request<{ id: string }>('POST', '/api/v1/traces', data);\r\n }\r\n\r\n /**\r\n * Complete a trace (success or error)\r\n */\r\n async completeTrace(traceId: string, data: CompleteTraceRequest): Promise<void> {\r\n await this.request('PATCH', `/api/v1/traces/${traceId}`, data);\r\n }\r\n\r\n /**\r\n * Make HTTP request to API\r\n */\r\n private async request<T>(method: string, path: string, body?: unknown): Promise<T> {\r\n if (this.config.disabled) {\r\n return {} as T;\r\n }\r\n\r\n const url = `${this.config.endpoint}${path}`;\r\n\r\n if (this.config.debug) {\r\n console.log(`[Lelemon] ${method} ${url}`, body);\r\n }\r\n\r\n try {\r\n const response = await fetch(url, {\r\n method,\r\n headers: {\r\n 'Content-Type': 'application/json',\r\n Authorization: `Bearer ${this.config.apiKey}`,\r\n },\r\n body: body ? JSON.stringify(body) : undefined,\r\n });\r\n\r\n if (!response.ok) {\r\n const error = await response.text();\r\n throw new Error(`Lelemon API error: ${response.status} ${error}`);\r\n }\r\n\r\n // Handle empty responses\r\n const text = await response.text();\r\n if (!text) {\r\n return {} as T;\r\n }\r\n\r\n return JSON.parse(text);\r\n } catch (error) {\r\n if (this.config.debug) {\r\n console.error('[Lelemon] Request failed:', error);\r\n }\r\n throw error;\r\n }\r\n }\r\n}\r\n","/**\r\n * Message Parser\r\n * Auto-detects OpenAI/Anthropic/Gemini message formats and extracts relevant data\r\n */\r\n\r\nimport type {\r\n Message,\r\n OpenAIMessage,\r\n OpenAIToolCall,\r\n AnthropicMessage,\r\n AnthropicContent,\r\n ParsedTrace,\r\n ParsedLLMCall,\r\n ParsedToolCall,\r\n} from './types';\r\n\r\n// ============================================\r\n// GEMINI FORMAT\r\n// ============================================\r\n\r\ninterface GeminiMessage {\r\n role: 'user' | 'model';\r\n parts: GeminiPart[];\r\n}\r\n\r\ninterface GeminiPart {\r\n text?: string;\r\n functionCall?: {\r\n name: string;\r\n args: Record<string, unknown>;\r\n };\r\n 
functionResponse?: {\r\n name: string;\r\n response: unknown;\r\n };\r\n}\r\n\r\n/**\r\n * Detect if messages are in OpenAI format\r\n */\r\nfunction isOpenAIFormat(messages: unknown[]): messages is OpenAIMessage[] {\r\n if (!messages.length) return false;\r\n const first = messages[0] as Record<string, unknown>;\r\n // OpenAI has role: system/user/assistant/tool\r\n return (\r\n typeof first === 'object' &&\r\n first !== null &&\r\n 'role' in first &&\r\n ['system', 'user', 'assistant', 'tool'].includes(first.role as string)\r\n );\r\n}\r\n\r\n/**\r\n * Detect if messages are in Anthropic format\r\n */\r\nfunction isAnthropicFormat(messages: unknown[]): messages is AnthropicMessage[] {\r\n if (!messages.length) return false;\r\n const first = messages[0] as Record<string, unknown>;\r\n // Anthropic only has user/assistant, and content can be array\r\n return (\r\n typeof first === 'object' &&\r\n first !== null &&\r\n 'role' in first &&\r\n ['user', 'assistant'].includes(first.role as string) &&\r\n (typeof first.content === 'string' || Array.isArray(first.content))\r\n );\r\n}\r\n\r\n/**\r\n * Detect if messages are in Gemini format\r\n */\r\nfunction isGeminiFormat(messages: unknown[]): messages is GeminiMessage[] {\r\n if (!messages.length) return false;\r\n const first = messages[0] as Record<string, unknown>;\r\n // Gemini uses 'user' | 'model' and has 'parts' array\r\n return (\r\n typeof first === 'object' &&\r\n first !== null &&\r\n 'role' in first &&\r\n ['user', 'model'].includes(first.role as string) &&\r\n 'parts' in first &&\r\n Array.isArray(first.parts)\r\n );\r\n}\r\n\r\n/**\r\n * Parse OpenAI messages\r\n */\r\nfunction parseOpenAI(messages: OpenAIMessage[]): ParsedTrace {\r\n const result: ParsedTrace = {\r\n llmCalls: [],\r\n toolCalls: [],\r\n totalInputTokens: 0,\r\n totalOutputTokens: 0,\r\n models: [],\r\n provider: 'openai',\r\n };\r\n\r\n for (const msg of messages) {\r\n if (msg.role === 'system') {\r\n result.systemPrompt = msg.content 
?? undefined;\r\n } else if (msg.role === 'user' && !result.userInput) {\r\n result.userInput = msg.content ?? undefined;\r\n } else if (msg.role === 'assistant') {\r\n // Track as LLM call\r\n const llmCall: ParsedLLMCall = {\r\n provider: 'openai',\r\n output: msg.content,\r\n };\r\n\r\n // Extract tool calls if present\r\n if (msg.tool_calls && msg.tool_calls.length > 0) {\r\n llmCall.toolCalls = msg.tool_calls.map((tc: OpenAIToolCall) => ({\r\n name: tc.function.name,\r\n input: safeParseJSON(tc.function.arguments),\r\n }));\r\n\r\n // Also add to global tool calls\r\n for (const tc of msg.tool_calls) {\r\n result.toolCalls.push({\r\n name: tc.function.name,\r\n input: safeParseJSON(tc.function.arguments),\r\n });\r\n }\r\n }\r\n\r\n result.llmCalls.push(llmCall);\r\n\r\n // Last assistant message is the output\r\n if (msg.content) {\r\n result.output = msg.content;\r\n }\r\n } else if (msg.role === 'tool') {\r\n // Find the matching tool call and add output\r\n const lastToolCall = result.toolCalls[result.toolCalls.length - 1];\r\n if (lastToolCall) {\r\n lastToolCall.output = safeParseJSON(msg.content ?? 
'');\r\n }\r\n }\r\n }\r\n\r\n return result;\r\n}\r\n\r\n/**\r\n * Parse Anthropic messages\r\n */\r\nfunction parseAnthropic(messages: AnthropicMessage[]): ParsedTrace {\r\n const result: ParsedTrace = {\r\n llmCalls: [],\r\n toolCalls: [],\r\n totalInputTokens: 0,\r\n totalOutputTokens: 0,\r\n models: [],\r\n provider: 'anthropic',\r\n };\r\n\r\n for (const msg of messages) {\r\n if (msg.role === 'user') {\r\n // First user message is input\r\n if (!result.userInput) {\r\n if (typeof msg.content === 'string') {\r\n result.userInput = msg.content;\r\n } else if (Array.isArray(msg.content)) {\r\n const textContent = msg.content.find(\r\n (c: AnthropicContent) => c.type === 'text'\r\n );\r\n if (textContent && 'text' in textContent) {\r\n result.userInput = textContent.text;\r\n }\r\n }\r\n }\r\n\r\n // Check for tool_result in content\r\n if (Array.isArray(msg.content)) {\r\n for (const block of msg.content as AnthropicContent[]) {\r\n if (block.type === 'tool_result' && block.tool_use_id) {\r\n // Find matching tool call and add result\r\n const toolCall = result.toolCalls.find(\r\n (tc) => (tc as unknown as { id?: string }).id === block.tool_use_id\r\n );\r\n if (toolCall) {\r\n toolCall.output = block.content;\r\n }\r\n }\r\n }\r\n }\r\n } else if (msg.role === 'assistant') {\r\n const llmCall: ParsedLLMCall = {\r\n provider: 'anthropic',\r\n };\r\n\r\n if (typeof msg.content === 'string') {\r\n llmCall.output = msg.content;\r\n result.output = msg.content;\r\n } else if (Array.isArray(msg.content)) {\r\n const outputs: string[] = [];\r\n const toolCalls: ParsedToolCall[] = [];\r\n\r\n for (const block of msg.content as AnthropicContent[]) {\r\n if (block.type === 'text' && block.text) {\r\n outputs.push(block.text);\r\n } else if (block.type === 'tool_use' && block.name) {\r\n const tc: ParsedToolCall & { id?: string } = {\r\n name: block.name,\r\n input: block.input,\r\n };\r\n if (block.id) {\r\n tc.id = block.id;\r\n }\r\n toolCalls.push(tc);\r\n 
result.toolCalls.push(tc);\r\n }\r\n }\r\n\r\n if (outputs.length) {\r\n llmCall.output = outputs.join('\\n');\r\n result.output = outputs.join('\\n');\r\n }\r\n if (toolCalls.length) {\r\n llmCall.toolCalls = toolCalls;\r\n }\r\n }\r\n\r\n result.llmCalls.push(llmCall);\r\n }\r\n }\r\n\r\n return result;\r\n}\r\n\r\n/**\r\n * Parse Gemini messages\r\n */\r\nfunction parseGemini(messages: GeminiMessage[]): ParsedTrace {\r\n const result: ParsedTrace = {\r\n llmCalls: [],\r\n toolCalls: [],\r\n totalInputTokens: 0,\r\n totalOutputTokens: 0,\r\n models: [],\r\n provider: 'gemini',\r\n };\r\n\r\n for (const msg of messages) {\r\n if (msg.role === 'user') {\r\n // First user message is input\r\n if (!result.userInput) {\r\n const textPart = msg.parts.find((p) => p.text);\r\n if (textPart?.text) {\r\n result.userInput = textPart.text;\r\n }\r\n }\r\n\r\n // Check for function responses (tool results)\r\n for (const part of msg.parts) {\r\n if (part.functionResponse) {\r\n const toolCall = result.toolCalls.find(\r\n (tc) => tc.name === part.functionResponse!.name\r\n );\r\n if (toolCall) {\r\n toolCall.output = part.functionResponse.response;\r\n }\r\n }\r\n }\r\n } else if (msg.role === 'model') {\r\n const llmCall: ParsedLLMCall = {\r\n provider: 'gemini',\r\n };\r\n\r\n const outputs: string[] = [];\r\n const toolCalls: ParsedToolCall[] = [];\r\n\r\n for (const part of msg.parts) {\r\n if (part.text) {\r\n outputs.push(part.text);\r\n } else if (part.functionCall) {\r\n const tc: ParsedToolCall = {\r\n name: part.functionCall.name,\r\n input: part.functionCall.args,\r\n };\r\n toolCalls.push(tc);\r\n result.toolCalls.push(tc);\r\n }\r\n }\r\n\r\n if (outputs.length) {\r\n llmCall.output = outputs.join('\\n');\r\n result.output = outputs.join('\\n');\r\n }\r\n if (toolCalls.length) {\r\n llmCall.toolCalls = toolCalls;\r\n }\r\n\r\n result.llmCalls.push(llmCall);\r\n }\r\n }\r\n\r\n return result;\r\n}\r\n\r\n/**\r\n * Safely parse JSON, returning original string if 
parsing fails\r\n */\r\nfunction safeParseJSON(str: string): unknown {\r\n try {\r\n return JSON.parse(str);\r\n } catch {\r\n return str;\r\n }\r\n}\r\n\r\n/**\r\n * Parse messages array and extract structured data\r\n */\r\nexport function parseMessages(messages: unknown): ParsedTrace {\r\n // Handle null/undefined\r\n if (!messages) {\r\n return {\r\n llmCalls: [],\r\n toolCalls: [],\r\n totalInputTokens: 0,\r\n totalOutputTokens: 0,\r\n models: [],\r\n provider: 'unknown',\r\n };\r\n }\r\n\r\n // Ensure it's an array\r\n if (!Array.isArray(messages)) {\r\n return {\r\n llmCalls: [],\r\n toolCalls: [],\r\n totalInputTokens: 0,\r\n totalOutputTokens: 0,\r\n models: [],\r\n provider: 'unknown',\r\n output: typeof messages === 'string' ? messages : JSON.stringify(messages),\r\n };\r\n }\r\n\r\n // Detect format and parse (order matters - Gemini first since it's most specific)\r\n if (isGeminiFormat(messages)) {\r\n return parseGemini(messages);\r\n } else if (isOpenAIFormat(messages)) {\r\n return parseOpenAI(messages);\r\n } else if (isAnthropicFormat(messages)) {\r\n return parseAnthropic(messages);\r\n }\r\n\r\n // Unknown format - just store as-is\r\n return {\r\n llmCalls: [],\r\n toolCalls: [],\r\n totalInputTokens: 0,\r\n totalOutputTokens: 0,\r\n models: [],\r\n provider: 'unknown',\r\n };\r\n}\r\n\r\n/**\r\n * Extract data from an OpenAI/Anthropic/Bedrock response object\r\n * This handles the raw API response (not the messages array)\r\n */\r\nexport function parseResponse(response: unknown): Partial<ParsedLLMCall> {\r\n if (!response || typeof response !== 'object') {\r\n return {};\r\n }\r\n\r\n const res = response as Record<string, unknown>;\r\n const result: Partial<ParsedLLMCall> = {};\r\n\r\n // Extract model\r\n if ('model' in res) {\r\n result.model = res.model as string;\r\n }\r\n // Bedrock model ID in response\r\n if ('modelId' in res) {\r\n result.model = res.modelId as string;\r\n }\r\n\r\n // Extract usage - handle all formats\r\n if 
('usage' in res && typeof res.usage === 'object' && res.usage !== null) {\r\n const usage = res.usage as Record<string, number>;\r\n // OpenAI: prompt_tokens, completion_tokens\r\n // Anthropic/Bedrock: input_tokens, output_tokens\r\n result.inputTokens = usage.prompt_tokens ?? usage.input_tokens;\r\n result.outputTokens = usage.completion_tokens ?? usage.output_tokens;\r\n }\r\n\r\n // Bedrock Converse API format\r\n if ('$metadata' in res && 'usage' in res) {\r\n result.provider = 'bedrock';\r\n }\r\n\r\n // Bedrock InvokeModel response (parsed body)\r\n if ('anthropic_version' in res || 'amazon-bedrock-invocationMetrics' in res) {\r\n result.provider = 'bedrock';\r\n }\r\n\r\n // Gemini response format\r\n if ('candidates' in res || 'promptFeedback' in res) {\r\n result.provider = 'gemini';\r\n // Gemini usageMetadata\r\n if ('usageMetadata' in res && typeof res.usageMetadata === 'object' && res.usageMetadata !== null) {\r\n const usage = res.usageMetadata as Record<string, number>;\r\n result.inputTokens = usage.promptTokenCount;\r\n result.outputTokens = usage.candidatesTokenCount;\r\n }\r\n }\r\n\r\n // Detect provider from model name\r\n if (result.model && !result.provider) {\r\n if (result.model.startsWith('gpt') || result.model.startsWith('o1')) {\r\n result.provider = 'openai';\r\n } else if (result.model.startsWith('claude')) {\r\n result.provider = 'anthropic';\r\n } else if (result.model.startsWith('gemini')) {\r\n result.provider = 'gemini';\r\n } else if (\r\n result.model.startsWith('anthropic.') ||\r\n result.model.startsWith('amazon.') ||\r\n result.model.startsWith('meta.') ||\r\n result.model.startsWith('cohere.') ||\r\n result.model.startsWith('mistral.') ||\r\n result.model.includes(':') // Bedrock ARN format\r\n ) {\r\n result.provider = 'bedrock';\r\n }\r\n }\r\n\r\n return result;\r\n}\r\n\r\n/**\r\n * Parse Bedrock InvokeModel response body\r\n * Call this with the parsed JSON body from Bedrock\r\n */\r\nexport function 
parseBedrockResponse(body: unknown): Partial<ParsedLLMCall> {\r\n if (!body || typeof body !== 'object') {\r\n return {};\r\n }\r\n\r\n const res = body as Record<string, unknown>;\r\n const result: Partial<ParsedLLMCall> = { provider: 'bedrock' };\r\n\r\n // Claude on Bedrock\r\n if ('usage' in res && typeof res.usage === 'object' && res.usage !== null) {\r\n const usage = res.usage as Record<string, number>;\r\n result.inputTokens = usage.input_tokens;\r\n result.outputTokens = usage.output_tokens;\r\n }\r\n\r\n // Model from invocation metrics\r\n if ('amazon-bedrock-invocationMetrics' in res) {\r\n const metrics = res['amazon-bedrock-invocationMetrics'] as Record<string, unknown>;\r\n if (metrics.inputTokenCount) result.inputTokens = metrics.inputTokenCount as number;\r\n if (metrics.outputTokenCount) result.outputTokens = metrics.outputTokenCount as number;\r\n }\r\n\r\n return result;\r\n}\r\n","/**\r\n * Lelemon Tracer - Simple, low-friction API\r\n *\r\n * Usage:\r\n * const t = trace({ input: userMessage });\r\n * try {\r\n * // your agent code\r\n * t.success(messages);\r\n * } catch (error) {\r\n * t.error(error, messages);\r\n * }\r\n */\r\n\r\nimport { Transport } from './transport';\r\nimport { parseMessages, parseResponse } from './parser';\r\nimport type {\r\n LelemonConfig,\r\n TraceOptions,\r\n ParsedLLMCall,\r\n CompleteTraceRequest,\r\n} from './types';\r\n\r\nconst DEFAULT_ENDPOINT = 'https://api.lelemon.dev';\r\n\r\n// Global config (set via init())\r\nlet globalConfig: LelemonConfig = {};\r\nlet globalTransport: Transport | null = null;\r\n\r\n/**\r\n * Initialize the SDK globally (optional)\r\n * If not called, trace() will auto-initialize with env vars\r\n */\r\nexport function init(config: LelemonConfig = {}): void {\r\n globalConfig = config;\r\n globalTransport = createTransport(config);\r\n}\r\n\r\n/**\r\n * Create a transport instance\r\n */\r\nfunction createTransport(config: LelemonConfig): Transport {\r\n const apiKey = 
config.apiKey ?? getEnvVar('LELEMON_API_KEY');\r\n\r\n if (!apiKey && !config.disabled) {\r\n console.warn(\r\n '[Lelemon] No API key provided. Set apiKey in config or LELEMON_API_KEY env var. Tracing disabled.'\r\n );\r\n }\r\n\r\n return new Transport({\r\n apiKey: apiKey ?? '',\r\n endpoint: config.endpoint ?? DEFAULT_ENDPOINT,\r\n debug: config.debug ?? false,\r\n disabled: config.disabled ?? !apiKey,\r\n });\r\n}\r\n\r\n/**\r\n * Get transport (create if needed)\r\n */\r\nfunction getTransport(): Transport {\r\n if (!globalTransport) {\r\n globalTransport = createTransport(globalConfig);\r\n }\r\n return globalTransport;\r\n}\r\n\r\n/**\r\n * Get environment variable (works in Node and edge)\r\n */\r\nfunction getEnvVar(name: string): string | undefined {\r\n if (typeof process !== 'undefined' && process.env) {\r\n return process.env[name];\r\n }\r\n return undefined;\r\n}\r\n\r\n/**\r\n * Active trace handle returned by trace()\r\n */\r\nexport class Trace {\r\n private id: string | null = null;\r\n private transport: Transport;\r\n private options: TraceOptions;\r\n private startTime: number;\r\n private completed = false;\r\n private debug: boolean;\r\n private disabled: boolean;\r\n private llmCalls: ParsedLLMCall[] = [];\r\n\r\n constructor(options: TraceOptions, transport: Transport, debug: boolean, disabled: boolean) {\r\n this.options = options;\r\n this.transport = transport;\r\n this.startTime = Date.now();\r\n this.debug = debug;\r\n this.disabled = disabled;\r\n }\r\n\r\n /**\r\n * Initialize trace on server (called internally)\r\n */\r\n async init(): Promise<void> {\r\n if (this.disabled) return;\r\n\r\n try {\r\n const result = await this.transport.createTrace({\r\n name: this.options.name,\r\n sessionId: this.options.sessionId,\r\n userId: this.options.userId,\r\n input: this.options.input,\r\n metadata: this.options.metadata,\r\n tags: this.options.tags,\r\n });\r\n this.id = result.id;\r\n } catch (error) {\r\n if (this.debug) {\r\n 
console.error('[Lelemon] Failed to create trace:', error);\r\n }\r\n }\r\n }\r\n\r\n /**\r\n * Log an LLM response (optional - for tracking individual calls)\r\n * Use this if you want to track tokens per call, not just at the end\r\n */\r\n log(response: unknown): void {\r\n const parsed = parseResponse(response);\r\n if (parsed.model || parsed.inputTokens || parsed.outputTokens) {\r\n this.llmCalls.push(parsed);\r\n }\r\n }\r\n\r\n /**\r\n * Complete trace successfully\r\n * @param messages - The full message history (OpenAI/Anthropic format)\r\n */\r\n async success(messages: unknown): Promise<void> {\r\n if (this.completed) return;\r\n this.completed = true;\r\n\r\n if (this.disabled || !this.id) return;\r\n\r\n const durationMs = Date.now() - this.startTime;\r\n const parsed = parseMessages(messages);\r\n\r\n // Merge logged LLM calls with parsed ones\r\n const allLLMCalls = [...this.llmCalls, ...parsed.llmCalls];\r\n\r\n // Calculate totals\r\n let totalInputTokens = 0;\r\n let totalOutputTokens = 0;\r\n const models = new Set<string>();\r\n\r\n for (const call of allLLMCalls) {\r\n if (call.inputTokens) totalInputTokens += call.inputTokens;\r\n if (call.outputTokens) totalOutputTokens += call.outputTokens;\r\n if (call.model) models.add(call.model);\r\n }\r\n\r\n try {\r\n await this.transport.completeTrace(this.id, {\r\n status: 'completed',\r\n output: parsed.output,\r\n systemPrompt: parsed.systemPrompt,\r\n llmCalls: allLLMCalls,\r\n toolCalls: parsed.toolCalls,\r\n models: Array.from(models),\r\n totalInputTokens,\r\n totalOutputTokens,\r\n durationMs,\r\n });\r\n } catch (err) {\r\n if (this.debug) {\r\n console.error('[Lelemon] Failed to complete trace:', err);\r\n }\r\n }\r\n }\r\n\r\n /**\r\n * Complete trace with error\r\n * @param error - The error that occurred\r\n * @param messages - The message history up to the failure (optional)\r\n */\r\n async error(error: Error | unknown, messages?: unknown): Promise<void> {\r\n if (this.completed) 
return;\r\n this.completed = true;\r\n\r\n if (this.disabled || !this.id) return;\r\n\r\n const durationMs = Date.now() - this.startTime;\r\n const parsed = messages ? parseMessages(messages) : null;\r\n\r\n const errorObj = error instanceof Error ? error : new Error(String(error));\r\n\r\n // Merge logged LLM calls\r\n const allLLMCalls = parsed\r\n ? [...this.llmCalls, ...parsed.llmCalls]\r\n : this.llmCalls;\r\n\r\n // Calculate totals\r\n let totalInputTokens = 0;\r\n let totalOutputTokens = 0;\r\n const models = new Set<string>();\r\n\r\n for (const call of allLLMCalls) {\r\n if (call.inputTokens) totalInputTokens += call.inputTokens;\r\n if (call.outputTokens) totalOutputTokens += call.outputTokens;\r\n if (call.model) models.add(call.model);\r\n }\r\n\r\n const request: CompleteTraceRequest = {\r\n status: 'error',\r\n errorMessage: errorObj.message,\r\n errorStack: errorObj.stack,\r\n durationMs,\r\n totalInputTokens,\r\n totalOutputTokens,\r\n models: Array.from(models),\r\n };\r\n\r\n if (parsed) {\r\n request.output = parsed.output;\r\n request.systemPrompt = parsed.systemPrompt;\r\n request.llmCalls = allLLMCalls;\r\n request.toolCalls = parsed.toolCalls;\r\n }\r\n\r\n try {\r\n await this.transport.completeTrace(this.id, request);\r\n } catch (err) {\r\n if (this.debug) {\r\n console.error('[Lelemon] Failed to complete trace:', err);\r\n }\r\n }\r\n }\r\n}\r\n\r\n/**\r\n * Start a new trace\r\n *\r\n * @example\r\n * const t = trace({ input: userMessage });\r\n * try {\r\n * const messages = [...];\r\n * // ... your agent code ...\r\n * await t.success(messages);\r\n * } catch (error) {\r\n * await t.error(error, messages);\r\n * throw error;\r\n * }\r\n */\r\nexport function trace(options: TraceOptions): Trace {\r\n const transport = getTransport();\r\n const debug = globalConfig.debug ?? false;\r\n const disabled = globalConfig.disabled ?? 
!transport.isEnabled();\r\n\r\n const t = new Trace(options, transport, debug, disabled);\r\n\r\n // Initialize async (fire and forget)\r\n t.init().catch((err) => {\r\n if (debug) {\r\n console.error('[Lelemon] Trace init failed:', err);\r\n }\r\n });\r\n\r\n return t;\r\n}\r\n\r\n// Re-export for backwards compatibility\r\nexport { Trace as LLMTracer };\r\n"]}
|