@crevanta/stelvara-sdk 0.1.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +108 -0
- package/dist/auto.cjs +857 -0
- package/dist/auto.cjs.map +1 -0
- package/dist/auto.d.cts +32 -0
- package/dist/auto.d.ts +32 -0
- package/dist/auto.js +827 -0
- package/dist/auto.js.map +1 -0
- package/dist/index.cjs +403 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +267 -0
- package/dist/index.d.ts +267 -0
- package/dist/index.js +368 -0
- package/dist/index.js.map +1 -0
- package/package.json +58 -0
package/dist/auto.js
ADDED
|
@@ -0,0 +1,827 @@
|
|
|
1
|
+
// src/buffer.ts
/** Resolve after `ms` milliseconds. */
var delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

/**
 * Bounded in-memory queue of trace envelopes, flushed to the ingestion API
 * on a fixed interval and (best-effort) on process/page shutdown.
 */
var TraceBuffer = class {
  queue = [];
  timer = null;
  flushing = false;
  running = true;
  config;
  shutdownHook = null;

  constructor(config) {
    this.config = config;
    this.start();
  }

  /**
   * Add a trace to the buffer.
   * Returns false if the buffer is shut down.
   * Drops oldest trace if buffer is full.
   */
  enqueue(trace) {
    if (!this.running) return false;
    if (this.queue.length >= this.config.maxSize) {
      // Bounded buffer: discard the oldest entry rather than grow unbounded.
      this.queue.shift();
    }
    this.queue.push(trace);
    return true;
  }

  /** Number of traces waiting to be flushed. */
  get pendingCount() {
    return this.queue.length;
  }

  /**
   * Flush all pending traces to the API.
   * Sends in batches of batchSize; re-entrant calls while a flush is in
   * progress are no-ops.
   */
  async flush() {
    if (this.flushing || this.queue.length === 0) return;
    this.flushing = true;
    try {
      while (this.queue.length > 0) {
        const batch = this.queue.splice(0, this.config.batchSize);
        await this.sendBatch(batch);
      }
    } finally {
      this.flushing = false;
    }
  }

  /**
   * Flush remaining traces and stop the buffer.
   * Resolves when done or after timeoutMs, whichever comes first.
   */
  async shutdown(timeoutMs = 5e3) {
    if (!this.running) return;
    this.running = false;
    if (this.timer !== null) {
      clearInterval(this.timer);
      this.timer = null;
    }
    this.removeShutdownHook();
    // Best effort: give the final flush up to timeoutMs, then give up.
    const flushPromise = this.flush();
    const timeoutPromise = delay(timeoutMs);
    await Promise.race([flushPromise, timeoutPromise]);
  }

  /**
   * Send a single batch to the ingestion API with retry logic.
   * 201 -> success (server trace ids reported via onFlushSuccess);
   * 429 -> wait Retry-After (or exponential backoff) and retry;
   * any other status drops the batch silently (telemetry is best-effort).
   * Network errors retry with exponential backoff up to maxRetries.
   */
  async sendBatch(batch) {
    const url = `${this.config.endpoint}/traces`;
    // Strip the client-local correlation key before sending over the wire.
    const cleanBatch = batch.map(({ _localKey, ...clean }) => clean);
    for (let attempt = 0; attempt < this.config.maxRetries; attempt++) {
      try {
        const response = await fetch(url, {
          method: "POST",
          headers: {
            Authorization: `Bearer ${this.config.apiKey}`,
            "Content-Type": "application/json"
          },
          body: JSON.stringify(cleanBatch)
        });
        if (response.status === 201) {
          const data = await response.json();
          const traceIds = data.trace_ids ?? [];
          this.config.onFlushSuccess?.(batch, traceIds);
          return;
        }
        if (response.status === 429) {
          // Fix: a non-numeric Retry-After header used to produce NaN and an
          // effectively immediate retry; fall back to exponential backoff.
          const retryAfterHeader = response.headers.get("Retry-After");
          const parsedRetryAfter = retryAfterHeader === null ? NaN : Number.parseInt(retryAfterHeader, 10);
          const retryAfterSec = Number.isFinite(parsedRetryAfter) && parsedRetryAfter >= 0 ? parsedRetryAfter : 2 ** attempt;
          await delay(retryAfterSec * 1e3);
          continue;
        }
        return;
      } catch {
        // Network failure: back off and retry. Telemetry must never throw
        // into the host application, so the error itself is dropped.
        const backoffMs = 2 ** attempt * 1e3;
        await delay(backoffMs);
      }
    }
  }

  /** Begin periodic flushing and register a best-effort shutdown hook. */
  start() {
    this.timer = setInterval(() => {
      void this.flush();
    }, this.config.flushIntervalMs);
    this.installShutdownHook();
  }

  /**
   * Flush on browser `beforeunload` or Node `beforeExit` so traces are not
   * lost when the host terminates normally.
   */
  installShutdownHook() {
    const handler = () => {
      void this.shutdown();
    };
    const g = globalThis;
    if (typeof g.window !== "undefined" && typeof g.window.addEventListener === "function") {
      const win = g.window;
      win.addEventListener("beforeunload", handler);
      this.shutdownHook = () => win.removeEventListener("beforeunload", handler);
    } else if (typeof process !== "undefined" && typeof process.on === "function") {
      process.on("beforeExit", handler);
      this.shutdownHook = () => process.removeListener("beforeExit", handler);
    }
  }

  /** Unregister the hook installed by installShutdownHook(), if any. */
  removeShutdownHook() {
    this.shutdownHook?.();
    this.shutdownHook = null;
  }
};
|
|
123
|
+
|
|
124
|
+
// src/client.ts
|
|
125
|
+
var DEFAULT_ENDPOINT = "https://auinkdnurzlaitpwhknm.supabase.co/functions/v1";

/**
 * Validate user-supplied config and fill in defaults.
 * Throws when apiKey or agentId is missing; trims trailing slashes from
 * the endpoint so path segments can be appended safely.
 */
function resolveConfig(config) {
  const { apiKey, agentId } = config;
  if (!apiKey) throw new Error("apiKey is required");
  if (!agentId) throw new Error("agentId is required");
  const endpoint = (config.endpoint ?? DEFAULT_ENDPOINT).replace(/\/+$/, "");
  return {
    apiKey,
    agentId,
    endpoint,
    bufferSize: config.bufferSize ?? 100,
    batchSize: config.batchSize ?? 50,
    flushIntervalMs: config.flushIntervalMs ?? 5e3,
    maxRetries: config.maxRetries ?? 3,
    enabled: config.enabled ?? true
  };
}
|
|
140
|
+
var StelvaraClient = class {
  config;
  buffer;
  // Maps client-local keys ("stv_local_N") to server-assigned trace UUIDs,
  // populated after each successful flush (see registerTraceIds).
  // NOTE(review): entries are never evicted, so this grows with the number
  // of captured traces in a long-lived process — confirm acceptable.
  traceIdMap = /* @__PURE__ */ new Map();
  traceCounter = 0;
  constructor(config) {
    this.config = resolveConfig(config);
    this.buffer = new TraceBuffer({
      endpoint: this.config.endpoint,
      apiKey: this.config.apiKey,
      maxSize: this.config.bufferSize,
      batchSize: this.config.batchSize,
      flushIntervalMs: this.config.flushIntervalMs,
      maxRetries: this.config.maxRetries,
      onFlushSuccess: (batch, traceIds) => this.registerTraceIds(batch, traceIds)
    });
  }
  /** Shared auth + JSON headers for every ingestion API request. */
  apiHeaders() {
    return {
      Authorization: `Bearer ${this.config.apiKey}`,
      "Content-Type": "application/json"
    };
  }
  /**
   * Resolve a local "stv_local_*" key to its server UUID when known.
   * Warns (with the given suffix) and returns the key unchanged when the
   * trace has not been flushed yet. Non-local ids pass through untouched.
   */
  resolveTraceId(traceId, warnSuffix) {
    if (!traceId.startsWith("stv_local_")) return traceId;
    const serverId = this.traceIdMap.get(traceId);
    if (serverId) return serverId;
    console.warn(
      `[stelvara] Trace ${traceId} not yet flushed \u2014 ${warnSuffix}`
    );
    return traceId;
  }
  /**
   * Capture a trace payload and enqueue for delivery.
   * Returns a local trace key for use with tagOutcome().
   * Returns null if the client is disabled.
   */
  captureTrace(payload, options) {
    if (!this.config.enabled) return null;
    const localKey = `stv_local_${++this.traceCounter}`;
    const envelope = {
      agent_id: this.config.agentId,
      payload,
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      _localKey: localKey
    };
    if (options?.sessionId != null) {
      envelope.session_id = options.sessionId;
    }
    if (options?.parentTraceId != null) {
      envelope.parent_trace_id = options.parentTraceId;
    }
    if (options?.traceType != null) {
      envelope.trace_type = options.traceType;
    }
    this.buffer.enqueue(envelope);
    return localKey;
  }
  /**
   * Tag a trace with business outcomes.
   * Resolves local trace keys to server UUIDs automatically.
   * Fire-and-forget — catches errors internally.
   */
  async tagOutcome(traceId, outcomes) {
    if (!this.config.enabled) return;
    const resolvedId = this.resolveTraceId(traceId, "outcome may fail");
    const url = `${this.config.endpoint}/outcomes`;
    try {
      await fetch(url, {
        method: "POST",
        headers: this.apiHeaders(),
        body: JSON.stringify({ trace_id: resolvedId, outcomes })
      });
    } catch {
      console.warn(`[stelvara] Failed to tag outcome for trace ${resolvedId}`);
    }
  }
  /**
   * Tag multiple traces with business outcomes in a single API call.
   *
   * @param entries - Array of { trace_id, outcomes } objects (max 100).
   * @returns Response with inserted count, failed count, and per-entry results.
   * @throws {Error} If entries is empty or exceeds 100 items.
   *
   * @example
   * const result = await client.tagOutcomesBatch([
   *   { trace_id: 'uuid-1', outcomes: { revenue_impact: 45 } },
   *   { trace_id: 'uuid-2', outcomes: { customer_satisfied: 1 } },
   * ]);
   * console.log(`Tagged ${result.inserted} outcomes`);
   */
  async tagOutcomesBatch(entries) {
    if (!this.config.enabled) return null;
    if (entries.length === 0) {
      throw new Error("entries must not be empty");
    }
    if (entries.length > 100) {
      throw new Error("entries must not exceed 100 items");
    }
    // Same local-key resolution as tagOutcome, applied per entry.
    const resolvedEntries = entries.map((entry) => ({
      trace_id: this.resolveTraceId(entry.trace_id, "batch entry may fail"),
      outcomes: entry.outcomes
    }));
    const url = `${this.config.endpoint}/outcomes-batch`;
    const response = await fetch(url, {
      method: "POST",
      headers: this.apiHeaders(),
      body: JSON.stringify({ entries: resolvedEntries })
    });
    return await response.json();
  }
  /**
   * Tag all traces in a session with business outcomes.
   * The server resolves trace IDs by session_id.
   *
   * @param sessionId - Session identifier.
   * @param outcomes - Dict of outcome metrics applied to every trace in the session.
   * @returns Response with traces_found, inserted count, and per-trace results.
   *
   * @example
   * const result = await client.tagSessionOutcome('session-uuid', {
   *   revenue_impact: 150,
   *   customer_satisfied: 1,
   * });
   * console.log(`Tagged ${result.traces_found} traces`);
   */
  async tagSessionOutcome(sessionId, outcomes) {
    if (!this.config.enabled) return null;
    const url = `${this.config.endpoint}/outcomes-batch/session`;
    const response = await fetch(url, {
      method: "POST",
      headers: this.apiHeaders(),
      body: JSON.stringify({ session_id: sessionId, outcomes })
    });
    return await response.json();
  }
  /**
   * Flush pending traces and shut down the client.
   */
  async shutdown(timeoutMs) {
    await this.buffer.shutdown(timeoutMs);
  }
  /** Number of traces waiting to be flushed. */
  get pendingCount() {
    return this.buffer.pendingCount;
  }
  /**
   * Map local trace keys to server-assigned trace IDs after flush.
   */
  registerTraceIds(batch, traceIds) {
    for (let i = 0; i < batch.length; i++) {
      const localKey = batch[i]._localKey;
      if (localKey && traceIds[i]) {
        this.traceIdMap.set(localKey, traceIds[i]);
      }
    }
  }
};
|
|
312
|
+
|
|
313
|
+
// src/index.ts
|
|
314
|
+
// Module-level singleton backing the functional API below.
var _client = null;

/**
 * Initialize the global Stelvara client.
 * Re-initializing shuts down the previous client (fire-and-forget) before
 * replacing it.
 */
function init(config) {
  const previous = _client;
  if (previous !== null) {
    void previous.shutdown();
  }
  _client = new StelvaraClient(config);
}

/**
 * Capture a trace via the global client.
 * @throws {Error} If init() has not been called yet.
 */
function captureTrace(payload, options) {
  if (_client === null) {
    throw new Error("Stelvara not initialized. Call init() first.");
  }
  return _client.captureTrace(payload, options);
}

/** True once init() has created the global client. */
function isInitialized() {
  return _client !== null;
}
|
|
330
|
+
|
|
331
|
+
// src/auto.ts
|
|
332
|
+
// URL substrings that identify interceptable LLM chat endpoints.
var ANTHROPIC_PATTERN = "api.anthropic.com/v1/messages";
var OPENAI_PATTERN = "api.openai.com/v1/chat/completions";
// Module state for the fetch patch: the saved original fetch, whether the
// patch is installed, the session id attached to auto-captured traces, and
// a re-entrancy flag so the SDK's own uploads are not traced.
var _originalFetch = null;
var _installed = false;
var _sessionId = null;
var _inFlight = false;

/**
 * Read an environment variable in either Deno or Node.
 * Returns undefined when no environment is accessible.
 */
function readEnvVar(name) {
  const g = globalThis;
  const deno = g.Deno;
  if (typeof deno !== "undefined" && deno.env?.get) {
    return deno.env.get(name);
  }
  const proc = g.process;
  if (typeof proc !== "undefined" && proc.env) {
    return proc.env[name];
  }
  return void 0;
}

/**
 * Classify a request URL as an Anthropic or OpenAI chat endpoint.
 * Returns null for anything else; those requests pass through untraced.
 */
function detectProvider(url) {
  if (url.includes(ANTHROPIC_PATTERN)) return "anthropic";
  if (url.includes(OPENAI_PATTERN)) return "openai";
  return null;
}
|
|
353
|
+
/**
 * Normalize a fetch() input (string | URL | Request-like) to a URL string.
 */
function resolveUrl(input) {
  if (typeof input === "string") {
    return input;
  }
  return input instanceof URL ? input.href : input.url;
}
|
|
358
|
+
/**
 * Best-effort JSON parse of a fetch RequestInit body.
 * Returns null for missing, empty, or non-string bodies and for invalid
 * JSON — the interceptor simply skips tracing in those cases.
 */
function parseBodySync(init2) {
  const body = init2?.body;
  if (typeof body !== "string" || body === "") return null;
  try {
    return JSON.parse(body);
  } catch {
    return null;
  }
}
|
|
366
|
+
/**
 * Walk messages newest-to-oldest and return the text of the most recent
 * user message. Handles plain-string content and content-block arrays
 * (first "text" block wins). Returns "" when none is found.
 */
function extractLastUserMessage(messages) {
  for (let i = messages.length - 1; i >= 0; i--) {
    const msg = messages[i];
    if (msg?.role !== "user") continue;
    const content = msg.content;
    if (typeof content === "string") return content;
    if (Array.isArray(content)) {
      const textBlock = content.find((b) => b?.type === "text" && typeof b.text === "string");
      if (textBlock) return textBlock.text;
    }
  }
  return "";
}

/**
 * Build a trace payload from a non-streaming Anthropic Messages exchange:
 * prompt text from the request, model/response/usage from the response.
 */
function extractAnthropicTrace(reqBody, resBody, durationMs) {
  const messages = reqBody.messages ?? [];
  const userMsg = extractLastUserMessage(messages);
  const systemMsg = typeof reqBody.system === "string" ? reqBody.system : "";
  const payload = {
    input: { user_message: userMsg },
    model: {
      name: resBody.model ?? reqBody.model ?? "unknown",
      provider: "anthropic"
    },
    performance: { duration_ms: durationMs }
  };
  const prompt = {};
  if (systemMsg) prompt.system = systemMsg;
  if (userMsg) prompt.user = userMsg;
  if (prompt.system || prompt.user) payload.prompt = prompt;
  // First text block becomes the response text; every tool_use block
  // becomes a tool-call entry.
  const toolCalls = [];
  for (const block of resBody.content ?? []) {
    if (block.type === "text" && typeof block.text === "string" && !payload.response?.text) {
      payload.response = { ...payload.response, text: block.text };
    } else if (block.type === "tool_use") {
      toolCalls.push({ name: block.name ?? "unknown", arguments: block.input ?? void 0 });
    }
  }
  if (toolCalls.length > 0) payload.tool_calls = toolCalls;
  if (typeof resBody.stop_reason === "string") {
    payload.response = { ...payload.response, finish_reason: resBody.stop_reason };
  }
  const usage = resBody.usage;
  if (usage) {
    const inputTokens = usage.input_tokens ?? 0;
    const outputTokens = usage.output_tokens ?? 0;
    const total = inputTokens + outputTokens;
    if (total > 0) {
      payload.response = { ...payload.response, tokens_used: total };
    }
    // Usage present: rebuild performance with token counts included.
    payload.performance = { duration_ms: durationMs };
    if (inputTokens) payload.performance.tokens_input = inputTokens;
    if (outputTokens) payload.performance.tokens_output = outputTokens;
  }
  return payload;
}
|
|
428
|
+
/**
 * Build a trace payload from a non-streaming OpenAI chat-completion
 * exchange. Only the first choice is inspected.
 */
function extractOpenAITrace(reqBody, resBody, durationMs) {
  const messages = reqBody.messages ?? [];
  // Last system message wins (loop does not break).
  let systemMsg = "";
  for (const msg of messages) {
    if (msg.role === "system" && typeof msg.content === "string") {
      systemMsg = msg.content;
    }
  }
  const userMsg = extractLastUserMessage(messages);
  const payload = {
    input: { user_message: userMsg },
    model: {
      name: resBody.model ?? reqBody.model ?? "unknown",
      provider: "openai"
    },
    performance: { duration_ms: durationMs }
  };
  const prompt = {};
  if (systemMsg) prompt.system = systemMsg;
  if (userMsg) prompt.user = userMsg;
  if (prompt.system || prompt.user) payload.prompt = prompt;
  const choices = resBody.choices ?? [];
  if (choices.length > 0) {
    const firstChoice = choices[0];
    const message = firstChoice.message;
    if (message) {
      if (typeof message.content === "string") {
        payload.response = { ...payload.response, text: message.content };
      }
      const rawToolCalls = message.tool_calls;
      if (rawToolCalls && rawToolCalls.length > 0) {
        // Tool-call arguments arrive JSON-encoded; keep the raw string
        // when they fail to parse.
        payload.tool_calls = rawToolCalls.map((tc) => {
          const fn = tc.function;
          const entry = { name: fn?.name ?? "unknown" };
          if (typeof fn?.arguments === "string") {
            try {
              entry.arguments = JSON.parse(fn.arguments);
            } catch {
              entry.arguments = { raw: fn.arguments };
            }
          }
          return entry;
        });
      }
    }
    if (typeof firstChoice.finish_reason === "string") {
      payload.response = { ...payload.response, finish_reason: firstChoice.finish_reason };
    }
  }
  const usage = resBody.usage;
  if (usage) {
    const promptTokens = usage.prompt_tokens ?? 0;
    const completionTokens = usage.completion_tokens ?? 0;
    const total = usage.total_tokens ?? promptTokens + completionTokens;
    if (total > 0) {
      payload.response = { ...payload.response, tokens_used: total };
    }
    // Usage present: rebuild performance with token counts included.
    payload.performance = { duration_ms: durationMs };
    if (promptTokens) payload.performance.tokens_input = promptTokens;
    if (completionTokens) payload.performance.tokens_output = completionTokens;
  }
  return payload;
}
|
|
492
|
+
/** Fresh accumulator for parsing an Anthropic SSE stream. */
function makeAnthropicState() {
  const state = {
    model: "",
    inputTokens: 0,
    outputTokens: 0,
    stopReason: "",
    textContent: "",
    toolCalls: [],
    currentToolName: "",
    currentToolArgs: "",
    done: false
  };
  return state;
}

/** Fresh accumulator for parsing an OpenAI SSE stream. */
function makeOpenAIState() {
  const state = {
    model: "",
    textContent: "",
    toolCallMap: new Map(),
    finishReason: "",
    done: false
  };
  return state;
}
|
|
514
|
+
/**
 * Fold one Anthropic SSE line into the streaming accumulator.
 * Non-"data: " lines and malformed JSON are ignored — parsing is strictly
 * best-effort so the passthrough stream is never disturbed.
 */
function processAnthropicSSE(line, state) {
  if (!line.startsWith("data: ")) return;
  let event;
  try {
    event = JSON.parse(line.slice(6));
  } catch {
    return;
  }
  if (event.type === "message_start") {
    const msg = event.message;
    if (msg) {
      if (typeof msg.model === "string") state.model = msg.model;
      if (msg.usage?.input_tokens) state.inputTokens = msg.usage.input_tokens;
    }
  } else if (event.type === "content_block_start") {
    // A tool_use block begins: remember its name and start collecting the
    // JSON argument fragments.
    if (event.content_block?.type === "tool_use") {
      state.currentToolName = event.content_block.name ?? "unknown";
      state.currentToolArgs = "";
    }
  } else if (event.type === "content_block_delta") {
    const delta = event.delta;
    if (delta?.type === "text_delta" && typeof delta.text === "string") {
      state.textContent += delta.text;
    }
    if (delta?.type === "input_json_delta" && typeof delta.partial_json === "string") {
      state.currentToolArgs += delta.partial_json;
    }
  } else if (event.type === "content_block_stop") {
    // Finalize the in-progress tool call, parsing its accumulated args.
    if (state.currentToolName) {
      const toolCall = { name: state.currentToolName };
      if (state.currentToolArgs) {
        try {
          toolCall.arguments = JSON.parse(state.currentToolArgs);
        } catch {
          toolCall.arguments = { raw: state.currentToolArgs };
        }
      }
      state.toolCalls.push(toolCall);
      state.currentToolName = "";
      state.currentToolArgs = "";
    }
  } else if (event.type === "message_delta") {
    if (typeof event.delta?.stop_reason === "string") state.stopReason = event.delta.stop_reason;
    if (event.usage?.output_tokens) state.outputTokens = event.usage.output_tokens;
  } else if (event.type === "message_stop") {
    state.done = true;
  }
}
|
|
577
|
+
/**
 * Fold one OpenAI SSE line into the streaming accumulator.
 * "data: [DONE]" ends the stream; malformed JSON is ignored. Tool-call
 * fragments are merged by index since names/arguments stream in pieces.
 */
function processOpenAISSE(line, state) {
  if (!line.startsWith("data: ")) return;
  const json = line.slice(6).trim();
  if (json === "[DONE]") {
    state.done = true;
    return;
  }
  let event;
  try {
    event = JSON.parse(json);
  } catch {
    return;
  }
  if (typeof event.model === "string" && !state.model) {
    state.model = event.model;
  }
  const choices = event.choices ?? [];
  if (choices.length === 0) return;
  const choice = choices[0];
  const delta = choice.delta;
  if (!delta) return;
  if (typeof delta.content === "string") {
    state.textContent += delta.content;
  }
  if (typeof choice.finish_reason === "string") {
    state.finishReason = choice.finish_reason;
  }
  const toolCallDeltas = delta.tool_calls;
  if (toolCallDeltas) {
    for (const tcd of toolCallDeltas) {
      const idx = tcd.index ?? 0;
      const fn = tcd.function;
      const existing = state.toolCallMap.get(idx);
      if (existing) {
        // Continuation fragment: append to the entry for this index.
        if (fn?.name) existing.name += fn.name;
        if (fn?.arguments) existing.argumentsRaw += fn.arguments;
      } else {
        state.toolCallMap.set(idx, {
          name: fn?.name ?? "",
          argumentsRaw: fn?.arguments ?? ""
        });
      }
    }
  }
}
|
|
622
|
+
/**
 * Assemble the final trace payload for a completed Anthropic SSE stream
 * from the request body and the accumulated stream state.
 * Missing values become explicitly-undefined keys (dropped when the
 * payload is JSON-serialized).
 */
function buildAnthropicStreamTrace(reqBody, state, durationMs) {
  const messages = reqBody.messages ?? [];
  const userMsg = extractLastUserMessage(messages);
  const systemMsg = typeof reqBody.system === "string" ? reqBody.system : "";
  const totalTokens = state.inputTokens + state.outputTokens;
  const prompt = {};
  if (systemMsg) prompt.system = systemMsg;
  if (userMsg) prompt.user = userMsg;
  const payload = {
    input: { user_message: userMsg },
    model: { name: state.model || reqBody.model || "unknown", provider: "anthropic" },
    response: {
      text: state.textContent,
      finish_reason: state.stopReason || void 0,
      tokens_used: totalTokens > 0 ? totalTokens : void 0
    },
    performance: {
      duration_ms: durationMs,
      tokens_input: state.inputTokens || void 0,
      tokens_output: state.outputTokens || void 0
    }
  };
  if (prompt.system || prompt.user) payload.prompt = prompt;
  if (state.toolCalls.length > 0) payload.tool_calls = state.toolCalls;
  return payload;
}
|
|
648
|
+
/**
 * Assemble the final trace payload for a completed OpenAI SSE stream from
 * the request body and the accumulated stream state.
 */
function buildOpenAIStreamTrace(reqBody, state, durationMs) {
  const messages = reqBody.messages ?? [];
  // First system message wins here (loop breaks on the first match).
  let systemMsg = "";
  for (const msg of messages) {
    if (msg.role === "system" && typeof msg.content === "string") {
      systemMsg = msg.content;
      break;
    }
  }
  const userMsg = extractLastUserMessage(messages);
  const prompt = {};
  if (systemMsg) prompt.system = systemMsg;
  if (userMsg) prompt.user = userMsg;
  const payload = {
    input: { user_message: userMsg },
    model: { name: state.model || reqBody.model || "unknown", provider: "openai" },
    response: {
      text: state.textContent || void 0,
      finish_reason: state.finishReason || void 0
    },
    performance: { duration_ms: durationMs }
  };
  if (prompt.system || prompt.user) payload.prompt = prompt;
  if (state.toolCallMap.size > 0) {
    // Parse each tool call's accumulated JSON arguments, keeping the raw
    // string when parsing fails.
    payload.tool_calls = [...state.toolCallMap.values()].map(({ name, argumentsRaw }) => {
      const entry = { name };
      if (argumentsRaw) {
        try {
          entry.arguments = JSON.parse(argumentsRaw);
        } catch {
          entry.arguments = { raw: argumentsRaw };
        }
      }
      return entry;
    });
  }
  return payload;
}
|
|
686
|
+
/**
 * Tee a streaming (SSE) provider response: pass raw chunks through to the
 * caller unchanged while accumulating them into a provider-specific state,
 * then capture a trace once the upstream stream ends.
 * Returns the original response untouched when it has no body.
 */
function wrapStreamingResponse(response, provider, reqBody, startTime) {
  const body = response.body;
  if (!body) return response;
  const decoder = new TextDecoder();
  const state = provider === "anthropic" ? makeAnthropicState() : makeOpenAIState();
  let buffer = "";
  const transformedStream = new ReadableStream({
    // NOTE(review): the entire upstream is pumped inside start(), so chunks
    // are enqueued ahead of consumer reads (no backpressure propagation) —
    // confirm this is acceptable for large streams.
    async start(controller) {
      const reader = body.getReader();
      try {
        for (; ; ) {
          const { done, value } = await reader.read();
          if (done) break;
          // Forward raw bytes first; the SSE parsing below is observe-only.
          controller.enqueue(value);
          buffer += decoder.decode(value, { stream: true });
          // SSE events are newline-delimited; keep any trailing partial
          // line in `buffer` until the next chunk arrives.
          const lines = buffer.split("\n");
          buffer = lines.pop() ?? "";
          for (const line of lines) {
            const trimmed = line.trim();
            if (!trimmed) continue;
            if (provider === "anthropic") {
              processAnthropicSSE(trimmed, state);
            } else {
              processOpenAISSE(trimmed, state);
            }
          }
        }
        // Process any final partial line left after the stream ends.
        if (buffer.trim()) {
          if (provider === "anthropic") {
            processAnthropicSSE(buffer.trim(), state);
          } else {
            processOpenAISSE(buffer.trim(), state);
          }
        }
        state.done = true;
        // Trace capture is best-effort; never break the stream over it.
        try {
          const durationMs = Date.now() - startTime;
          const tracePayload = provider === "anthropic" ? buildAnthropicStreamTrace(reqBody, state, durationMs) : buildOpenAIStreamTrace(reqBody, state, durationMs);
          const hasToolCalls = (tracePayload.tool_calls?.length ?? 0) > 0;
          const traceType = hasToolCalls ? "tool_use" : "chat";
          if (isInitialized()) {
            captureTrace(tracePayload, {
              sessionId: _sessionId ?? void 0,
              traceType
            });
          }
        } catch {
        }
        controller.close();
      } catch (err) {
        // Propagate upstream read failures to the consumer.
        controller.error(err);
      } finally {
        reader.releaseLock();
      }
    }
  });
  // Preserve the original status line and headers on the teed response.
  return new Response(transformedStream, {
    status: response.status,
    statusText: response.statusText,
    headers: response.headers
  });
}
|
|
748
|
+
/**
 * Drop-in replacement for global fetch. Requests to known LLM endpoints
 * are forwarded via the original fetch and the request/response pair is
 * captured as a trace; all other requests pass straight through.
 * NOTE(review): _inFlight is a single module-wide flag — while one
 * provider call is awaited, concurrent provider calls bypass tracing.
 */
async function patchedFetch(input, init2) {
  // Re-entrancy guard: never trace the SDK's own outbound requests.
  if (_inFlight) {
    return _originalFetch(input, init2);
  }
  const url = resolveUrl(input);
  const provider = detectProvider(url);
  if (!provider) {
    return _originalFetch(input, init2);
  }
  _inFlight = true;
  const startTime = Date.now();
  let reqBody = null;
  try {
    reqBody = parseBodySync(init2);
  } catch {
  }
  try {
    const response = await _originalFetch(input, init2);
    const isStreaming = reqBody?.stream === true;
    if (isStreaming && reqBody) {
      // Streaming request: tee the SSE body and capture when it ends;
      // fall back to the untraced response if wrapping fails.
      try {
        return wrapStreamingResponse(response, provider, reqBody, startTime);
      } catch {
        return response;
      }
    }
    if (reqBody) {
      // Non-streaming: parse a clone so the caller can still read the body.
      // Any failure here is swallowed — tracing must not affect the call.
      try {
        const clone = response.clone();
        const resBody = await clone.json();
        const durationMs = Date.now() - startTime;
        const tracePayload = provider === "anthropic" ? extractAnthropicTrace(reqBody, resBody, durationMs) : extractOpenAITrace(reqBody, resBody, durationMs);
        const hasToolCalls = (tracePayload.tool_calls?.length ?? 0) > 0;
        const traceType = hasToolCalls ? "tool_use" : "chat";
        if (isInitialized()) {
          captureTrace(tracePayload, {
            sessionId: _sessionId ?? void 0,
            traceType
          });
        }
      } catch {
      }
    }
    return response;
  } finally {
    _inFlight = false;
  }
}
|
|
796
|
+
/**
 * Patch globalThis.fetch so Anthropic/OpenAI calls are traced transparently.
 * Credentials come from options or the STELVARA_API_KEY /
 * STELVARA_AGENT_ID environment variables; when both are available and the
 * SDK is not yet initialized, init() is called automatically.
 * Idempotent: repeated calls are no-ops until uninstall().
 */
function install(options) {
  if (_installed) return;
  const apiKey = options?.apiKey ?? readEnvVar("STELVARA_API_KEY");
  const agentId = options?.agentId ?? readEnvVar("STELVARA_AGENT_ID");
  const haveCredentials = Boolean(apiKey) && Boolean(agentId);
  if (!isInitialized() && haveCredentials) {
    init({ apiKey, agentId, endpoint: options?.endpoint });
  }
  _originalFetch = globalThis.fetch;
  globalThis.fetch = patchedFetch;
  _installed = true;
}
|
|
807
|
+
/**
 * Restore the original globalThis.fetch saved by install().
 * No-op when the patch is not installed.
 */
function uninstall() {
  if (!_installed) return;
  if (!_originalFetch) return;
  globalThis.fetch = _originalFetch;
  _originalFetch = null;
  _installed = false;
}
|
|
813
|
+
/**
 * Set the session id attached to auto-captured traces.
 * Pass null to stop attaching a session id.
 */
function setSessionId(id) {
  _sessionId = id;
}
|
|
816
|
+
/**
 * Test-only helper: uninstall the fetch patch (if installed) and clear the
 * session id. Does not tear down the global client created by init().
 */
function _resetForTesting() {
  if (_installed) uninstall();
  _sessionId = null;
}
|
|
820
|
+
// Side effect on import: patch global fetch immediately so LLM calls are
// traced without explicit setup (credentials read from env vars, if set).
install();
export {
  _resetForTesting,
  install,
  setSessionId,
  uninstall
};
//# sourceMappingURL=auto.js.map
|