elasticdash-test 0.1.18-alpha-26 → 0.1.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +165 -29
- package/dist/index.js +7 -0
- package/dist/index.js.map +1 -1
- package/dist/interceptors/ai-interceptor.d.ts +16 -0
- package/dist/interceptors/ai-interceptor.d.ts.map +1 -1
- package/dist/interceptors/ai-interceptor.js +137 -4
- package/dist/interceptors/ai-interceptor.js.map +1 -1
- package/dist/interceptors/http.d.ts.map +1 -1
- package/dist/interceptors/http.js +1 -0
- package/dist/interceptors/http.js.map +1 -1
- package/dist/interceptors/workflow-ai.d.ts +1 -1
- package/dist/interceptors/workflow-ai.d.ts.map +1 -1
- package/dist/interceptors/workflow-ai.js +64 -22
- package/dist/interceptors/workflow-ai.js.map +1 -1
- package/dist/observability.d.ts +0 -1
- package/dist/observability.d.ts.map +1 -1
- package/dist/observability.js +1 -2
- package/dist/observability.js.map +1 -1
- package/dist/socket-connector.d.ts +0 -1
- package/dist/socket-connector.d.ts.map +1 -1
- package/dist/socket-connector.js +5 -3
- package/dist/socket-connector.js.map +1 -1
- package/package.json +1 -1
- package/src/socket-connector.ts +1 -1
package/dist/index.cjs
CHANGED
@@ -1468,6 +1468,7 @@ var init_http = __esm({
 init_side_effects();
 AI_URL_PATTERNS = [
 /https?:\/\/api\.openai\.com\/v1\/((chat\/)?completions|embeddings)/,
+/https?:\/\/api\.anthropic\.com\/v1\/messages/,
 /https?:\/\/generativelanguage\.googleapis\.com\/.*\/models\/[^/:]+:(generateContent|streamGenerateContent)/,
 /https?:\/\/api\.x\.ai\/v1\/(chat\/)?completions/,
 /https?:\/\/api\.moonshot\.ai\/v1\/(chat\/)?completions/
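The only change in this hunk is that Anthropic's Messages endpoint now counts as an AI URL. A quick illustrative check (not part of the package):

    const anthropicMessages = /https?:\/\/api\.anthropic\.com\/v1\/messages/;
    console.log(anthropicMessages.test('https://api.anthropic.com/v1/messages')); // true
    console.log(anthropicMessages.test('https://api.anthropic.com/v1/complete')); // false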
@@ -1478,12 +1479,39 @@
 // src/interceptors/ai-interceptor.ts
 var ai_interceptor_exports = {};
 __export(ai_interceptor_exports, {
+consumeCapturedLLMRequest: () => consumeCapturedLLMRequest,
 installAIInterceptor: () => installAIInterceptor,
 uninstallAIInterceptor: () => uninstallAIInterceptor
 });
 function isAIWrapperActive() {
 return (globalThis[AI_WRAPPER_KEY] ?? 0) > 0;
 }
+function consumeCapturedLLMRequest() {
+const g5 = globalThis;
+const req = g5[LLM_REQUEST_KEY];
+if (req) g5[LLM_REQUEST_KEY] = void 0;
+return req;
+}
+function extractPromptSnippet(body) {
+let messages;
+if (Array.isArray(body.messages)) messages = body.messages;
+else if (Array.isArray(body.contents)) messages = body.contents;
+if (!messages || messages.length === 0) return void 0;
+for (let i = messages.length - 1; i >= 0; i--) {
+const msg = messages[i];
+if (!msg) continue;
+if (msg.role === "user") {
+let content = msg.content;
+if (Array.isArray(content)) {
+content = content.map((b) => b && typeof b === "object" ? String(b.text ?? "") : String(b)).filter(Boolean).join("");
+}
+if (typeof content === "string") {
+return content.slice(0, 100);
+}
+}
+}
+return void 0;
+}
 function extractUsage(provider, body) {
 if (provider === "openai" || provider === "grok" || provider === "kimi") {
 const u = body.usage;
@@ -1723,6 +1751,30 @@ async function bufferSSEStream(provider, stream) {
 }
 return completion;
 }
+function extractStreamUsage(provider, rawSSE) {
+const lines = rawSSE.split("\n");
+for (let i = lines.length - 1; i >= 0; i--) {
+const line = lines[i];
+if (!line.startsWith("data: ")) continue;
+const data = line.slice(6).trim();
+if (data === "[DONE]") continue;
+try {
+const obj = JSON.parse(data);
+const usage = extractUsage(provider, obj);
+if (usage) return usage;
+if (obj.type === "message_delta") {
+const u = obj.usage;
+if (u) return extractUsage("anthropic", { usage: u });
+}
+if (obj.type === "message_start") {
+const msg = obj.message;
+if (msg?.usage) return extractUsage("anthropic", { usage: msg.usage });
+}
+} catch {
+}
+}
+return void 0;
+}
 function synthesizeCompletionJSON(provider, completion) {
 if (provider === "gemini") {
 return {
@@ -1803,7 +1855,57 @@ function installAIInterceptor() {
 const url = typeof input === "string" ? input : input instanceof URL ? input.href : input.url;
 const provider = detectProvider2(url);
 if (provider && isAIWrapperActive()) {
-
+let capturedReq;
+let capturedModel = "unknown";
+let capturedMessages;
+let capturedSnippet;
+try {
+const rawBody = init?.body;
+if (rawBody && typeof rawBody === "string") {
+capturedReq = JSON.parse(rawBody);
+capturedModel = extractModel2(provider, capturedReq, url);
+capturedMessages = Array.isArray(capturedReq.messages) ? capturedReq.messages : Array.isArray(capturedReq.contents) ? capturedReq.contents : void 0;
+capturedSnippet = extractPromptSnippet(capturedReq);
+}
+} catch {
+}
+const response2 = await originalFetch2(input, init);
+if (capturedReq) {
+const captured = {
+url,
+provider,
+model: capturedModel,
+messages: capturedMessages,
+body: capturedReq,
+promptSnippet: capturedSnippet
+};
+const isStreaming2 = capturedReq.stream === true;
+try {
+const cloned = response2.clone();
+if (!isStreaming2) {
+const responseBody = await cloned.json();
+captured.usage = extractUsage(provider, responseBody);
+} else if (cloned.body) {
+try {
+const decoder = new TextDecoder();
+const reader = cloned.body.getReader();
+let rawSSE = "";
+for (; ; ) {
+const { done, value } = await reader.read();
+if (done) break;
+rawSSE += decoder.decode(value, { stream: true });
+}
+reader.releaseLock();
+captured.usage = extractStreamUsage(provider, rawSSE);
+} catch {
+}
+}
+} catch {
+}
+;
+globalThis[LLM_REQUEST_KEY] = captured;
+}
+return response2;
 }
 const traceAtCall = getCurrentTrace();
 const obsCtx = getObservabilityContext();
@@ -1828,7 +1930,7 @@ function installAIInterceptor() {
 } catch {
 }
 const ctx = getCaptureContext();
-if (!traceAtCall && !ctx && obsCtx) {
+if (!traceAtCall && !ctx && obsCtx && !isAIWrapperActive()) {
 const id = obsCtx.nextId();
 const start = rawDateNow();
 const eventInput = { url, provider, model, prompt, messages };
@@ -1867,7 +1969,7 @@ function installAIInterceptor() {
 }
 return response2;
 }
-if (!ctx && httpCtx) {
+if (!ctx && httpCtx && !isAIWrapperActive()) {
 const id = httpCtx.nextId();
 const eventInput = { url, provider, model, prompt, messages };
 const frozen = getHttpFrozenEvent(id);
@@ -2016,7 +2118,7 @@ function uninstallAIInterceptor() {
 originalFetch2 = null;
 }
 }
-var AI_WRAPPER_KEY, AI_PATTERNS2, originalFetch2;
+var AI_WRAPPER_KEY, LLM_REQUEST_KEY, AI_PATTERNS2, originalFetch2;
 var init_ai_interceptor = __esm({
 "src/interceptors/ai-interceptor.ts"() {
 "use strict";
@@ -2025,6 +2127,7 @@ var init_ai_interceptor = __esm({
 init_side_effects();
 init_telemetry_push();
 AI_WRAPPER_KEY = "__elasticdash_ai_wrapper_depth__";
+LLM_REQUEST_KEY = "__elasticdash_last_llm_request__";
 AI_PATTERNS2 = {
 openai: /https?:\/\/api\.openai\.com\/v1\/((chat\/)?completions|embeddings)/,
 anthropic: /https?:\/\/api\.anthropic\.com\/v1\/messages/,
@@ -3057,13 +3160,12 @@ var init_trigger_executor = __esm({
 function connectToBackend(options) {
 if (socket?.connected) return socket;
 if (socket) return socket;
-const { serverUrl, apiKey, sessionId
+const { serverUrl, apiKey, sessionId } = options;
 const cwd = process.cwd();
 socket = (0, import_socket.io)(serverUrl, {
 auth: {
 ...apiKey ? { apiKey } : {},
-sessionId
-...projectId ? { projectId } : {}
+sessionId
 },
 transports: ["websocket", "polling"],
 reconnection: true,
@@ -3076,11 +3178,14 @@ function connectToBackend(options) {
 const workflows = scanWorkflows(cwd);
 socket.emit("register", {
 sessionId,
-...projectId ? { projectId } : {},
 tools: tools.map((t) => t.name),
 workflows: workflows.map((w) => w.name)
 });
 });
+socket.on("auth:ok", (data) => {
+debugLog(`[elasticdash] Authenticated for project ${data.projectId}`);
+socket.emit("join", `observability:project:${data.projectId}`);
+});
 socket.on("portal:task", async (task, ack) => {
 debugLog(`[elasticdash] Socket received portal task: ${task.taskId} type=${task.type} name=${task.name}`);
 try {
@@ -3157,7 +3262,6 @@ function initObservability(options) {
 throw new Error("[elasticdash] initObservability: serverUrl is required (set ELASTICDASH_API_URL or pass serverUrl option)");
 }
 const apiKey = options?.apiKey ?? process.env.ELASTICDASH_API_KEY;
-const projectId = options?.projectId ?? process.env.ELASTICDASH_PROJECT_ID;
 const sessionId = options?.sessionId ?? process.env.ELASTICDASH_SESSION_ID ?? (0, import_node_crypto3.randomUUID)();
 const sampleRate = options?.sampleRate ?? 1;
 const redactKeys = options?.redactKeys ?? [];
@@ -3194,7 +3298,7 @@ function initObservability(options) {
 interceptFetch();
 installDBAutoInterceptor().catch(() => {
 });
-connectToBackend({ serverUrl, apiKey, sessionId
+connectToBackend({ serverUrl, apiKey, sessionId });
 heartbeatTimer = setInterval(() => {
 batcher.enqueue({
 id: ctx.nextId(),
@@ -4618,6 +4722,7 @@ init_recorder();
 init_side_effects();
 init_telemetry_push();
 init_mock_resolver();
+init_ai_interceptor();
 var AI_WRAPPER_KEY2 = "__elasticdash_ai_wrapper_depth__";
 var g4 = globalThis;
 if (g4[AI_WRAPPER_KEY2] == null) g4[AI_WRAPPER_KEY2] = 0;
@@ -4646,6 +4751,13 @@ function extractUsage2(output) {
 totalTokens: u.total_tokens
 };
 }
+if (u.inputTokens != null || u.outputTokens != null) {
+return {
+inputTokens: u.inputTokens,
+outputTokens: u.outputTokens,
+totalTokens: (u.inputTokens ?? 0) + (u.outputTokens ?? 0)
+};
+}
 }
 if (o.usageMetadata && typeof o.usageMetadata === "object") {
 const u = o.usageMetadata;
@@ -4655,8 +4767,30 @@ function extractUsage2(output) {
 totalTokens: u.totalTokenCount
 };
 }
+if (typeof o.tokens === "number" || typeof o.outputTokens === "number") {
+return {
+inputTokens: typeof o.inputTokens === "number" ? o.inputTokens : void 0,
+outputTokens: typeof o.outputTokens === "number" ? o.outputTokens : typeof o.tokens === "number" ? o.tokens : void 0,
+totalTokens: (typeof o.inputTokens === "number" ? o.inputTokens : 0) + (typeof o.outputTokens === "number" ? o.outputTokens : typeof o.tokens === "number" ? o.tokens : 0)
+};
+}
 return void 0;
 }
+function enrichFromLLMCapture(input, appUsage, fallbackModel, fallbackProvider) {
+const captured = consumeCapturedLLMRequest();
+if (captured) {
+const enrichedInput = input && typeof input === "object" ? { ...input, llmRequest: captured.body, promptSnippet: captured.promptSnippet } : { originalInput: input, llmRequest: captured.body, promptSnippet: captured.promptSnippet };
+const usage = appUsage ?? captured.usage;
+return { input: enrichedInput, usage };
+}
+const resolvedModel = fallbackModel || (input && typeof input === "object" ? input.model : void 0);
+const resolvedProvider = fallbackProvider || (input && typeof input === "object" ? input.provider : void 0);
+if (resolvedModel || resolvedProvider) {
+const enrichedInput = input && typeof input === "object" ? { ...input, llmRequest: { model: resolvedModel, provider: resolvedProvider } } : { originalInput: input, llmRequest: { model: resolvedModel, provider: resolvedProvider } };
+return { input: enrichedInput, usage: appUsage };
+}
+return { input, usage: appUsage };
+}
 function isReadableStream2(v) {
 return typeof v === "object" && v !== null && typeof v.getReader === "function" && typeof v.tee === "function";
 }
@@ -4710,9 +4844,7 @@ function wrapAsyncIterable2(source, onComplete) {
 }
 };
 }
-function wrapAI(modelName, callFn,
-const actualModel = options?.model;
-const actualProvider = options?.provider;
+function wrapAI(modelName, callFn, _options) {
 return async (...args) => {
 await tryAutoInitHttpContext();
 const ctx = getCaptureContext();
@@ -4724,18 +4856,18 @@ function wrapAI(modelName, callFn, options) {
 const start = rawDateNow();
 if (!ctx && !httpCtx && obsCtx) {
 const id2 = obsCtx.nextId();
-const
-const input2 = actualModel || actualProvider ? { ...rawInput && typeof rawInput === "object" ? rawInput : { prompt: rawInput }, ...actualModel ? { model: actualModel } : {}, ...actualProvider ? { provider: actualProvider } : {} } : rawInput;
+const input2 = args.length === 1 ? args[0] : args;
 try {
 const output = await callFn(...args);
+const enriched = enrichFromLLMCapture(input2, extractUsage2(output), _options?.model, _options?.provider);
 if (isReadableStream2(output)) {
 const [streamForCaller, streamForRecorder] = output.tee();
 bufferReadableStream2(streamForRecorder).then((rawText) => {
 const durationMs2 = rawDateNow() - start;
-pushTelemetryEvent({ id: id2, type: "ai", name: modelName, input:
+pushTelemetryEvent({ id: id2, type: "ai", name: modelName, input: enriched.input, output: null, streamed: true, streamRaw: rawText, timestamp: start, durationMs: durationMs2, ...enriched.usage ? { usage: enriched.usage } : {} });
 }).catch(() => {
 const durationMs2 = rawDateNow() - start;
-pushTelemetryEvent({ id: id2, type: "ai", name: modelName, input:
+pushTelemetryEvent({ id: id2, type: "ai", name: modelName, input: enriched.input, output: null, streamed: true, streamRaw: "", timestamp: start, durationMs: durationMs2, ...enriched.usage ? { usage: enriched.usage } : {} });
 });
 return streamForCaller;
 }
@@ -4743,30 +4875,30 @@ function wrapAI(modelName, callFn, options) {
 return wrapAsyncIterable2(output, (chunks) => {
 const durationMs2 = rawDateNow() - start;
 const rawText = chunks.map((c) => typeof c === "string" ? c : JSON.stringify(c)).join("");
-pushTelemetryEvent({ id: id2, type: "ai", name: modelName, input:
+pushTelemetryEvent({ id: id2, type: "ai", name: modelName, input: enriched.input, output: null, streamed: true, streamRaw: rawText, timestamp: start, durationMs: durationMs2, ...enriched.usage ? { usage: enriched.usage } : {} });
 });
 }
 const durationMs = rawDateNow() - start;
-const usage = extractUsage2(output);
 const event = {
 id: id2,
 type: "ai",
 name: modelName,
-input:
+input: enriched.input,
 output,
 timestamp: start,
 durationMs,
-...usage ? { usage } : {}
+...enriched.usage ? { usage: enriched.usage } : {}
 };
 pushTelemetryEvent(event);
 return output;
 } catch (e) {
+const enriched = enrichFromLLMCapture(input2, void 0, _options?.model, _options?.provider);
 const durationMs = rawDateNow() - start;
 pushTelemetryEvent({
 id: id2,
 type: "ai",
 name: modelName,
-input:
+input: enriched.input,
 output: { error: String(e) },
 timestamp: start,
 durationMs
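As a rough sketch of what the wrapAI changes mean for a wrapped call. This is hypothetical usage, not taken from the package's documentation: the export path for wrapAI is assumed, the OpenAI SDK is used only as an illustrative callee, and only the { model, provider } option names come from the code above.

    import OpenAI from 'openai';
    import { wrapAI } from 'elasticdash-test'; // assumed export path

    const openai = new OpenAI();
    // The third argument now only supplies fallback model/provider metadata; the
    // real request body and token usage are captured by the patched fetch and
    // merged into the telemetry event via enrichFromLLMCapture.
    const ask = wrapAI(
      'gpt-4o-mini',
      (prompt: string) => openai.chat.completions.create({
        model: 'gpt-4o-mini',
        messages: [{ role: 'user', content: prompt }],
      }),
      { provider: 'openai', model: 'gpt-4o-mini' }
    );
    const completion = await ask('Summarise this release');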
@@ -4897,14 +5029,15 @@ function wrapAI(modelName, callFn, options) {
 const input = httpModifiedInput !== void 0 ? httpModifiedInput : rawHttpInput;
 try {
 const output = await callFn(...httpEffectiveArgs);
+const enriched = enrichFromLLMCapture(input, extractUsage2(output), _options?.model, _options?.provider);
 if (isReadableStream2(output)) {
 const [streamForCaller, streamForRecorder] = output.tee();
 bufferReadableStream2(streamForRecorder).then((rawText) => {
 const durationMs2 = rawDateNow() - start;
-pushTelemetryEvent({ id, type: "ai", name: modelName, input, output: null, streamed: true, streamRaw: rawText, timestamp: start, durationMs: durationMs2 });
+pushTelemetryEvent({ id, type: "ai", name: modelName, input: enriched.input, output: null, streamed: true, streamRaw: rawText, timestamp: start, durationMs: durationMs2, ...enriched.usage ? { usage: enriched.usage } : {} });
 }).catch(() => {
 const durationMs2 = rawDateNow() - start;
-pushTelemetryEvent({ id, type: "ai", name: modelName, input, output: null, streamed: true, streamRaw: "", timestamp: start, durationMs: durationMs2 });
+pushTelemetryEvent({ id, type: "ai", name: modelName, input: enriched.input, output: null, streamed: true, streamRaw: "", timestamp: start, durationMs: durationMs2, ...enriched.usage ? { usage: enriched.usage } : {} });
 });
 return streamForCaller;
 }
@@ -4912,30 +5045,30 @@ function wrapAI(modelName, callFn, options) {
 return wrapAsyncIterable2(output, (chunks) => {
 const durationMs2 = rawDateNow() - start;
 const rawText = chunks.map((c) => typeof c === "string" ? c : JSON.stringify(c)).join("");
-pushTelemetryEvent({ id, type: "ai", name: modelName, input, output: null, streamed: true, streamRaw: rawText, timestamp: start, durationMs: durationMs2 });
+pushTelemetryEvent({ id, type: "ai", name: modelName, input: enriched.input, output: null, streamed: true, streamRaw: rawText, timestamp: start, durationMs: durationMs2, ...enriched.usage ? { usage: enriched.usage } : {} });
 });
 }
 const durationMs = rawDateNow() - start;
-const usage = extractUsage2(output);
 const event = {
 id,
 type: "ai",
 name: modelName,
-input,
+input: enriched.input,
 output,
 timestamp: start,
 durationMs,
-...usage ? { usage } : {}
+...enriched.usage ? { usage: enriched.usage } : {}
 };
 pushTelemetryEvent(event);
 return output;
 } catch (e) {
+const enriched = enrichFromLLMCapture(input, void 0, _options?.model, _options?.provider);
 const durationMs = rawDateNow() - start;
 pushTelemetryEvent({
 id,
 type: "ai",
 name: modelName,
-input,
+input: enriched.input,
 output: { error: String(e) },
 timestamp: start,
 durationMs
@@ -5440,6 +5573,9 @@ async function startPortalServer(options) {
 
 // src/index.ts
 init_portal_executor();
+init_telemetry_push();
+tryAutoInitHttpContext().catch(() => {
+});
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
 ReplayController,
package/dist/index.js
CHANGED
@@ -44,4 +44,11 @@ export { serializeAgentState, deserializeAgentState, extractTaskOutputs, resolve
 // Portal (remote rerun queue)
 export { startPortalServer } from './portal-server.js';
 export { executePortalTask } from './portal-executor.js';
+// ─── Eager auto-init ────────────────────────────────────────
+// When ELASTICDASH_API_KEY is set, automatically initialise observability mode
+// at import time so the socket connection is established immediately (e.g. for
+// onboarding detection). tryAutoInitHttpContext is idempotent and deduplicates
+// concurrent calls, so this is safe even if interceptors trigger it again later.
+import { tryAutoInitHttpContext } from './interceptors/telemetry-push.js';
+tryAutoInitHttpContext().catch(() => { });
 //# sourceMappingURL=index.js.map
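In consumer terms, a minimal sketch of the new import-time behaviour, assuming ELASTICDASH_API_KEY (and ELASTICDASH_API_URL) are already set in the environment as the bundled comment above describes:

    // Hypothetical consumer sketch: a bare import now brings up observability mode
    // and opens the socket connection; no explicit initObservability() call is
    // needed, and triggering initialisation again later is safe because
    // tryAutoInitHttpContext deduplicates concurrent calls.
    import 'elasticdash-test';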
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,0CAA0C;AAE1C,iBAAiB;AACjB,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,QAAQ,EAAE,UAAU,EAAE,SAAS,EAAE,WAAW,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AACnH,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAA;AACtC,OAAO,EAAE,aAAa,EAAE,MAAM,eAAe,CAAA;AAC7C,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAA;AACtD,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAA;AAG5C,gBAAgB;AAChB,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAA;AAGnH,4BAA4B;AAC5B,OAAO,EAAE,aAAa,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,uBAAuB,CAAA;AAC3F,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAA;AAItD,yBAAyB;AACzB,OAAO,EAAE,QAAQ,EAAE,MAAM,wBAAwB,CAAA;AACjD,OAAO,EAAE,MAAM,EAAE,MAAM,+BAA+B,CAAA;AAEtD,oEAAoE;AACpE,OAAO,EACL,iBAAiB,EACjB,kBAAkB,EAClB,iBAAiB,EACjB,kBAAkB,EAClB,iBAAiB,EACjB,kBAAkB,EAClB,sBAAsB,EACtB,gBAAgB,EAChB,6BAA6B,EAC7B,oBAAoB,EACpB,sBAAsB,GACvB,MAAM,kCAAkC,CAAA;AAGzC,4DAA4D;AAC5D,OAAO,EACL,WAAW,EACX,aAAa,EACb,iBAAiB,EACjB,qBAAqB,EACrB,eAAe,EACf,oBAAoB,EACpB,sBAAsB,GACvB,MAAM,8BAA8B,CAAA;AAUrC,wBAAwB;AACxB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,QAAQ,EAAE,mBAAmB,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAA;AAC3G,OAAO,EAAE,wBAAwB,EAAE,0BAA0B,EAAE,MAAM,2BAA2B,CAAA;AAEhG,4BAA4B;AAC5B,OAAO,EAAE,cAAc,EAAE,YAAY,EAAE,kBAAkB,EAAE,MAAM,wBAAwB,CAAA;AAGzF,2BAA2B;AAC3B,OAAO,EAAE,eAAe,EAAE,aAAa,EAAE,gBAAgB,EAAE,cAAc,EAAE,MAAM,gCAAgC,CAAA;AAEjH,sCAAsC;AACtC,OAAO,EAAE,oBAAoB,EAAE,sBAAsB,EAAE,MAAM,kCAAkC,CAAA;AAE/F,gBAAgB;AAChB,OAAO,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,UAAU,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAA;AAEjH,OAAO,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAA;AAGzD,OAAO,EAAE,qBAAqB,EAAE,mBAAmB,EAAE,MAAM,sBAAsB,CAAA;AAGjF,qDAAqD;AACrD,OAAO,EAAE,gBAAgB,EAAE,qBAAqB,EAAE,MAAM,uBAAuB,CAAA;AAG/E,YAAY;AACZ,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,wBAAwB,CAAA;AAE1E,UAAU;AACV,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAA;AAC7C,OAAO,EAAE,kBAAkB,EAAE,QAAQ,EAAE,MAAM,qCAAqC,CAAA;AAElF,kBAAkB;AAClB,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAA;AAGlD,yBAAyB;AACzB,OAAO,EAAE,mBAAmB,EAAE,qBAAqB,EAAE,kBAAkB,EAAE,gBAAgB,EAAE,MAAM,uBAAuB,CAAA;AAGxH,8BAA8B;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAA;AACtD,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAA"}
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,0CAA0C;AAE1C,iBAAiB;AACjB,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,QAAQ,EAAE,UAAU,EAAE,SAAS,EAAE,WAAW,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AACnH,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAA;AACtC,OAAO,EAAE,aAAa,EAAE,MAAM,eAAe,CAAA;AAC7C,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAA;AACtD,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAA;AAG5C,gBAAgB;AAChB,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAA;AAGnH,4BAA4B;AAC5B,OAAO,EAAE,aAAa,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,uBAAuB,CAAA;AAC3F,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAA;AAItD,yBAAyB;AACzB,OAAO,EAAE,QAAQ,EAAE,MAAM,wBAAwB,CAAA;AACjD,OAAO,EAAE,MAAM,EAAE,MAAM,+BAA+B,CAAA;AAEtD,oEAAoE;AACpE,OAAO,EACL,iBAAiB,EACjB,kBAAkB,EAClB,iBAAiB,EACjB,kBAAkB,EAClB,iBAAiB,EACjB,kBAAkB,EAClB,sBAAsB,EACtB,gBAAgB,EAChB,6BAA6B,EAC7B,oBAAoB,EACpB,sBAAsB,GACvB,MAAM,kCAAkC,CAAA;AAGzC,4DAA4D;AAC5D,OAAO,EACL,WAAW,EACX,aAAa,EACb,iBAAiB,EACjB,qBAAqB,EACrB,eAAe,EACf,oBAAoB,EACpB,sBAAsB,GACvB,MAAM,8BAA8B,CAAA;AAUrC,wBAAwB;AACxB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,QAAQ,EAAE,mBAAmB,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAA;AAC3G,OAAO,EAAE,wBAAwB,EAAE,0BAA0B,EAAE,MAAM,2BAA2B,CAAA;AAEhG,4BAA4B;AAC5B,OAAO,EAAE,cAAc,EAAE,YAAY,EAAE,kBAAkB,EAAE,MAAM,wBAAwB,CAAA;AAGzF,2BAA2B;AAC3B,OAAO,EAAE,eAAe,EAAE,aAAa,EAAE,gBAAgB,EAAE,cAAc,EAAE,MAAM,gCAAgC,CAAA;AAEjH,sCAAsC;AACtC,OAAO,EAAE,oBAAoB,EAAE,sBAAsB,EAAE,MAAM,kCAAkC,CAAA;AAE/F,gBAAgB;AAChB,OAAO,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,UAAU,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAA;AAEjH,OAAO,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAA;AAGzD,OAAO,EAAE,qBAAqB,EAAE,mBAAmB,EAAE,MAAM,sBAAsB,CAAA;AAGjF,qDAAqD;AACrD,OAAO,EAAE,gBAAgB,EAAE,qBAAqB,EAAE,MAAM,uBAAuB,CAAA;AAG/E,YAAY;AACZ,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,wBAAwB,CAAA;AAE1E,UAAU;AACV,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAA;AAC7C,OAAO,EAAE,kBAAkB,EAAE,QAAQ,EAAE,MAAM,qCAAqC,CAAA;AAElF,kBAAkB;AAClB,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAA;AAGlD,yBAAyB;AACzB,OAAO,EAAE,mBAAmB,EAAE,qBAAqB,EAAE,kBAAkB,EAAE,gBAAgB,EAAE,MAAM,uBAAuB,CAAA;AAGxH,8BAA8B;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAA;AACtD,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AAGxD,+DAA+D;AAC/D,+EAA+E;AAC/E,+EAA+E;AAC/E,+EAA+E;AAC/E,iFAAiF;AACjF,OAAO,EAAE,sBAAsB,EAAE,MAAM,kCAAkC,CAAA;AACzE,sBAAsB,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAA"}
package/dist/interceptors/ai-interceptor.d.ts
CHANGED
@@ -1,3 +1,18 @@
+export interface CapturedLLMRequest {
+url: string;
+provider: string;
+model: string;
+messages?: unknown[];
+body: Record<string, unknown>;
+promptSnippet?: string;
+usage?: UsageInfo;
+}
+export declare function consumeCapturedLLMRequest(): CapturedLLMRequest | undefined;
+type UsageInfo = {
+inputTokens?: number;
+outputTokens?: number;
+totalTokens?: number;
+};
 /**
 * Install the AI fetch interceptor. Wraps globalThis.fetch to automatically
 * record LLM steps into the active trace for OpenAI, Gemini, and Grok calls.
@@ -7,4 +22,5 @@ export declare function installAIInterceptor(): void;
 * Uninstall the AI fetch interceptor, restoring globalThis.fetch to its original value.
 */
 export declare function uninstallAIInterceptor(): void;
+export {};
 //# sourceMappingURL=ai-interceptor.d.ts.map
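The declarations above define the new capture hand-off API. A hedged sketch of how a caller might drain it; the deep import path is an assumption based on the dist layout, since this diff does not show a root-level re-export:

    import { consumeCapturedLLMRequest } from 'elasticdash-test/dist/interceptors/ai-interceptor.js'; // assumed path

    // Returns the request most recently stashed by the patched fetch (if any) and
    // clears the slot, so an immediate second call returns undefined.
    const captured = consumeCapturedLLMRequest();
    if (captured) {
      console.log(captured.provider, captured.model, captured.promptSnippet);
      console.log(captured.usage?.totalTokens);
    }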
package/dist/interceptors/ai-interceptor.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"ai-interceptor.d.ts","sourceRoot":"","sources":["../../src/interceptors/ai-interceptor.ts"],"names":[],"mappings":"
+
{"version":3,"file":"ai-interceptor.d.ts","sourceRoot":"","sources":["../../src/interceptors/ai-interceptor.ts"],"names":[],"mappings":"AAqBA,MAAM,WAAW,kBAAkB;IACjC,GAAG,EAAE,MAAM,CAAA;IACX,QAAQ,EAAE,MAAM,CAAA;IAChB,KAAK,EAAE,MAAM,CAAA;IACb,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAA;IACpB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;IAC7B,aAAa,CAAC,EAAE,MAAM,CAAA;IACtB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB;AAED,wBAAgB,yBAAyB,IAAI,kBAAkB,GAAG,SAAS,CAK1E;AA6BD,KAAK,SAAS,GAAG;IAAE,WAAW,CAAC,EAAE,MAAM,CAAC;IAAC,YAAY,CAAC,EAAE,MAAM,CAAC;IAAC,WAAW,CAAC,EAAE,MAAM,CAAA;CAAE,CAAA;AA2YtF;;;GAGG;AACH,wBAAgB,oBAAoB,IAAI,IAAI,CAmU3C;AAED;;GAEG;AACH,wBAAgB,sBAAsB,IAAI,IAAI,CAK7C"}
package/dist/interceptors/ai-interceptor.js
CHANGED
@@ -11,6 +11,46 @@ const AI_WRAPPER_KEY = '__elasticdash_ai_wrapper_depth__';
 function isAIWrapperActive() {
 return (globalThis[AI_WRAPPER_KEY] ?? 0) > 0;
 }
+/**
+* When inside a wrapAI call, the ai-interceptor captures the actual HTTP
+* request payload and stashes it here so wrapAI can attach it to its event.
+*/
+const LLM_REQUEST_KEY = '__elasticdash_last_llm_request__';
+export function consumeCapturedLLMRequest() {
+const g = globalThis;
+const req = g[LLM_REQUEST_KEY];
+if (req)
+g[LLM_REQUEST_KEY] = undefined;
+return req;
+}
+function extractPromptSnippet(body) {
+let messages;
+if (Array.isArray(body.messages))
+messages = body.messages;
+else if (Array.isArray(body.contents))
+messages = body.contents;
+if (!messages || messages.length === 0)
+return undefined;
+// Find the last user message
+for (let i = messages.length - 1; i >= 0; i--) {
+const msg = messages[i];
+if (!msg)
+continue;
+if (msg.role === 'user') {
+let content = msg.content;
+if (Array.isArray(content)) {
+content = content
+.map((b) => (b && typeof b === 'object' ? String(b.text ?? '') : String(b)))
+.filter(Boolean)
+.join('');
+}
+if (typeof content === 'string') {
+return content.slice(0, 100);
+}
+}
+}
+return undefined;
+}
 function extractUsage(provider, body) {
 if (provider === 'openai' || provider === 'grok' || provider === 'kimi') {
 const u = body.usage;
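A small illustrative sketch of the two request shapes the snippet extraction above handles; the bodies are invented and the 100-character cap comes from the code:

    // OpenAI/Anthropic-style body: messages array, last user message wins,
    // string content or an array of text blocks.
    const chatBody = {
      model: 'gpt-4o-mini',
      messages: [
        { role: 'system', content: 'You are terse.' },
        { role: 'user', content: [{ type: 'text', text: 'Ping?' }] },
      ],
    };
    // Gemini-style body: `contents` instead of `messages`. The role matches but
    // there is no `content` field (Gemini uses `parts`), so no snippet is
    // produced; the contents array itself is still captured as messages.
    const geminiBody = {
      contents: [{ role: 'user', parts: [{ text: 'Ping?' }] }],
    };
    // extractPromptSnippet(chatBody) would yield 'Ping?' (first 100 characters of
    // the last user message); extractPromptSnippet(geminiBody) yields undefined.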
@@ -312,6 +352,40 @@ async function bufferSSEStream(provider, stream) {
 }
 return completion;
 }
+/** Extract usage from buffered raw SSE text */
+function extractStreamUsage(provider, rawSSE) {
+const lines = rawSSE.split('\n');
+// Walk backwards to find usage in the final events
+for (let i = lines.length - 1; i >= 0; i--) {
+const line = lines[i];
+if (!line.startsWith('data: '))
+continue;
+const data = line.slice(6).trim();
+if (data === '[DONE]')
+continue;
+try {
+const obj = JSON.parse(data);
+// OpenAI / Grok / Kimi: usage in the final chunk
+const usage = extractUsage(provider, obj);
+if (usage)
+return usage;
+// Anthropic: usage in message_delta event
+if (obj.type === 'message_delta') {
+const u = obj.usage;
+if (u)
+return extractUsage('anthropic', { usage: u });
+}
+// Anthropic: usage in message_start event
+if (obj.type === 'message_start') {
+const msg = obj.message;
+if (msg?.usage)
+return extractUsage('anthropic', { usage: msg.usage });
+}
+}
+catch { /* skip */ }
+}
+return undefined;
+}
 /** Build a minimal non-streaming JSON response body from a completion string (for replay) */
 function synthesizeCompletionJSON(provider, completion) {
 if (provider === 'gemini') {
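For orientation, the kind of buffered SSE text this function scans; the transcript below is invented and abridged, but the event names match the branches above (OpenAI-style usage arrives in a final data chunk, Anthropic reports it in message_start / message_delta):

    const rawSSE = [
      'data: {"type":"message_start","message":{"usage":{"input_tokens":12,"output_tokens":1}}}',
      'data: {"type":"message_delta","usage":{"output_tokens":34}}',
      'data: [DONE]',
    ].join('\n');
    // Walking backwards, '[DONE]' is skipped, the message_delta usage is found
    // first, and message_start is only consulted if no later event carried usage.
    const usage = extractStreamUsage('anthropic', rawSSE);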
@@ -382,9 +456,66 @@ export function installAIInterceptor() {
 globalThis.fetch = async function patchedFetch(input, init) {
 const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
 const provider = detectProvider(url);
-// Skip recording when inside a wrapAI call to avoid duplicate events
+// Skip recording when inside a wrapAI call to avoid duplicate events,
+// but capture the actual HTTP request body and response usage so wrapAI
+// can attach them to its event.
 if (provider && isAIWrapperActive()) {
-
+let capturedReq;
+let capturedModel = 'unknown';
+let capturedMessages;
+let capturedSnippet;
+try {
+const rawBody = init?.body;
+if (rawBody && typeof rawBody === 'string') {
+capturedReq = JSON.parse(rawBody);
+capturedModel = extractModel(provider, capturedReq, url);
+capturedMessages = Array.isArray(capturedReq.messages) ? capturedReq.messages : Array.isArray(capturedReq.contents) ? capturedReq.contents : undefined;
+capturedSnippet = extractPromptSnippet(capturedReq);
+}
+}
+catch {
+// Ignore parse errors
+}
+const response = await originalFetch(input, init);
+// Extract usage from the response (clone to avoid consuming the body)
+if (capturedReq) {
+const captured = {
+url, provider, model: capturedModel, messages: capturedMessages,
+body: capturedReq, promptSnippet: capturedSnippet,
+};
+const isStreaming = capturedReq.stream === true;
+try {
+const cloned = response.clone();
+if (!isStreaming) {
+// Non-streaming: parse JSON response for usage
+const responseBody = await cloned.json();
+captured.usage = extractUsage(provider, responseBody);
+}
+else if (cloned.body) {
+// Streaming: read the raw SSE text to extract usage from final events
+try {
+const decoder = new TextDecoder();
+const reader = cloned.body.getReader();
+let rawSSE = '';
+for (;;) {
+const { done, value } = await reader.read();
+if (done)
+break;
+rawSSE += decoder.decode(value, { stream: true });
+}
+reader.releaseLock();
+captured.usage = extractStreamUsage(provider, rawSSE);
+}
+catch { /* stream read failed */ }
+}
+}
+catch {
+// Response body not available — usage won't be captured
+}
+;
+globalThis[LLM_REQUEST_KEY] = captured;
+}
+return response;
 }
 const traceAtCall = getCurrentTrace();
 const obsCtx = getObservabilityContext();
@@ -417,7 +548,8 @@ export function installAIInterceptor() {
 }
 const ctx = getCaptureContext();
 // Observability-only mode: no trace handle, no capture context — record via pushTelemetryEvent
-
+// Skip when inside a wrapAI call to avoid duplicate events (wrapAI records its own)
+if (!traceAtCall && !ctx && obsCtx && !isAIWrapperActive()) {
 const id = obsCtx.nextId();
 const start = rawDateNow();
 const eventInput = { url, provider, model, prompt, messages };
@@ -453,7 +585,8 @@ export function installAIInterceptor() {
 return response;
 }
 // HTTP mode (no capture context): replay frozen AI events or execute live + push telemetry
-
+// Skip when inside a wrapAI call to avoid duplicate events (wrapAI records its own)
+if (!ctx && httpCtx && !isAIWrapperActive()) {
 const id = httpCtx.nextId();
 const eventInput = { url, provider, model, prompt, messages };
 // Replay frozen step