@aui-x/prism 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +11 -0
- package/README.md +195 -0
- package/dist/client-B9WXHjpz.cjs +192 -0
- package/dist/client-B9WXHjpz.cjs.map +1 -0
- package/dist/client-BSsSpkZY.d.cts +157 -0
- package/dist/client-BSsSpkZY.d.cts.map +1 -0
- package/dist/client-BrZstMQX.d.ts +157 -0
- package/dist/client-BrZstMQX.d.ts.map +1 -0
- package/dist/client-C7RiAn7a.js +174 -0
- package/dist/client-C7RiAn7a.js.map +1 -0
- package/dist/core.cjs +6 -0
- package/dist/core.d.cts +2 -0
- package/dist/core.d.ts +2 -0
- package/dist/core.js +3 -0
- package/dist/index.cjs +12 -0
- package/dist/index.d.cts +5 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +6 -0
- package/dist/integrations/ai-sdk.cjs +26 -0
- package/dist/integrations/ai-sdk.cjs.map +1 -0
- package/dist/integrations/ai-sdk.d.cts +18 -0
- package/dist/integrations/ai-sdk.d.cts.map +1 -0
- package/dist/integrations/ai-sdk.d.ts +18 -0
- package/dist/integrations/ai-sdk.d.ts.map +1 -0
- package/dist/integrations/ai-sdk.js +25 -0
- package/dist/integrations/ai-sdk.js.map +1 -0
- package/dist/integrations/anthropic.cjs +226 -0
- package/dist/integrations/anthropic.cjs.map +1 -0
- package/dist/integrations/anthropic.d.cts +20 -0
- package/dist/integrations/anthropic.d.cts.map +1 -0
- package/dist/integrations/anthropic.d.ts +20 -0
- package/dist/integrations/anthropic.d.ts.map +1 -0
- package/dist/integrations/anthropic.js +224 -0
- package/dist/integrations/anthropic.js.map +1 -0
- package/dist/integrations/openai.cjs +227 -0
- package/dist/integrations/openai.cjs.map +1 -0
- package/dist/integrations/openai.d.cts +20 -0
- package/dist/integrations/openai.d.cts.map +1 -0
- package/dist/integrations/openai.d.ts +20 -0
- package/dist/integrations/openai.d.ts.map +1 -0
- package/dist/integrations/openai.js +225 -0
- package/dist/integrations/openai.js.map +1 -0
- package/dist/wrapper-7jRyp54U.js +242 -0
- package/dist/wrapper-7jRyp54U.js.map +1 -0
- package/dist/wrapper-ByspXfxS.cjs +247 -0
- package/dist/wrapper-ByspXfxS.cjs.map +1 -0
- package/package.json +103 -0
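The listing shows a dual-format build: each entry point ships an ESM file (`.js`), a CommonJS file (`.cjs`), matching declaration files (`.d.ts`/`.d.cts`), and source maps, with per-provider integration entries for ai-sdk, Anthropic, and OpenAI. A rough consumption sketch, assuming package.json (whose contents are not shown in this section) maps subpath exports onto these dist files:

```ts
// Hypothetical sketch; the subpath specifier and exports map are inferred
// from the dist layout above, not confirmed by this diff.
import { prismOpenAI } from "@aui-x/prism/integrations/openai"; // would resolve dist/integrations/openai.js (ESM)
// CommonJS consumers would resolve dist/integrations/openai.cjs instead:
// const { prismOpenAI } = require("@aui-x/prism/integrations/openai");
```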
@@ -0,0 +1,227 @@
Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });

//#region src/integrations/openai.ts
function truncateInput(messages, maxBytes = 32768) {
  let json = JSON.stringify(messages);
  if (new TextEncoder().encode(json).length <= maxBytes) return messages;
  if (!Array.isArray(messages)) return messages;
  const arr = [...messages];
  while (arr.length > 1) {
    arr.shift();
    json = JSON.stringify(arr);
    if (new TextEncoder().encode(json).length <= maxBytes) return arr;
  }
  return arr;
}
function createRootHandle(tracer, opts, model, input, metadata) {
  const parentTraceId = opts?.parentTraceId;
  if (parentTraceId) return {
    isSpanMode: true,
    traceId: parentTraceId,
    handle: tracer.startSpanOnTrace(parentTraceId, {
      name: opts?.name ?? model,
      type: "llm",
      input,
      model,
      provider: "openai",
      metadata
    })
  };
  const handle = tracer.startTrace({
    name: opts?.name ?? model,
    model,
    provider: "openai",
    input,
    metadata,
    tags: opts?.tags,
    endUserId: opts?.endUserId
  });
  return {
    isSpanMode: false,
    traceId: handle.traceId,
    handle
  };
}
function createToolSpan(tracer, root, toolCall) {
  if (root.isSpanMode) return tracer.startSpanOnTrace(root.traceId, {
    name: toolCall.name,
    type: "tool",
    input: toolCall.arguments,
    parentSpanId: root.handle.spanId
  });
  return root.handle.startSpan({
    name: toolCall.name,
    type: "tool",
    input: toolCall.arguments
  });
}
function endRootHandle(root, result) {
  root.handle.end({
    output: result.output,
    status: "completed",
    totalTokens: result.inputTokens + result.outputTokens,
    promptTokens: result.inputTokens,
    completionTokens: result.outputTokens,
    ttftMs: result.ttftMs
  });
}
function cleanupOnError(root, error) {
  root.handle.end({
    status: "error",
    error: error instanceof Error ? error.message : String(error)
  });
}
/**
 * Wraps an OpenAI client to automatically trace `chat.completions.create` calls.
 *
 * Returns a proxy that behaves identically to the original client, but emits
 * trace events for both streaming and non-streaming chat completions.
 */
function prismOpenAI(tracer, client, opts) {
  return new Proxy(client, { get(target, prop, receiver) {
    if (prop === "chat") {
      const chat = Reflect.get(target, prop, receiver);
      return new Proxy(chat, { get(chatTarget, chatProp, chatReceiver) {
        if (chatProp === "completions") {
          const completions = Reflect.get(chatTarget, chatProp, chatReceiver);
          return new Proxy(completions, { get(compTarget, compProp, compReceiver) {
            if (compProp === "create") return createInterceptor(tracer, Reflect.get(compTarget, compProp, compReceiver).bind(compTarget), opts);
            return Reflect.get(compTarget, compProp, compReceiver);
          } });
        }
        return Reflect.get(chatTarget, chatProp, chatReceiver);
      } });
    }
    return Reflect.get(target, prop, receiver);
  } });
}
function createInterceptor(tracer, originalCreate, opts) {
  return async (params, ...rest) => {
    const model = params.model ?? "unknown";
    const input = truncateInput(params.messages);
    const metadata = opts?.metadata;
    const isStream = !!params.stream;
    const root = createRootHandle(tracer, opts, model, input, metadata);
    if (!isStream) return handleNonStreaming(tracer, root, originalCreate, params, rest);
    return handleStreaming(tracer, root, originalCreate, params, rest);
  };
}
async function handleNonStreaming(tracer, root, originalCreate, params, rest) {
  try {
    const result = await originalCreate(params, ...rest);
    const message = result.choices?.[0]?.message;
    const output = message?.content ?? "";
    const inputTokens = result.usage?.prompt_tokens ?? 0;
    const outputTokens = result.usage?.completion_tokens ?? 0;
    if (message?.tool_calls) for (const tc of message.tool_calls) createToolSpan(tracer, root, {
      id: tc.id,
      name: tc.function.name,
      arguments: tc.function.arguments
    }).end({ status: "completed" });
    endRootHandle(root, {
      output,
      inputTokens,
      outputTokens
    });
    return result;
  } catch (error) {
    cleanupOnError(root, error);
    throw error;
  }
}
async function handleStreaming(tracer, root, originalCreate, params, rest) {
  const patchedParams = {
    ...params,
    stream_options: {
      ...params.stream_options,
      include_usage: true
    }
  };
  try {
    const streamStart = Date.now();
    const stream = await originalCreate(patchedParams, ...rest);
    let output = "";
    let inputTokens = 0;
    let outputTokens = 0;
    let ttftMs;
    let firstChunk = true;
    let finalized = false;
    const toolCallAccum = /* @__PURE__ */ new Map();
    const processChunk = (chunk) => {
      const delta = chunk.choices?.[0]?.delta;
      if (delta?.content) {
        if (firstChunk) {
          ttftMs = Date.now() - streamStart;
          firstChunk = false;
        }
        output += delta.content;
      }
      if (delta?.tool_calls) for (const tc of delta.tool_calls) {
        let accum = toolCallAccum.get(tc.index);
        if (!accum) {
          accum = {
            id: tc.id ?? "",
            name: tc.function?.name ?? "",
            arguments: ""
          };
          toolCallAccum.set(tc.index, accum);
        }
        if (tc.id) accum.id = tc.id;
        if (tc.function?.name) accum.name = tc.function.name;
        if (tc.function?.arguments) accum.arguments += tc.function.arguments;
      }
      if (chunk.usage) {
        inputTokens = chunk.usage.prompt_tokens ?? 0;
        outputTokens = chunk.usage.completion_tokens ?? 0;
      }
    };
    const finalizeStream = () => {
      if (finalized) return;
      finalized = true;
      for (const tc of Array.from(toolCallAccum.values())) createToolSpan(tracer, root, tc).end({ status: "completed" });
      endRootHandle(root, {
        output,
        inputTokens,
        outputTokens,
        ttftMs
      });
    };
    const originalIterator = stream[Symbol.asyncIterator].bind(stream);
    return new Proxy(stream, { get(target, prop, receiver) {
      if (prop === Symbol.asyncIterator) return () => {
        const iter = originalIterator();
        return {
          async next() {
            const result = await iter.next();
            if (!result.done) processChunk(result.value);
            if (result.done) finalizeStream();
            return result;
          },
          async return(value) {
            finalizeStream();
            return iter.return?.(value) ?? {
              done: true,
              value: void 0
            };
          },
          async throw(err) {
            cleanupOnError(root, err);
            return iter.throw?.(err) ?? {
              done: true,
              value: void 0
            };
          }
        };
      };
      if (prop === "tee") return;
      return Reflect.get(target, prop, receiver);
    } });
  } catch (error) {
    cleanupOnError(root, error);
    throw error;
  }
}

//#endregion
exports.prismOpenAI = prismOpenAI;
//# sourceMappingURL=openai.cjs.map
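The wrapper above never alters the request for non-streaming calls and only injects `stream_options.include_usage` for streaming ones; everything else passes through the proxy untouched. A minimal usage sketch, assuming a constructed `AuixPrism` tracer and the subpath import shown earlier (both outside this diff):

```ts
// Sketch only: prismOpenAI's signature comes from this package; the import
// specifiers and the tracer's construction are assumptions.
import OpenAI from "openai";
import { prismOpenAI } from "@aui-x/prism/integrations/openai";

declare const tracer: import("@aui-x/prism").AuixPrism; // assumed core export

const openai = prismOpenAI(tracer, new OpenAI());

// Non-streaming: the call passes through unchanged; the trace is ended with
// prompt/completion token counts taken from result.usage.
const res = await openai.chat.completions.create({
  model: "gpt-4o-mini",
  messages: [{ role: "user", content: "hello" }],
});
console.log(res.choices[0]?.message?.content);

// Streaming: stream_options.include_usage is injected automatically, so the
// final chunk carries usage; TTFT is recorded on the first content delta and
// the trace is finalized when iteration completes or returns early.
const stream = await openai.chat.completions.create({
  model: "gpt-4o-mini",
  messages: [{ role: "user", content: "hello" }],
  stream: true,
});
let text = "";
for await (const chunk of stream) {
  text += chunk.choices[0]?.delta?.content ?? "";
}
console.log(text);
```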
@@ -0,0 +1 @@
{"version":3,"file":"openai.cjs","names":[],"sources":["../../src/integrations/openai.ts"],"sourcesContent":["import type { AuixPrism, SpanHandle, TraceHandle } from \"../client\";\n\nexport interface OpenAIPrismOptions {\n name?: string;\n tags?: string[];\n metadata?: Record<string, unknown>;\n parentTraceId?: string;\n endUserId?: string;\n}\n\ntype RootHandle =\n | { isSpanMode: true; traceId: string; handle: SpanHandle }\n | { isSpanMode: false; traceId: string; handle: TraceHandle };\n\nfunction truncateInput(messages: unknown, maxBytes = 32768): unknown {\n let json = JSON.stringify(messages);\n if (new TextEncoder().encode(json).length <= maxBytes) {\n return messages;\n }\n\n if (!Array.isArray(messages)) return messages;\n const arr = [...messages];\n while (arr.length > 1) {\n arr.shift();\n json = JSON.stringify(arr);\n if (new TextEncoder().encode(json).length <= maxBytes) {\n return arr;\n }\n }\n return arr;\n}\n\nfunction createRootHandle(\n tracer: AuixPrism,\n opts: OpenAIPrismOptions | undefined,\n model: string,\n input: unknown,\n metadata: Record<string, unknown> | undefined,\n): RootHandle {\n const parentTraceId = opts?.parentTraceId;\n\n if (parentTraceId) {\n const handle = tracer.startSpanOnTrace(parentTraceId, {\n name: opts?.name ?? model,\n type: \"llm\",\n input,\n model,\n provider: \"openai\",\n metadata,\n });\n return { isSpanMode: true, traceId: parentTraceId, handle };\n }\n\n const handle = tracer.startTrace({\n name: opts?.name ?? model,\n model,\n provider: \"openai\",\n input,\n metadata,\n tags: opts?.tags,\n endUserId: opts?.endUserId,\n });\n return { isSpanMode: false, traceId: handle.traceId, handle };\n}\n\nfunction createToolSpan(\n tracer: AuixPrism,\n root: RootHandle,\n toolCall: { id: string; name: string; arguments: string },\n): SpanHandle {\n if (root.isSpanMode) {\n return tracer.startSpanOnTrace(root.traceId, {\n name: toolCall.name,\n type: \"tool\",\n input: toolCall.arguments,\n parentSpanId: root.handle.spanId,\n });\n }\n return root.handle.startSpan({\n name: toolCall.name,\n type: \"tool\",\n input: toolCall.arguments,\n });\n}\n\nfunction endRootHandle(\n root: RootHandle,\n result: {\n output: string;\n inputTokens: number;\n outputTokens: number;\n ttftMs?: number;\n },\n): void {\n root.handle.end({\n output: result.output,\n status: \"completed\",\n totalTokens: result.inputTokens + result.outputTokens,\n promptTokens: result.inputTokens,\n completionTokens: result.outputTokens,\n ttftMs: result.ttftMs,\n });\n}\n\nfunction cleanupOnError(root: RootHandle, error: unknown): void {\n root.handle.end({\n status: \"error\",\n error: error instanceof Error ? 
error.message : String(error),\n });\n}\n\ninterface ToolCallDelta {\n index: number;\n id?: string;\n function?: { name?: string; arguments?: string };\n}\n\ninterface AccumulatedToolCall {\n id: string;\n name: string;\n arguments: string;\n}\n\n/**\n * Wraps an OpenAI client to automatically trace `chat.completions.create` calls.\n *\n * Returns a proxy that behaves identically to the original client, but emits\n * trace events for both streaming and non-streaming chat completions.\n */\nexport function prismOpenAI<T extends object>(\n tracer: AuixPrism,\n client: T,\n opts?: OpenAIPrismOptions,\n): T {\n return new Proxy(client, {\n get(target, prop, receiver) {\n if (prop === \"chat\") {\n const chat = Reflect.get(target, prop, receiver) as object;\n return new Proxy(chat, {\n get(chatTarget, chatProp, chatReceiver) {\n if (chatProp === \"completions\") {\n const completions = Reflect.get(\n chatTarget,\n chatProp,\n chatReceiver,\n );\n return new Proxy(completions, {\n get(compTarget, compProp, compReceiver) {\n if (compProp === \"create\") {\n return createInterceptor(\n tracer,\n Reflect.get(compTarget, compProp, compReceiver).bind(\n compTarget,\n ),\n opts,\n );\n }\n return Reflect.get(compTarget, compProp, compReceiver);\n },\n });\n }\n return Reflect.get(chatTarget, chatProp, chatReceiver);\n },\n });\n }\n return Reflect.get(target, prop, receiver);\n },\n });\n}\n\nfunction createInterceptor(\n tracer: AuixPrism,\n originalCreate: (...args: unknown[]) => Promise<unknown>,\n opts?: OpenAIPrismOptions,\n) {\n return async (params: Record<string, unknown>, ...rest: unknown[]) => {\n const model = (params.model as string) ?? \"unknown\";\n const input = truncateInput(params.messages);\n const metadata = opts?.metadata;\n const isStream = !!params.stream;\n\n const root = createRootHandle(tracer, opts, model, input, metadata);\n\n if (!isStream) {\n return handleNonStreaming(tracer, root, originalCreate, params, rest);\n }\n\n return handleStreaming(tracer, root, originalCreate, params, rest);\n };\n}\n\nasync function handleNonStreaming(\n tracer: AuixPrism,\n root: RootHandle,\n originalCreate: (...args: unknown[]) => Promise<unknown>,\n params: Record<string, unknown>,\n rest: unknown[],\n): Promise<unknown> {\n try {\n const result = (await originalCreate(params, ...rest)) as {\n choices?: Array<{\n message?: {\n content?: string | null;\n tool_calls?: Array<{\n id: string;\n function: { name: string; arguments: string };\n }>;\n };\n }>;\n usage?: {\n prompt_tokens?: number;\n completion_tokens?: number;\n total_tokens?: number;\n };\n };\n\n const message = result.choices?.[0]?.message;\n const output = message?.content ?? \"\";\n const inputTokens = result.usage?.prompt_tokens ?? 0;\n const outputTokens = result.usage?.completion_tokens ?? 
0;\n\n if (message?.tool_calls) {\n for (const tc of message.tool_calls) {\n const span = createToolSpan(tracer, root, {\n id: tc.id,\n name: tc.function.name,\n arguments: tc.function.arguments,\n });\n span.end({ status: \"completed\" });\n }\n }\n\n endRootHandle(root, { output, inputTokens, outputTokens });\n return result;\n } catch (error) {\n cleanupOnError(root, error);\n throw error;\n }\n}\n\nasync function handleStreaming(\n tracer: AuixPrism,\n root: RootHandle,\n originalCreate: (...args: unknown[]) => Promise<unknown>,\n params: Record<string, unknown>,\n rest: unknown[],\n): Promise<unknown> {\n // Auto-inject stream_options to get usage data in the final chunk\n const patchedParams = {\n ...params,\n stream_options: {\n ...(params.stream_options as Record<string, unknown> | undefined),\n include_usage: true,\n },\n };\n\n try {\n const streamStart = Date.now();\n const stream = await originalCreate(patchedParams, ...rest);\n\n let output = \"\";\n let inputTokens = 0;\n let outputTokens = 0;\n let ttftMs: number | undefined;\n let firstChunk = true;\n let finalized = false;\n const toolCallAccum = new Map<number, AccumulatedToolCall>();\n\n const processChunk = (chunk: StreamChunk) => {\n const delta = chunk.choices?.[0]?.delta;\n\n if (delta?.content) {\n if (firstChunk) {\n ttftMs = Date.now() - streamStart;\n firstChunk = false;\n }\n output += delta.content;\n }\n\n if (delta?.tool_calls) {\n for (const tc of delta.tool_calls as ToolCallDelta[]) {\n let accum = toolCallAccum.get(tc.index);\n if (!accum) {\n accum = {\n id: tc.id ?? \"\",\n name: tc.function?.name ?? \"\",\n arguments: \"\",\n };\n toolCallAccum.set(tc.index, accum);\n }\n if (tc.id) accum.id = tc.id;\n if (tc.function?.name) accum.name = tc.function.name;\n if (tc.function?.arguments) accum.arguments += tc.function.arguments;\n }\n }\n\n if (chunk.usage) {\n inputTokens = chunk.usage.prompt_tokens ?? 0;\n outputTokens = chunk.usage.completion_tokens ?? 0;\n }\n };\n\n const finalizeStream = () => {\n if (finalized) return;\n finalized = true;\n\n for (const tc of Array.from(toolCallAccum.values())) {\n const span = createToolSpan(tracer, root, tc);\n span.end({ status: \"completed\" });\n }\n\n endRootHandle(root, { output, inputTokens, outputTokens, ttftMs });\n };\n\n const originalIterator = (\n stream as { [Symbol.asyncIterator](): AsyncIterator<unknown> }\n )[Symbol.asyncIterator].bind(stream);\n\n return new Proxy(stream as object, {\n get(target, prop, receiver) {\n if (prop === Symbol.asyncIterator) {\n return () => {\n const iter = originalIterator();\n return {\n async next() {\n const result = await iter.next();\n if (!result.done) {\n processChunk(result.value as StreamChunk);\n }\n if (result.done) {\n finalizeStream();\n }\n return result;\n },\n async return(value?: unknown) {\n finalizeStream();\n return iter.return?.(value) ?? { done: true, value: undefined };\n },\n async throw(err?: unknown) {\n cleanupOnError(root, err);\n return iter.throw?.(err) ?? 
{ done: true, value: undefined };\n },\n };\n };\n }\n if (prop === \"tee\") {\n return undefined;\n }\n return Reflect.get(target, prop, receiver);\n },\n });\n } catch (error) {\n cleanupOnError(root, error);\n throw error;\n }\n}\n\ninterface StreamChunk {\n choices?: Array<{\n delta?: {\n content?: string;\n tool_calls?: ToolCallDelta[];\n };\n }>;\n usage?: {\n prompt_tokens?: number;\n completion_tokens?: number;\n };\n}\n"],"mappings":";;;AAcA,SAAS,cAAc,UAAmB,WAAW,OAAgB;CACnE,IAAI,OAAO,KAAK,UAAU,SAAS;AACnC,KAAI,IAAI,aAAa,CAAC,OAAO,KAAK,CAAC,UAAU,SAC3C,QAAO;AAGT,KAAI,CAAC,MAAM,QAAQ,SAAS,CAAE,QAAO;CACrC,MAAM,MAAM,CAAC,GAAG,SAAS;AACzB,QAAO,IAAI,SAAS,GAAG;AACrB,MAAI,OAAO;AACX,SAAO,KAAK,UAAU,IAAI;AAC1B,MAAI,IAAI,aAAa,CAAC,OAAO,KAAK,CAAC,UAAU,SAC3C,QAAO;;AAGX,QAAO;;AAGT,SAAS,iBACP,QACA,MACA,OACA,OACA,UACY;CACZ,MAAM,gBAAgB,MAAM;AAE5B,KAAI,cASF,QAAO;EAAE,YAAY;EAAM,SAAS;EAAe,QARpC,OAAO,iBAAiB,eAAe;GACpD,MAAM,MAAM,QAAQ;GACpB,MAAM;GACN;GACA;GACA,UAAU;GACV;GACD,CAAC;EACyD;CAG7D,MAAM,SAAS,OAAO,WAAW;EAC/B,MAAM,MAAM,QAAQ;EACpB;EACA,UAAU;EACV;EACA;EACA,MAAM,MAAM;EACZ,WAAW,MAAM;EAClB,CAAC;AACF,QAAO;EAAE,YAAY;EAAO,SAAS,OAAO;EAAS;EAAQ;;AAG/D,SAAS,eACP,QACA,MACA,UACY;AACZ,KAAI,KAAK,WACP,QAAO,OAAO,iBAAiB,KAAK,SAAS;EAC3C,MAAM,SAAS;EACf,MAAM;EACN,OAAO,SAAS;EAChB,cAAc,KAAK,OAAO;EAC3B,CAAC;AAEJ,QAAO,KAAK,OAAO,UAAU;EAC3B,MAAM,SAAS;EACf,MAAM;EACN,OAAO,SAAS;EACjB,CAAC;;AAGJ,SAAS,cACP,MACA,QAMM;AACN,MAAK,OAAO,IAAI;EACd,QAAQ,OAAO;EACf,QAAQ;EACR,aAAa,OAAO,cAAc,OAAO;EACzC,cAAc,OAAO;EACrB,kBAAkB,OAAO;EACzB,QAAQ,OAAO;EAChB,CAAC;;AAGJ,SAAS,eAAe,MAAkB,OAAsB;AAC9D,MAAK,OAAO,IAAI;EACd,QAAQ;EACR,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;EAC9D,CAAC;;;;;;;;AAqBJ,SAAgB,YACd,QACA,QACA,MACG;AACH,QAAO,IAAI,MAAM,QAAQ,EACvB,IAAI,QAAQ,MAAM,UAAU;AAC1B,MAAI,SAAS,QAAQ;GACnB,MAAM,OAAO,QAAQ,IAAI,QAAQ,MAAM,SAAS;AAChD,UAAO,IAAI,MAAM,MAAM,EACrB,IAAI,YAAY,UAAU,cAAc;AACtC,QAAI,aAAa,eAAe;KAC9B,MAAM,cAAc,QAAQ,IAC1B,YACA,UACA,aACD;AACD,YAAO,IAAI,MAAM,aAAa,EAC5B,IAAI,YAAY,UAAU,cAAc;AACtC,UAAI,aAAa,SACf,QAAO,kBACL,QACA,QAAQ,IAAI,YAAY,UAAU,aAAa,CAAC,KAC9C,WACD,EACD,KACD;AAEH,aAAO,QAAQ,IAAI,YAAY,UAAU,aAAa;QAEzD,CAAC;;AAEJ,WAAO,QAAQ,IAAI,YAAY,UAAU,aAAa;MAEzD,CAAC;;AAEJ,SAAO,QAAQ,IAAI,QAAQ,MAAM,SAAS;IAE7C,CAAC;;AAGJ,SAAS,kBACP,QACA,gBACA,MACA;AACA,QAAO,OAAO,QAAiC,GAAG,SAAoB;EACpE,MAAM,QAAS,OAAO,SAAoB;EAC1C,MAAM,QAAQ,cAAc,OAAO,SAAS;EAC5C,MAAM,WAAW,MAAM;EACvB,MAAM,WAAW,CAAC,CAAC,OAAO;EAE1B,MAAM,OAAO,iBAAiB,QAAQ,MAAM,OAAO,OAAO,SAAS;AAEnE,MAAI,CAAC,SACH,QAAO,mBAAmB,QAAQ,MAAM,gBAAgB,QAAQ,KAAK;AAGvE,SAAO,gBAAgB,QAAQ,MAAM,gBAAgB,QAAQ,KAAK;;;AAItE,eAAe,mBACb,QACA,MACA,gBACA,QACA,MACkB;AAClB,KAAI;EACF,MAAM,SAAU,MAAM,eAAe,QAAQ,GAAG,KAAK;EAiBrD,MAAM,UAAU,OAAO,UAAU,IAAI;EACrC,MAAM,SAAS,SAAS,WAAW;EACnC,MAAM,cAAc,OAAO,OAAO,iBAAiB;EACnD,MAAM,eAAe,OAAO,OAAO,qBAAqB;AAExD,MAAI,SAAS,WACX,MAAK,MAAM,MAAM,QAAQ,WAMvB,CALa,eAAe,QAAQ,MAAM;GACxC,IAAI,GAAG;GACP,MAAM,GAAG,SAAS;GAClB,WAAW,GAAG,SAAS;GACxB,CAAC,CACG,IAAI,EAAE,QAAQ,aAAa,CAAC;AAIrC,gBAAc,MAAM;GAAE;GAAQ;GAAa;GAAc,CAAC;AAC1D,SAAO;UACA,OAAO;AACd,iBAAe,MAAM,MAAM;AAC3B,QAAM;;;AAIV,eAAe,gBACb,QACA,MACA,gBACA,QACA,MACkB;CAElB,MAAM,gBAAgB;EACpB,GAAG;EACH,gBAAgB;GACd,GAAI,OAAO;GACX,eAAe;GAChB;EACF;AAED,KAAI;EACF,MAAM,cAAc,KAAK,KAAK;EAC9B,MAAM,SAAS,MAAM,eAAe,eAAe,GAAG,KAAK;EAE3D,IAAI,SAAS;EACb,IAAI,cAAc;EAClB,IAAI,eAAe;EACnB,IAAI;EACJ,IAAI,aAAa;EACjB,IAAI,YAAY;EAChB,MAAM,gCAAgB,IAAI,KAAkC;EAE5D,MAAM,gBAAgB,UAAuB;GAC3C,MAAM,QAAQ,MAAM,UAAU,IAAI;AAElC,OAAI,OAAO,SAAS;AAClB,QAAI,YAAY;AACd,cAAS,KAAK,KAAK,GAAG;AACtB,kBAAa;;AAEf,cAAU,MAAM;;AAGlB,OAAI,OAAO,WACT,MAAK,MAAM,MAAM,MAAM,YAA+B;IACpD,IAAI,QAAQ,cAAc,IAAI,GAAG,MAAM;AACvC,QAAI,CAAC,
OAAO;AACV,aAAQ;MACN,IAAI,GAAG,MAAM;MACb,MAAM,GAAG,UAAU,QAAQ;MAC3B,WAAW;MACZ;AACD,mBAAc,IAAI,GAAG,OAAO,MAAM;;AAEpC,QAAI,GAAG,GAAI,OAAM,KAAK,GAAG;AACzB,QAAI,GAAG,UAAU,KAAM,OAAM,OAAO,GAAG,SAAS;AAChD,QAAI,GAAG,UAAU,UAAW,OAAM,aAAa,GAAG,SAAS;;AAI/D,OAAI,MAAM,OAAO;AACf,kBAAc,MAAM,MAAM,iBAAiB;AAC3C,mBAAe,MAAM,MAAM,qBAAqB;;;EAIpD,MAAM,uBAAuB;AAC3B,OAAI,UAAW;AACf,eAAY;AAEZ,QAAK,MAAM,MAAM,MAAM,KAAK,cAAc,QAAQ,CAAC,CAEjD,CADa,eAAe,QAAQ,MAAM,GAAG,CACxC,IAAI,EAAE,QAAQ,aAAa,CAAC;AAGnC,iBAAc,MAAM;IAAE;IAAQ;IAAa;IAAc;IAAQ,CAAC;;EAGpE,MAAM,mBACJ,OACA,OAAO,eAAe,KAAK,OAAO;AAEpC,SAAO,IAAI,MAAM,QAAkB,EACjC,IAAI,QAAQ,MAAM,UAAU;AAC1B,OAAI,SAAS,OAAO,cAClB,cAAa;IACX,MAAM,OAAO,kBAAkB;AAC/B,WAAO;KACL,MAAM,OAAO;MACX,MAAM,SAAS,MAAM,KAAK,MAAM;AAChC,UAAI,CAAC,OAAO,KACV,cAAa,OAAO,MAAqB;AAE3C,UAAI,OAAO,KACT,iBAAgB;AAElB,aAAO;;KAET,MAAM,OAAO,OAAiB;AAC5B,sBAAgB;AAChB,aAAO,KAAK,SAAS,MAAM,IAAI;OAAE,MAAM;OAAM,OAAO;OAAW;;KAEjE,MAAM,MAAM,KAAe;AACzB,qBAAe,MAAM,IAAI;AACzB,aAAO,KAAK,QAAQ,IAAI,IAAI;OAAE,MAAM;OAAM,OAAO;OAAW;;KAE/D;;AAGL,OAAI,SAAS,MACX;AAEF,UAAO,QAAQ,IAAI,QAAQ,MAAM,SAAS;KAE7C,CAAC;UACK,OAAO;AACd,iBAAe,MAAM,MAAM;AAC3B,QAAM"}
@@ -0,0 +1,20 @@
import { t as AuixPrism } from "../client-BSsSpkZY.cjs";

//#region src/integrations/openai.d.ts
interface OpenAIPrismOptions {
  name?: string;
  tags?: string[];
  metadata?: Record<string, unknown>;
  parentTraceId?: string;
  endUserId?: string;
}
/**
 * Wraps an OpenAI client to automatically trace `chat.completions.create` calls.
 *
 * Returns a proxy that behaves identically to the original client, but emits
 * trace events for both streaming and non-streaming chat completions.
 */
declare function prismOpenAI<T extends object>(tracer: AuixPrism, client: T, opts?: OpenAIPrismOptions): T;
//#endregion
export { OpenAIPrismOptions, prismOpenAI };
//# sourceMappingURL=openai.d.cts.map
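Per the options interface above, `parentTraceId` switches the wrapper into span mode: instead of starting a new trace, each completion is recorded as an `llm` span on the existing trace, with tool-call spans nested on the same trace. A sketch of nesting a wrapped call under a trace the host application already owns; `startTrace`, `end`, and `traceId` appear in the bundled code above, but the exact fields they accept beyond those shown there are an assumption:

```ts
// Sketch under stated assumptions; import specifiers are hypothetical.
import OpenAI from "openai";
import { prismOpenAI } from "@aui-x/prism/integrations/openai";

declare const tracer: import("@aui-x/prism").AuixPrism; // assumed core export

// A trace owned by the application; the completion below becomes a span on it.
const trace = tracer.startTrace({ name: "support-agent" });

const openai = prismOpenAI(tracer, new OpenAI(), {
  name: "draft-reply",
  parentTraceId: trace.traceId, // span mode: attach to the existing trace
  metadata: { ticketId: "hypothetical-123" },
});

const reply = await openai.chat.completions.create({
  model: "gpt-4o-mini",
  messages: [{ role: "user", content: "Where is my order?" }],
});

trace.end({ status: "completed", output: reply.choices[0]?.message?.content ?? "" });
```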
@@ -0,0 +1 @@
{"version":3,"file":"openai.d.cts","names":[],"sources":["../../src/integrations/openai.ts"],"mappings":";;;UAEiB,kBAAA;EACf,IAAA;EACA,IAAA;EACA,QAAA,GAAW,MAAA;EACX,aAAA;EACA,SAAA;AAAA;;;;;;;iBA0Hc,WAAA,kBAAA,CACd,MAAA,EAAQ,SAAA,EACR,MAAA,EAAQ,CAAA,EACR,IAAA,GAAO,kBAAA,GACN,CAAA"}
@@ -0,0 +1,20 @@
import { t as AuixPrism } from "../client-BrZstMQX.js";

//#region src/integrations/openai.d.ts
interface OpenAIPrismOptions {
  name?: string;
  tags?: string[];
  metadata?: Record<string, unknown>;
  parentTraceId?: string;
  endUserId?: string;
}
/**
 * Wraps an OpenAI client to automatically trace `chat.completions.create` calls.
 *
 * Returns a proxy that behaves identically to the original client, but emits
 * trace events for both streaming and non-streaming chat completions.
 */
declare function prismOpenAI<T extends object>(tracer: AuixPrism, client: T, opts?: OpenAIPrismOptions): T;
//#endregion
export { OpenAIPrismOptions, prismOpenAI };
//# sourceMappingURL=openai.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"openai.d.ts","names":[],"sources":["../../src/integrations/openai.ts"],"mappings":";;;UAEiB,kBAAA;EACf,IAAA;EACA,IAAA;EACA,QAAA,GAAW,MAAA;EACX,aAAA;EACA,SAAA;AAAA;;;;;;;iBA0Hc,WAAA,kBAAA,CACd,MAAA,EAAQ,SAAA,EACR,MAAA,EAAQ,CAAA,EACR,IAAA,GAAO,kBAAA,GACN,CAAA"}
@@ -0,0 +1,225 @@
//#region src/integrations/openai.ts
function truncateInput(messages, maxBytes = 32768) {
  let json = JSON.stringify(messages);
  if (new TextEncoder().encode(json).length <= maxBytes) return messages;
  if (!Array.isArray(messages)) return messages;
  const arr = [...messages];
  while (arr.length > 1) {
    arr.shift();
    json = JSON.stringify(arr);
    if (new TextEncoder().encode(json).length <= maxBytes) return arr;
  }
  return arr;
}
function createRootHandle(tracer, opts, model, input, metadata) {
  const parentTraceId = opts?.parentTraceId;
  if (parentTraceId) return {
    isSpanMode: true,
    traceId: parentTraceId,
    handle: tracer.startSpanOnTrace(parentTraceId, {
      name: opts?.name ?? model,
      type: "llm",
      input,
      model,
      provider: "openai",
      metadata
    })
  };
  const handle = tracer.startTrace({
    name: opts?.name ?? model,
    model,
    provider: "openai",
    input,
    metadata,
    tags: opts?.tags,
    endUserId: opts?.endUserId
  });
  return {
    isSpanMode: false,
    traceId: handle.traceId,
    handle
  };
}
function createToolSpan(tracer, root, toolCall) {
  if (root.isSpanMode) return tracer.startSpanOnTrace(root.traceId, {
    name: toolCall.name,
    type: "tool",
    input: toolCall.arguments,
    parentSpanId: root.handle.spanId
  });
  return root.handle.startSpan({
    name: toolCall.name,
    type: "tool",
    input: toolCall.arguments
  });
}
function endRootHandle(root, result) {
  root.handle.end({
    output: result.output,
    status: "completed",
    totalTokens: result.inputTokens + result.outputTokens,
    promptTokens: result.inputTokens,
    completionTokens: result.outputTokens,
    ttftMs: result.ttftMs
  });
}
function cleanupOnError(root, error) {
  root.handle.end({
    status: "error",
    error: error instanceof Error ? error.message : String(error)
  });
}
/**
 * Wraps an OpenAI client to automatically trace `chat.completions.create` calls.
 *
 * Returns a proxy that behaves identically to the original client, but emits
 * trace events for both streaming and non-streaming chat completions.
 */
function prismOpenAI(tracer, client, opts) {
  return new Proxy(client, { get(target, prop, receiver) {
    if (prop === "chat") {
      const chat = Reflect.get(target, prop, receiver);
      return new Proxy(chat, { get(chatTarget, chatProp, chatReceiver) {
        if (chatProp === "completions") {
          const completions = Reflect.get(chatTarget, chatProp, chatReceiver);
          return new Proxy(completions, { get(compTarget, compProp, compReceiver) {
            if (compProp === "create") return createInterceptor(tracer, Reflect.get(compTarget, compProp, compReceiver).bind(compTarget), opts);
            return Reflect.get(compTarget, compProp, compReceiver);
          } });
        }
        return Reflect.get(chatTarget, chatProp, chatReceiver);
      } });
    }
    return Reflect.get(target, prop, receiver);
  } });
}
function createInterceptor(tracer, originalCreate, opts) {
  return async (params, ...rest) => {
    const model = params.model ?? "unknown";
    const input = truncateInput(params.messages);
    const metadata = opts?.metadata;
    const isStream = !!params.stream;
    const root = createRootHandle(tracer, opts, model, input, metadata);
    if (!isStream) return handleNonStreaming(tracer, root, originalCreate, params, rest);
    return handleStreaming(tracer, root, originalCreate, params, rest);
  };
}
async function handleNonStreaming(tracer, root, originalCreate, params, rest) {
  try {
    const result = await originalCreate(params, ...rest);
    const message = result.choices?.[0]?.message;
    const output = message?.content ?? "";
    const inputTokens = result.usage?.prompt_tokens ?? 0;
    const outputTokens = result.usage?.completion_tokens ?? 0;
    if (message?.tool_calls) for (const tc of message.tool_calls) createToolSpan(tracer, root, {
      id: tc.id,
      name: tc.function.name,
      arguments: tc.function.arguments
    }).end({ status: "completed" });
    endRootHandle(root, {
      output,
      inputTokens,
      outputTokens
    });
    return result;
  } catch (error) {
    cleanupOnError(root, error);
    throw error;
  }
}
async function handleStreaming(tracer, root, originalCreate, params, rest) {
  const patchedParams = {
    ...params,
    stream_options: {
      ...params.stream_options,
      include_usage: true
    }
  };
  try {
    const streamStart = Date.now();
    const stream = await originalCreate(patchedParams, ...rest);
    let output = "";
    let inputTokens = 0;
    let outputTokens = 0;
    let ttftMs;
    let firstChunk = true;
    let finalized = false;
    const toolCallAccum = /* @__PURE__ */ new Map();
    const processChunk = (chunk) => {
      const delta = chunk.choices?.[0]?.delta;
      if (delta?.content) {
        if (firstChunk) {
          ttftMs = Date.now() - streamStart;
          firstChunk = false;
        }
        output += delta.content;
      }
      if (delta?.tool_calls) for (const tc of delta.tool_calls) {
        let accum = toolCallAccum.get(tc.index);
        if (!accum) {
          accum = {
            id: tc.id ?? "",
            name: tc.function?.name ?? "",
            arguments: ""
          };
          toolCallAccum.set(tc.index, accum);
        }
        if (tc.id) accum.id = tc.id;
        if (tc.function?.name) accum.name = tc.function.name;
        if (tc.function?.arguments) accum.arguments += tc.function.arguments;
      }
      if (chunk.usage) {
        inputTokens = chunk.usage.prompt_tokens ?? 0;
        outputTokens = chunk.usage.completion_tokens ?? 0;
      }
    };
    const finalizeStream = () => {
      if (finalized) return;
      finalized = true;
      for (const tc of Array.from(toolCallAccum.values())) createToolSpan(tracer, root, tc).end({ status: "completed" });
      endRootHandle(root, {
        output,
        inputTokens,
        outputTokens,
        ttftMs
      });
    };
    const originalIterator = stream[Symbol.asyncIterator].bind(stream);
    return new Proxy(stream, { get(target, prop, receiver) {
      if (prop === Symbol.asyncIterator) return () => {
        const iter = originalIterator();
        return {
          async next() {
            const result = await iter.next();
            if (!result.done) processChunk(result.value);
            if (result.done) finalizeStream();
            return result;
          },
          async return(value) {
            finalizeStream();
            return iter.return?.(value) ?? {
              done: true,
              value: void 0
            };
          },
          async throw(err) {
            cleanupOnError(root, err);
            return iter.throw?.(err) ?? {
              done: true,
              value: void 0
            };
          }
        };
      };
      if (prop === "tee") return;
      return Reflect.get(target, prop, receiver);
    } });
  } catch (error) {
    cleanupOnError(root, error);
    throw error;
  }
}

//#endregion
export { prismOpenAI };
//# sourceMappingURL=openai.js.map
@@ -0,0 +1 @@
{"version":3,"file":"openai.js","names":[],"sources":["../../src/integrations/openai.ts"],"sourcesContent":["import type { AuixPrism, SpanHandle, TraceHandle } from \"../client\";\n\nexport interface OpenAIPrismOptions {\n name?: string;\n tags?: string[];\n metadata?: Record<string, unknown>;\n parentTraceId?: string;\n endUserId?: string;\n}\n\ntype RootHandle =\n | { isSpanMode: true; traceId: string; handle: SpanHandle }\n | { isSpanMode: false; traceId: string; handle: TraceHandle };\n\nfunction truncateInput(messages: unknown, maxBytes = 32768): unknown {\n let json = JSON.stringify(messages);\n if (new TextEncoder().encode(json).length <= maxBytes) {\n return messages;\n }\n\n if (!Array.isArray(messages)) return messages;\n const arr = [...messages];\n while (arr.length > 1) {\n arr.shift();\n json = JSON.stringify(arr);\n if (new TextEncoder().encode(json).length <= maxBytes) {\n return arr;\n }\n }\n return arr;\n}\n\nfunction createRootHandle(\n tracer: AuixPrism,\n opts: OpenAIPrismOptions | undefined,\n model: string,\n input: unknown,\n metadata: Record<string, unknown> | undefined,\n): RootHandle {\n const parentTraceId = opts?.parentTraceId;\n\n if (parentTraceId) {\n const handle = tracer.startSpanOnTrace(parentTraceId, {\n name: opts?.name ?? model,\n type: \"llm\",\n input,\n model,\n provider: \"openai\",\n metadata,\n });\n return { isSpanMode: true, traceId: parentTraceId, handle };\n }\n\n const handle = tracer.startTrace({\n name: opts?.name ?? model,\n model,\n provider: \"openai\",\n input,\n metadata,\n tags: opts?.tags,\n endUserId: opts?.endUserId,\n });\n return { isSpanMode: false, traceId: handle.traceId, handle };\n}\n\nfunction createToolSpan(\n tracer: AuixPrism,\n root: RootHandle,\n toolCall: { id: string; name: string; arguments: string },\n): SpanHandle {\n if (root.isSpanMode) {\n return tracer.startSpanOnTrace(root.traceId, {\n name: toolCall.name,\n type: \"tool\",\n input: toolCall.arguments,\n parentSpanId: root.handle.spanId,\n });\n }\n return root.handle.startSpan({\n name: toolCall.name,\n type: \"tool\",\n input: toolCall.arguments,\n });\n}\n\nfunction endRootHandle(\n root: RootHandle,\n result: {\n output: string;\n inputTokens: number;\n outputTokens: number;\n ttftMs?: number;\n },\n): void {\n root.handle.end({\n output: result.output,\n status: \"completed\",\n totalTokens: result.inputTokens + result.outputTokens,\n promptTokens: result.inputTokens,\n completionTokens: result.outputTokens,\n ttftMs: result.ttftMs,\n });\n}\n\nfunction cleanupOnError(root: RootHandle, error: unknown): void {\n root.handle.end({\n status: \"error\",\n error: error instanceof Error ? 
error.message : String(error),\n });\n}\n\ninterface ToolCallDelta {\n index: number;\n id?: string;\n function?: { name?: string; arguments?: string };\n}\n\ninterface AccumulatedToolCall {\n id: string;\n name: string;\n arguments: string;\n}\n\n/**\n * Wraps an OpenAI client to automatically trace `chat.completions.create` calls.\n *\n * Returns a proxy that behaves identically to the original client, but emits\n * trace events for both streaming and non-streaming chat completions.\n */\nexport function prismOpenAI<T extends object>(\n tracer: AuixPrism,\n client: T,\n opts?: OpenAIPrismOptions,\n): T {\n return new Proxy(client, {\n get(target, prop, receiver) {\n if (prop === \"chat\") {\n const chat = Reflect.get(target, prop, receiver) as object;\n return new Proxy(chat, {\n get(chatTarget, chatProp, chatReceiver) {\n if (chatProp === \"completions\") {\n const completions = Reflect.get(\n chatTarget,\n chatProp,\n chatReceiver,\n );\n return new Proxy(completions, {\n get(compTarget, compProp, compReceiver) {\n if (compProp === \"create\") {\n return createInterceptor(\n tracer,\n Reflect.get(compTarget, compProp, compReceiver).bind(\n compTarget,\n ),\n opts,\n );\n }\n return Reflect.get(compTarget, compProp, compReceiver);\n },\n });\n }\n return Reflect.get(chatTarget, chatProp, chatReceiver);\n },\n });\n }\n return Reflect.get(target, prop, receiver);\n },\n });\n}\n\nfunction createInterceptor(\n tracer: AuixPrism,\n originalCreate: (...args: unknown[]) => Promise<unknown>,\n opts?: OpenAIPrismOptions,\n) {\n return async (params: Record<string, unknown>, ...rest: unknown[]) => {\n const model = (params.model as string) ?? \"unknown\";\n const input = truncateInput(params.messages);\n const metadata = opts?.metadata;\n const isStream = !!params.stream;\n\n const root = createRootHandle(tracer, opts, model, input, metadata);\n\n if (!isStream) {\n return handleNonStreaming(tracer, root, originalCreate, params, rest);\n }\n\n return handleStreaming(tracer, root, originalCreate, params, rest);\n };\n}\n\nasync function handleNonStreaming(\n tracer: AuixPrism,\n root: RootHandle,\n originalCreate: (...args: unknown[]) => Promise<unknown>,\n params: Record<string, unknown>,\n rest: unknown[],\n): Promise<unknown> {\n try {\n const result = (await originalCreate(params, ...rest)) as {\n choices?: Array<{\n message?: {\n content?: string | null;\n tool_calls?: Array<{\n id: string;\n function: { name: string; arguments: string };\n }>;\n };\n }>;\n usage?: {\n prompt_tokens?: number;\n completion_tokens?: number;\n total_tokens?: number;\n };\n };\n\n const message = result.choices?.[0]?.message;\n const output = message?.content ?? \"\";\n const inputTokens = result.usage?.prompt_tokens ?? 0;\n const outputTokens = result.usage?.completion_tokens ?? 
0;\n\n if (message?.tool_calls) {\n for (const tc of message.tool_calls) {\n const span = createToolSpan(tracer, root, {\n id: tc.id,\n name: tc.function.name,\n arguments: tc.function.arguments,\n });\n span.end({ status: \"completed\" });\n }\n }\n\n endRootHandle(root, { output, inputTokens, outputTokens });\n return result;\n } catch (error) {\n cleanupOnError(root, error);\n throw error;\n }\n}\n\nasync function handleStreaming(\n tracer: AuixPrism,\n root: RootHandle,\n originalCreate: (...args: unknown[]) => Promise<unknown>,\n params: Record<string, unknown>,\n rest: unknown[],\n): Promise<unknown> {\n // Auto-inject stream_options to get usage data in the final chunk\n const patchedParams = {\n ...params,\n stream_options: {\n ...(params.stream_options as Record<string, unknown> | undefined),\n include_usage: true,\n },\n };\n\n try {\n const streamStart = Date.now();\n const stream = await originalCreate(patchedParams, ...rest);\n\n let output = \"\";\n let inputTokens = 0;\n let outputTokens = 0;\n let ttftMs: number | undefined;\n let firstChunk = true;\n let finalized = false;\n const toolCallAccum = new Map<number, AccumulatedToolCall>();\n\n const processChunk = (chunk: StreamChunk) => {\n const delta = chunk.choices?.[0]?.delta;\n\n if (delta?.content) {\n if (firstChunk) {\n ttftMs = Date.now() - streamStart;\n firstChunk = false;\n }\n output += delta.content;\n }\n\n if (delta?.tool_calls) {\n for (const tc of delta.tool_calls as ToolCallDelta[]) {\n let accum = toolCallAccum.get(tc.index);\n if (!accum) {\n accum = {\n id: tc.id ?? \"\",\n name: tc.function?.name ?? \"\",\n arguments: \"\",\n };\n toolCallAccum.set(tc.index, accum);\n }\n if (tc.id) accum.id = tc.id;\n if (tc.function?.name) accum.name = tc.function.name;\n if (tc.function?.arguments) accum.arguments += tc.function.arguments;\n }\n }\n\n if (chunk.usage) {\n inputTokens = chunk.usage.prompt_tokens ?? 0;\n outputTokens = chunk.usage.completion_tokens ?? 0;\n }\n };\n\n const finalizeStream = () => {\n if (finalized) return;\n finalized = true;\n\n for (const tc of Array.from(toolCallAccum.values())) {\n const span = createToolSpan(tracer, root, tc);\n span.end({ status: \"completed\" });\n }\n\n endRootHandle(root, { output, inputTokens, outputTokens, ttftMs });\n };\n\n const originalIterator = (\n stream as { [Symbol.asyncIterator](): AsyncIterator<unknown> }\n )[Symbol.asyncIterator].bind(stream);\n\n return new Proxy(stream as object, {\n get(target, prop, receiver) {\n if (prop === Symbol.asyncIterator) {\n return () => {\n const iter = originalIterator();\n return {\n async next() {\n const result = await iter.next();\n if (!result.done) {\n processChunk(result.value as StreamChunk);\n }\n if (result.done) {\n finalizeStream();\n }\n return result;\n },\n async return(value?: unknown) {\n finalizeStream();\n return iter.return?.(value) ?? { done: true, value: undefined };\n },\n async throw(err?: unknown) {\n cleanupOnError(root, err);\n return iter.throw?.(err) ?? 
{ done: true, value: undefined };\n },\n };\n };\n }\n if (prop === \"tee\") {\n return undefined;\n }\n return Reflect.get(target, prop, receiver);\n },\n });\n } catch (error) {\n cleanupOnError(root, error);\n throw error;\n }\n}\n\ninterface StreamChunk {\n choices?: Array<{\n delta?: {\n content?: string;\n tool_calls?: ToolCallDelta[];\n };\n }>;\n usage?: {\n prompt_tokens?: number;\n completion_tokens?: number;\n };\n}\n"],"mappings":";AAcA,SAAS,cAAc,UAAmB,WAAW,OAAgB;CACnE,IAAI,OAAO,KAAK,UAAU,SAAS;AACnC,KAAI,IAAI,aAAa,CAAC,OAAO,KAAK,CAAC,UAAU,SAC3C,QAAO;AAGT,KAAI,CAAC,MAAM,QAAQ,SAAS,CAAE,QAAO;CACrC,MAAM,MAAM,CAAC,GAAG,SAAS;AACzB,QAAO,IAAI,SAAS,GAAG;AACrB,MAAI,OAAO;AACX,SAAO,KAAK,UAAU,IAAI;AAC1B,MAAI,IAAI,aAAa,CAAC,OAAO,KAAK,CAAC,UAAU,SAC3C,QAAO;;AAGX,QAAO;;AAGT,SAAS,iBACP,QACA,MACA,OACA,OACA,UACY;CACZ,MAAM,gBAAgB,MAAM;AAE5B,KAAI,cASF,QAAO;EAAE,YAAY;EAAM,SAAS;EAAe,QARpC,OAAO,iBAAiB,eAAe;GACpD,MAAM,MAAM,QAAQ;GACpB,MAAM;GACN;GACA;GACA,UAAU;GACV;GACD,CAAC;EACyD;CAG7D,MAAM,SAAS,OAAO,WAAW;EAC/B,MAAM,MAAM,QAAQ;EACpB;EACA,UAAU;EACV;EACA;EACA,MAAM,MAAM;EACZ,WAAW,MAAM;EAClB,CAAC;AACF,QAAO;EAAE,YAAY;EAAO,SAAS,OAAO;EAAS;EAAQ;;AAG/D,SAAS,eACP,QACA,MACA,UACY;AACZ,KAAI,KAAK,WACP,QAAO,OAAO,iBAAiB,KAAK,SAAS;EAC3C,MAAM,SAAS;EACf,MAAM;EACN,OAAO,SAAS;EAChB,cAAc,KAAK,OAAO;EAC3B,CAAC;AAEJ,QAAO,KAAK,OAAO,UAAU;EAC3B,MAAM,SAAS;EACf,MAAM;EACN,OAAO,SAAS;EACjB,CAAC;;AAGJ,SAAS,cACP,MACA,QAMM;AACN,MAAK,OAAO,IAAI;EACd,QAAQ,OAAO;EACf,QAAQ;EACR,aAAa,OAAO,cAAc,OAAO;EACzC,cAAc,OAAO;EACrB,kBAAkB,OAAO;EACzB,QAAQ,OAAO;EAChB,CAAC;;AAGJ,SAAS,eAAe,MAAkB,OAAsB;AAC9D,MAAK,OAAO,IAAI;EACd,QAAQ;EACR,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;EAC9D,CAAC;;;;;;;;AAqBJ,SAAgB,YACd,QACA,QACA,MACG;AACH,QAAO,IAAI,MAAM,QAAQ,EACvB,IAAI,QAAQ,MAAM,UAAU;AAC1B,MAAI,SAAS,QAAQ;GACnB,MAAM,OAAO,QAAQ,IAAI,QAAQ,MAAM,SAAS;AAChD,UAAO,IAAI,MAAM,MAAM,EACrB,IAAI,YAAY,UAAU,cAAc;AACtC,QAAI,aAAa,eAAe;KAC9B,MAAM,cAAc,QAAQ,IAC1B,YACA,UACA,aACD;AACD,YAAO,IAAI,MAAM,aAAa,EAC5B,IAAI,YAAY,UAAU,cAAc;AACtC,UAAI,aAAa,SACf,QAAO,kBACL,QACA,QAAQ,IAAI,YAAY,UAAU,aAAa,CAAC,KAC9C,WACD,EACD,KACD;AAEH,aAAO,QAAQ,IAAI,YAAY,UAAU,aAAa;QAEzD,CAAC;;AAEJ,WAAO,QAAQ,IAAI,YAAY,UAAU,aAAa;MAEzD,CAAC;;AAEJ,SAAO,QAAQ,IAAI,QAAQ,MAAM,SAAS;IAE7C,CAAC;;AAGJ,SAAS,kBACP,QACA,gBACA,MACA;AACA,QAAO,OAAO,QAAiC,GAAG,SAAoB;EACpE,MAAM,QAAS,OAAO,SAAoB;EAC1C,MAAM,QAAQ,cAAc,OAAO,SAAS;EAC5C,MAAM,WAAW,MAAM;EACvB,MAAM,WAAW,CAAC,CAAC,OAAO;EAE1B,MAAM,OAAO,iBAAiB,QAAQ,MAAM,OAAO,OAAO,SAAS;AAEnE,MAAI,CAAC,SACH,QAAO,mBAAmB,QAAQ,MAAM,gBAAgB,QAAQ,KAAK;AAGvE,SAAO,gBAAgB,QAAQ,MAAM,gBAAgB,QAAQ,KAAK;;;AAItE,eAAe,mBACb,QACA,MACA,gBACA,QACA,MACkB;AAClB,KAAI;EACF,MAAM,SAAU,MAAM,eAAe,QAAQ,GAAG,KAAK;EAiBrD,MAAM,UAAU,OAAO,UAAU,IAAI;EACrC,MAAM,SAAS,SAAS,WAAW;EACnC,MAAM,cAAc,OAAO,OAAO,iBAAiB;EACnD,MAAM,eAAe,OAAO,OAAO,qBAAqB;AAExD,MAAI,SAAS,WACX,MAAK,MAAM,MAAM,QAAQ,WAMvB,CALa,eAAe,QAAQ,MAAM;GACxC,IAAI,GAAG;GACP,MAAM,GAAG,SAAS;GAClB,WAAW,GAAG,SAAS;GACxB,CAAC,CACG,IAAI,EAAE,QAAQ,aAAa,CAAC;AAIrC,gBAAc,MAAM;GAAE;GAAQ;GAAa;GAAc,CAAC;AAC1D,SAAO;UACA,OAAO;AACd,iBAAe,MAAM,MAAM;AAC3B,QAAM;;;AAIV,eAAe,gBACb,QACA,MACA,gBACA,QACA,MACkB;CAElB,MAAM,gBAAgB;EACpB,GAAG;EACH,gBAAgB;GACd,GAAI,OAAO;GACX,eAAe;GAChB;EACF;AAED,KAAI;EACF,MAAM,cAAc,KAAK,KAAK;EAC9B,MAAM,SAAS,MAAM,eAAe,eAAe,GAAG,KAAK;EAE3D,IAAI,SAAS;EACb,IAAI,cAAc;EAClB,IAAI,eAAe;EACnB,IAAI;EACJ,IAAI,aAAa;EACjB,IAAI,YAAY;EAChB,MAAM,gCAAgB,IAAI,KAAkC;EAE5D,MAAM,gBAAgB,UAAuB;GAC3C,MAAM,QAAQ,MAAM,UAAU,IAAI;AAElC,OAAI,OAAO,SAAS;AAClB,QAAI,YAAY;AACd,cAAS,KAAK,KAAK,GAAG;AACtB,kBAAa;;AAEf,cAAU,MAAM;;AAGlB,OAAI,OAAO,WACT,MAAK,MAAM,MAAM,MAAM,YAA+B;IACpD,IAAI,QAAQ,cAAc,IAAI,GAAG,MAAM;AACvC,QAAI,CAAC,OA
AO;AACV,aAAQ;MACN,IAAI,GAAG,MAAM;MACb,MAAM,GAAG,UAAU,QAAQ;MAC3B,WAAW;MACZ;AACD,mBAAc,IAAI,GAAG,OAAO,MAAM;;AAEpC,QAAI,GAAG,GAAI,OAAM,KAAK,GAAG;AACzB,QAAI,GAAG,UAAU,KAAM,OAAM,OAAO,GAAG,SAAS;AAChD,QAAI,GAAG,UAAU,UAAW,OAAM,aAAa,GAAG,SAAS;;AAI/D,OAAI,MAAM,OAAO;AACf,kBAAc,MAAM,MAAM,iBAAiB;AAC3C,mBAAe,MAAM,MAAM,qBAAqB;;;EAIpD,MAAM,uBAAuB;AAC3B,OAAI,UAAW;AACf,eAAY;AAEZ,QAAK,MAAM,MAAM,MAAM,KAAK,cAAc,QAAQ,CAAC,CAEjD,CADa,eAAe,QAAQ,MAAM,GAAG,CACxC,IAAI,EAAE,QAAQ,aAAa,CAAC;AAGnC,iBAAc,MAAM;IAAE;IAAQ;IAAa;IAAc;IAAQ,CAAC;;EAGpE,MAAM,mBACJ,OACA,OAAO,eAAe,KAAK,OAAO;AAEpC,SAAO,IAAI,MAAM,QAAkB,EACjC,IAAI,QAAQ,MAAM,UAAU;AAC1B,OAAI,SAAS,OAAO,cAClB,cAAa;IACX,MAAM,OAAO,kBAAkB;AAC/B,WAAO;KACL,MAAM,OAAO;MACX,MAAM,SAAS,MAAM,KAAK,MAAM;AAChC,UAAI,CAAC,OAAO,KACV,cAAa,OAAO,MAAqB;AAE3C,UAAI,OAAO,KACT,iBAAgB;AAElB,aAAO;;KAET,MAAM,OAAO,OAAiB;AAC5B,sBAAgB;AAChB,aAAO,KAAK,SAAS,MAAM,IAAI;OAAE,MAAM;OAAM,OAAO;OAAW;;KAEjE,MAAM,MAAM,KAAe;AACzB,qBAAe,MAAM,IAAI;AACzB,aAAO,KAAK,QAAQ,IAAI,IAAI;OAAE,MAAM;OAAM,OAAO;OAAW;;KAE/D;;AAGL,OAAI,SAAS,MACX;AAEF,UAAO,QAAQ,IAAI,QAAQ,MAAM,SAAS;KAE7C,CAAC;UACK,OAAO;AACd,iBAAe,MAAM,MAAM;AAC3B,QAAM"}