@aui-x/prism 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +11 -0
- package/README.md +195 -0
- package/dist/client-B9WXHjpz.cjs +192 -0
- package/dist/client-B9WXHjpz.cjs.map +1 -0
- package/dist/client-BSsSpkZY.d.cts +157 -0
- package/dist/client-BSsSpkZY.d.cts.map +1 -0
- package/dist/client-BrZstMQX.d.ts +157 -0
- package/dist/client-BrZstMQX.d.ts.map +1 -0
- package/dist/client-C7RiAn7a.js +174 -0
- package/dist/client-C7RiAn7a.js.map +1 -0
- package/dist/core.cjs +6 -0
- package/dist/core.d.cts +2 -0
- package/dist/core.d.ts +2 -0
- package/dist/core.js +3 -0
- package/dist/index.cjs +12 -0
- package/dist/index.d.cts +5 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +6 -0
- package/dist/integrations/ai-sdk.cjs +26 -0
- package/dist/integrations/ai-sdk.cjs.map +1 -0
- package/dist/integrations/ai-sdk.d.cts +18 -0
- package/dist/integrations/ai-sdk.d.cts.map +1 -0
- package/dist/integrations/ai-sdk.d.ts +18 -0
- package/dist/integrations/ai-sdk.d.ts.map +1 -0
- package/dist/integrations/ai-sdk.js +25 -0
- package/dist/integrations/ai-sdk.js.map +1 -0
- package/dist/integrations/anthropic.cjs +226 -0
- package/dist/integrations/anthropic.cjs.map +1 -0
- package/dist/integrations/anthropic.d.cts +20 -0
- package/dist/integrations/anthropic.d.cts.map +1 -0
- package/dist/integrations/anthropic.d.ts +20 -0
- package/dist/integrations/anthropic.d.ts.map +1 -0
- package/dist/integrations/anthropic.js +224 -0
- package/dist/integrations/anthropic.js.map +1 -0
- package/dist/integrations/openai.cjs +227 -0
- package/dist/integrations/openai.cjs.map +1 -0
- package/dist/integrations/openai.d.cts +20 -0
- package/dist/integrations/openai.d.cts.map +1 -0
- package/dist/integrations/openai.d.ts +20 -0
- package/dist/integrations/openai.d.ts.map +1 -0
- package/dist/integrations/openai.js +225 -0
- package/dist/integrations/openai.js.map +1 -0
- package/dist/wrapper-7jRyp54U.js +242 -0
- package/dist/wrapper-7jRyp54U.js.map +1 -0
- package/dist/wrapper-ByspXfxS.cjs +247 -0
- package/dist/wrapper-ByspXfxS.cjs.map +1 -0
- package/package.json +103 -0

package/dist/wrapper-7jRyp54U.js
ADDED

@@ -0,0 +1,242 @@
+import { wrapLanguageModel } from "ai";
+
+//#region src/wrapper.ts
+function wrapModelWithMiddleware(tracer, model, opts) {
+	return wrapLanguageModel({
+		model,
+		middleware: createPrismMiddleware(tracer, opts)
+	});
+}
+function extractTextFromContent(content) {
+	return content.filter((c) => c.type === "text" && typeof c.text === "string").map((c) => c.text).join("");
+}
+function truncateInput(prompt, maxBytes = 32768) {
+	let json = JSON.stringify(prompt);
+	if (new TextEncoder().encode(json).length <= maxBytes) return prompt;
+	const arr = [...prompt];
+	while (arr.length > 1) {
+		arr.shift();
+		json = JSON.stringify(arr);
+		if (new TextEncoder().encode(json).length <= maxBytes) return arr;
+	}
+	return arr;
+}
+function extractModelConfig(params) {
+	const keys = [
+		"temperature",
+		"maxOutputTokens",
+		"topP",
+		"topK",
+		"frequencyPenalty",
+		"presencePenalty",
+		"stopSequences"
+	];
+	const config = {};
+	for (const key of keys) if (params[key] !== void 0) config[key] = params[key];
+	return Object.keys(config).length > 0 ? config : void 0;
+}
+function extractToolOutput(output) {
+	if (output && typeof output === "object" && "type" in output) {
+		const o = output;
+		if (o.type === "text" || o.type === "json") return o.value;
+		if (o.type === "denied") return `[denied] ${o.reason ?? ""}`;
+	}
+	return output;
+}
+function resolveToolResults(prompt, pendingToolSpans) {
+	if (pendingToolSpans.size === 0) return;
+	for (const msg of prompt) {
+		if (msg.role !== "tool") continue;
+		for (const part of msg.content) {
+			if (part.type !== "tool-result") continue;
+			const span = pendingToolSpans.get(part.toolCallId);
+			if (span) {
+				span.end({
+					output: extractToolOutput(part.output),
+					status: "completed"
+				});
+				pendingToolSpans.delete(part.toolCallId);
+			}
+		}
+	}
+}
+function createRootHandle(tracer, opts, modelId, provider, input, metadata) {
+	const parentTraceId = opts?.parentTraceId;
+	if (parentTraceId) return {
+		isSpanMode: true,
+		traceId: parentTraceId,
+		handle: tracer.startSpanOnTrace(parentTraceId, {
+			name: opts?.name ?? modelId,
+			type: "llm",
+			input,
+			model: modelId,
+			provider,
+			metadata
+		})
+	};
+	const handle = tracer.startTrace({
+		name: opts?.name ?? modelId,
+		model: modelId,
+		provider,
+		input,
+		metadata,
+		tags: opts?.tags,
+		endUserId: opts?.endUserId
+	});
+	return {
+		isSpanMode: false,
+		traceId: handle.traceId,
+		handle
+	};
+}
+function createToolSpan(tracer, root, toolCall) {
+	if (root.isSpanMode) return tracer.startSpanOnTrace(root.traceId, {
+		name: toolCall.toolName,
+		type: "tool",
+		input: toolCall.input,
+		parentSpanId: root.handle.spanId
+	});
+	return root.handle.startSpan({
+		name: toolCall.toolName,
+		type: "tool",
+		input: toolCall.input
+	});
+}
+function endRootHandle(root, result) {
+	root.handle.end({
+		output: result.output,
+		status: "completed",
+		totalTokens: result.inputTokens + result.outputTokens,
+		promptTokens: result.inputTokens,
+		completionTokens: result.outputTokens,
+		ttftMs: result.ttftMs
+	});
+}
+function cleanupOnError(root, pendingToolSpans, error) {
+	for (const span of pendingToolSpans.values()) span.end({ status: "error" });
+	pendingToolSpans.clear();
+	root.handle.end({
+		status: "error",
+		error: error instanceof Error ? error.message : String(error)
+	});
+}
+function buildMetadata(opts, params) {
+	const modelConfig = extractModelConfig(params);
+	if (modelConfig) return {
+		...opts?.metadata,
+		modelConfig
+	};
+	return opts?.metadata;
+}
+function createPrismMiddleware(tracer, opts) {
+	const pendingToolSpans = /* @__PURE__ */ new Map();
+	return {
+		specificationVersion: "v3",
+		wrapGenerate: async ({ doGenerate, params, model }) => {
+			const input = truncateInput(params.prompt);
+			const metadata = buildMetadata(opts, params);
+			resolveToolResults(params.prompt, pendingToolSpans);
+			const root = createRootHandle(tracer, opts, model.modelId, model.provider, input, metadata);
+			try {
+				const result = await doGenerate();
+				const content = result.content;
+				const output = extractTextFromContent(content);
+				const inputTokens = result.usage.inputTokens.total ?? 0;
+				const outputTokens = result.usage.outputTokens.total ?? 0;
+				for (const part of content) if (part.type === "tool-call" && part.toolCallId) {
+					const span = createToolSpan(tracer, root, {
+						id: part.toolCallId,
+						toolName: part.toolName ?? "tool",
+						input: part.input ?? ""
+					});
+					if (root.isSpanMode) pendingToolSpans.set(part.toolCallId, span);
+					else span.end({ status: "completed" });
+				}
+				endRootHandle(root, {
+					output,
+					inputTokens,
+					outputTokens
+				});
+				return result;
+			} catch (error) {
+				cleanupOnError(root, pendingToolSpans, error);
+				throw error;
+			}
+		},
+		wrapStream: async ({ doStream, params, model }) => {
+			const input = truncateInput(params.prompt);
+			const metadata = buildMetadata(opts, params);
+			resolveToolResults(params.prompt, pendingToolSpans);
+			const root = createRootHandle(tracer, opts, model.modelId, model.provider, input, metadata);
+			try {
+				const streamStart = Date.now();
+				const { stream, ...rest } = await doStream();
+				let output = "";
+				let inputTokens = 0;
+				let outputTokens = 0;
+				let ttftMs;
+				let firstChunk = true;
+				const toolCalls = [];
+				let currentToolInput = null;
+				const transformStream = new TransformStream({
+					transform(chunk, controller) {
+						switch (chunk.type) {
+							case "text-delta":
+								if (firstChunk) {
+									ttftMs = Date.now() - streamStart;
+									firstChunk = false;
+								}
+								output += chunk.delta;
+								break;
+							case "tool-input-start":
+								currentToolInput = {
+									id: chunk.id,
+									toolName: chunk.toolName,
+									input: ""
+								};
+								break;
+							case "tool-input-delta":
+								if (currentToolInput && currentToolInput.id === chunk.id) currentToolInput.input += chunk.delta;
+								break;
+							case "tool-input-end":
+								if (currentToolInput && currentToolInput.id === chunk.id) {
+									toolCalls.push(currentToolInput);
+									currentToolInput = null;
+								}
+								break;
+							case "finish":
+								inputTokens = chunk.usage.inputTokens.total ?? 0;
+								outputTokens = chunk.usage.outputTokens.total ?? 0;
+								break;
+						}
+						controller.enqueue(chunk);
+					},
+					flush() {
+						for (const tc of toolCalls) {
+							const span = createToolSpan(tracer, root, tc);
+							if (root.isSpanMode) pendingToolSpans.set(tc.id, span);
+							else span.end({ status: "completed" });
+						}
+						endRootHandle(root, {
+							output,
+							inputTokens,
+							outputTokens,
+							ttftMs
+						});
+					}
+				});
+				return {
+					stream: stream.pipeThrough(transformStream),
+					...rest
+				};
+			} catch (error) {
+				cleanupOnError(root, pendingToolSpans, error);
+				throw error;
+			}
+		}
+	};
+}
+
+//#endregion
+export { wrapModelWithMiddleware as t };
+//# sourceMappingURL=wrapper-7jRyp54U.js.map
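
For orientation, here is a minimal consumption sketch for the wrapper above. Only wrapModelWithMiddleware's call shape and the option fields it actually reads (name, tags, endUserId, metadata, parentTraceId) come from this chunk; the import path, the tracer construction, and the example provider (@ai-sdk/openai) are assumptions, since the client chunk and the index entry are not part of this hunk.

// Hedged sketch, not the package's documented API: this chunk only exports the function
// under the mangled alias "t", so the public name and import path are assumed here.
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai"; // example provider, not a peer dependency of this package

declare const tracer: any; // hypothetical AuixPrism instance; the client module is not shown in this diff
declare const wrapModelWithMiddleware: (tracer: any, model: any, opts?: Record<string, unknown>) => any;

// Wrapping a model routes every doGenerate/doStream call through createPrismMiddleware,
// which records input, output, token usage, TTFT, and tool calls.
const tracedModel = wrapModelWithMiddleware(tracer, openai("gpt-4o-mini"), {
  name: "ticket-summary",           // used as the trace name; falls back to the model id
  tags: ["support"],
  endUserId: "user_123",
  metadata: { release: "2024.10" },
});

const { text } = await generateText({
  model: tracedModel,
  prompt: "Summarize the open tickets.",
});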

package/dist/wrapper-7jRyp54U.js.map
ADDED

@@ -0,0 +1 @@
+[minified single-line source map for wrapper-7jRyp54U.js: version 3, source ../src/wrapper.ts, with the original TypeScript source inlined in sourcesContent and base64-VLQ mappings]
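
createRootHandle gives the middleware two rooting modes: without parentTraceId, each model call starts its own trace via tracer.startTrace; with parentTraceId, the call is recorded as an "llm" span on an existing trace via tracer.startSpanOnTrace, and tool spans are parented to it. A sketch of both modes, using only the tracer calls visible in the wrapper (the tracer, model, and import are placeholders, not confirmed API):

declare const tracer: any;  // placeholder AuixPrism instance
declare const model: any;   // placeholder LanguageModelV3
declare const wrapModelWithMiddleware: (tracer: any, model: any, opts?: Record<string, unknown>) => any;

// Trace mode: each generate/stream call becomes its own trace
// (tracer.startTrace with name, model, provider, input, metadata, tags, endUserId).
const standalone = wrapModelWithMiddleware(tracer, model, { name: "chat-turn" });

// Span mode: the call attaches to an existing trace as an "llm" span
// (tracer.startSpanOnTrace), and any tool-call spans are parented to that span.
const existingTraceId = "trace_abc123"; // hypothetical id produced by a trace started elsewhere
const nested = wrapModelWithMiddleware(tracer, model, { parentTraceId: existingTraceId });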

package/dist/wrapper-ByspXfxS.cjs
ADDED

@@ -0,0 +1,247 @@
+let ai = require("ai");
+
+//#region src/wrapper.ts
+function wrapModelWithMiddleware(tracer, model, opts) {
+	return (0, ai.wrapLanguageModel)({
+		model,
+		middleware: createPrismMiddleware(tracer, opts)
+	});
+}
+function extractTextFromContent(content) {
+	return content.filter((c) => c.type === "text" && typeof c.text === "string").map((c) => c.text).join("");
+}
+function truncateInput(prompt, maxBytes = 32768) {
+	let json = JSON.stringify(prompt);
+	if (new TextEncoder().encode(json).length <= maxBytes) return prompt;
+	const arr = [...prompt];
+	while (arr.length > 1) {
+		arr.shift();
+		json = JSON.stringify(arr);
+		if (new TextEncoder().encode(json).length <= maxBytes) return arr;
+	}
+	return arr;
+}
+function extractModelConfig(params) {
+	const keys = [
+		"temperature",
+		"maxOutputTokens",
+		"topP",
+		"topK",
+		"frequencyPenalty",
+		"presencePenalty",
+		"stopSequences"
+	];
+	const config = {};
+	for (const key of keys) if (params[key] !== void 0) config[key] = params[key];
+	return Object.keys(config).length > 0 ? config : void 0;
+}
+function extractToolOutput(output) {
+	if (output && typeof output === "object" && "type" in output) {
+		const o = output;
+		if (o.type === "text" || o.type === "json") return o.value;
+		if (o.type === "denied") return `[denied] ${o.reason ?? ""}`;
+	}
+	return output;
+}
+function resolveToolResults(prompt, pendingToolSpans) {
+	if (pendingToolSpans.size === 0) return;
+	for (const msg of prompt) {
+		if (msg.role !== "tool") continue;
+		for (const part of msg.content) {
+			if (part.type !== "tool-result") continue;
+			const span = pendingToolSpans.get(part.toolCallId);
+			if (span) {
+				span.end({
+					output: extractToolOutput(part.output),
+					status: "completed"
+				});
+				pendingToolSpans.delete(part.toolCallId);
+			}
+		}
+	}
+}
+function createRootHandle(tracer, opts, modelId, provider, input, metadata) {
+	const parentTraceId = opts?.parentTraceId;
+	if (parentTraceId) return {
+		isSpanMode: true,
+		traceId: parentTraceId,
+		handle: tracer.startSpanOnTrace(parentTraceId, {
+			name: opts?.name ?? modelId,
+			type: "llm",
+			input,
+			model: modelId,
+			provider,
+			metadata
+		})
+	};
+	const handle = tracer.startTrace({
+		name: opts?.name ?? modelId,
+		model: modelId,
+		provider,
+		input,
+		metadata,
+		tags: opts?.tags,
+		endUserId: opts?.endUserId
+	});
+	return {
+		isSpanMode: false,
+		traceId: handle.traceId,
+		handle
+	};
+}
+function createToolSpan(tracer, root, toolCall) {
+	if (root.isSpanMode) return tracer.startSpanOnTrace(root.traceId, {
+		name: toolCall.toolName,
+		type: "tool",
+		input: toolCall.input,
+		parentSpanId: root.handle.spanId
+	});
+	return root.handle.startSpan({
+		name: toolCall.toolName,
+		type: "tool",
+		input: toolCall.input
+	});
+}
+function endRootHandle(root, result) {
+	root.handle.end({
+		output: result.output,
+		status: "completed",
+		totalTokens: result.inputTokens + result.outputTokens,
+		promptTokens: result.inputTokens,
+		completionTokens: result.outputTokens,
+		ttftMs: result.ttftMs
+	});
+}
+function cleanupOnError(root, pendingToolSpans, error) {
+	for (const span of pendingToolSpans.values()) span.end({ status: "error" });
+	pendingToolSpans.clear();
+	root.handle.end({
+		status: "error",
+		error: error instanceof Error ? error.message : String(error)
+	});
+}
+function buildMetadata(opts, params) {
+	const modelConfig = extractModelConfig(params);
+	if (modelConfig) return {
+		...opts?.metadata,
+		modelConfig
+	};
+	return opts?.metadata;
+}
+function createPrismMiddleware(tracer, opts) {
+	const pendingToolSpans = /* @__PURE__ */ new Map();
+	return {
+		specificationVersion: "v3",
+		wrapGenerate: async ({ doGenerate, params, model }) => {
+			const input = truncateInput(params.prompt);
+			const metadata = buildMetadata(opts, params);
+			resolveToolResults(params.prompt, pendingToolSpans);
+			const root = createRootHandle(tracer, opts, model.modelId, model.provider, input, metadata);
+			try {
+				const result = await doGenerate();
+				const content = result.content;
+				const output = extractTextFromContent(content);
+				const inputTokens = result.usage.inputTokens.total ?? 0;
+				const outputTokens = result.usage.outputTokens.total ?? 0;
+				for (const part of content) if (part.type === "tool-call" && part.toolCallId) {
+					const span = createToolSpan(tracer, root, {
+						id: part.toolCallId,
+						toolName: part.toolName ?? "tool",
+						input: part.input ?? ""
+					});
+					if (root.isSpanMode) pendingToolSpans.set(part.toolCallId, span);
+					else span.end({ status: "completed" });
+				}
+				endRootHandle(root, {
+					output,
+					inputTokens,
+					outputTokens
+				});
+				return result;
+			} catch (error) {
+				cleanupOnError(root, pendingToolSpans, error);
+				throw error;
+			}
+		},
+		wrapStream: async ({ doStream, params, model }) => {
+			const input = truncateInput(params.prompt);
+			const metadata = buildMetadata(opts, params);
+			resolveToolResults(params.prompt, pendingToolSpans);
+			const root = createRootHandle(tracer, opts, model.modelId, model.provider, input, metadata);
+			try {
+				const streamStart = Date.now();
+				const { stream, ...rest } = await doStream();
+				let output = "";
+				let inputTokens = 0;
+				let outputTokens = 0;
+				let ttftMs;
+				let firstChunk = true;
+				const toolCalls = [];
+				let currentToolInput = null;
+				const transformStream = new TransformStream({
+					transform(chunk, controller) {
+						switch (chunk.type) {
+							case "text-delta":
+								if (firstChunk) {
+									ttftMs = Date.now() - streamStart;
+									firstChunk = false;
+								}
+								output += chunk.delta;
+								break;
+							case "tool-input-start":
+								currentToolInput = {
+									id: chunk.id,
+									toolName: chunk.toolName,
+									input: ""
+								};
+								break;
+							case "tool-input-delta":
+								if (currentToolInput && currentToolInput.id === chunk.id) currentToolInput.input += chunk.delta;
+								break;
+							case "tool-input-end":
+								if (currentToolInput && currentToolInput.id === chunk.id) {
+									toolCalls.push(currentToolInput);
+									currentToolInput = null;
+								}
+								break;
+							case "finish":
+								inputTokens = chunk.usage.inputTokens.total ?? 0;
+								outputTokens = chunk.usage.outputTokens.total ?? 0;
+								break;
+						}
+						controller.enqueue(chunk);
+					},
+					flush() {
+						for (const tc of toolCalls) {
+							const span = createToolSpan(tracer, root, tc);
+							if (root.isSpanMode) pendingToolSpans.set(tc.id, span);
+							else span.end({ status: "completed" });
+						}
+						endRootHandle(root, {
+							output,
+							inputTokens,
+							outputTokens,
+							ttftMs
+						});
+					}
+				});
+				return {
+					stream: stream.pipeThrough(transformStream),
+					...rest
+				};
+			} catch (error) {
+				cleanupOnError(root, pendingToolSpans, error);
+				throw error;
+			}
+		}
+	};
+}
+
+//#endregion
+Object.defineProperty(exports, 'wrapModelWithMiddleware', {
+	enumerable: true,
+	get: function () {
+		return wrapModelWithMiddleware;
+	}
+});
+//# sourceMappingURL=wrapper-ByspXfxS.cjs.map
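
The CommonJS build carries the same tool-span bookkeeping as the ESM build: in span mode, a "tool" span opened for a tool call stays in pendingToolSpans until a later request's prompt contains the matching tool-result part, at which point resolveToolResults ends it with that output. A rough multi-step sketch of that sequence; the tool definition, schema library, and stop condition follow current AI SDK conventions and are illustrative, not taken from this package:

import { generateText, tool, stepCountIs } from "ai";
import { z } from "zod"; // assumed schema library for the example tool

declare const tracedModel: any; // a model wrapped with { parentTraceId: ... } as sketched earlier

await generateText({
  model: tracedModel,
  tools: {
    getWeather: tool({
      description: "Look up the weather for a city",
      inputSchema: z.object({ city: z.string() }),
      execute: async ({ city }) => ({ city, tempC: 21 }),
    }),
  },
  stopWhen: stepCountIs(3), // allow a follow-up step that feeds the tool result back to the model
  prompt: "What is the weather in Paris?",
});
// Step 1: the model emits a tool-call; the middleware opens a "tool" span and, in span mode,
// parks it in pendingToolSpans keyed by toolCallId.
// Step 2: the next doGenerate/doStream sees a "tool" message whose tool-result part matches that
// toolCallId, so resolveToolResults ends the parked span with the extracted output.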

package/dist/wrapper-ByspXfxS.cjs.map
ADDED

@@ -0,0 +1 @@
+[minified single-line source map for wrapper-ByspXfxS.cjs: version 3, source ../src/wrapper.ts, with the original TypeScript source inlined in sourcesContent and base64-VLQ mappings]
package/package.json
ADDED

@@ -0,0 +1,103 @@
+{
+  "name": "@aui-x/prism",
+  "version": "0.0.1",
+  "description": "LLM tracing SDK for auix — AI SDK middleware + manual tracing",
+  "sideEffects": false,
+  "type": "module",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/index.d.ts",
+        "default": "./dist/index.js"
+      },
+      "require": {
+        "types": "./dist/index.d.cts",
+        "default": "./dist/index.cjs"
+      }
+    },
+    "./core": {
+      "import": {
+        "types": "./dist/core.d.ts",
+        "default": "./dist/core.js"
+      },
+      "require": {
+        "types": "./dist/core.d.cts",
+        "default": "./dist/core.cjs"
+      }
+    },
+    "./integrations/ai-sdk": {
+      "import": {
+        "types": "./dist/integrations/ai-sdk.d.ts",
+        "default": "./dist/integrations/ai-sdk.js"
+      },
+      "require": {
+        "types": "./dist/integrations/ai-sdk.d.cts",
+        "default": "./dist/integrations/ai-sdk.cjs"
+      }
+    },
+    "./integrations/openai": {
+      "import": {
+        "types": "./dist/integrations/openai.d.ts",
+        "default": "./dist/integrations/openai.js"
+      },
+      "require": {
+        "types": "./dist/integrations/openai.d.cts",
+        "default": "./dist/integrations/openai.cjs"
+      }
+    },
+    "./integrations/anthropic": {
+      "import": {
+        "types": "./dist/integrations/anthropic.d.ts",
+        "default": "./dist/integrations/anthropic.js"
+      },
+      "require": {
+        "types": "./dist/integrations/anthropic.d.cts",
+        "default": "./dist/integrations/anthropic.cjs"
+      }
+    }
+  },
+  "main": "./dist/index.cjs",
+  "module": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "files": [
+    "dist",
+    "LICENSE",
+    "README.md"
+  ],
+  "scripts": {
+    "build": "tsdown",
+    "ts-check": "tsc --noEmit"
+  },
+  "license": "SEE LICENSE IN LICENSE",
+  "homepage": "https://auix.dev",
+  "keywords": [
+    "ai",
+    "tracing",
+    "observability",
+    "ai-sdk",
+    "llm"
+  ],
+  "peerDependencies": {
+    "@ai-sdk/provider": ">=2.0.0",
+    "ai": ">=6.0.0",
+    "openai": ">=4.0.0",
+    "@anthropic-ai/sdk": ">=0.30.0"
+  },
+  "peerDependenciesMeta": {
+    "@ai-sdk/provider": {
+      "optional": true
+    },
+    "openai": {
+      "optional": true
+    },
+    "@anthropic-ai/sdk": {
+      "optional": true
+    }
+  },
+  "devDependencies": {
+    "@ai-sdk/provider": "catalog:",
+    "@auix/tsconfig": "workspace:*",
+    "ai": "catalog:",
+    "tsdown": "catalog:"
+  }
+}
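
The exports map ships parallel ESM and CommonJS builds, each with its own declaration files, plus /core and /integrations/* subpaths. A small sketch of how the conditions resolve; namespace imports are used because the entry points' named exports are not visible in this diff:

// ESM consumers hit the "import" condition:
import * as prism from "@aui-x/prism";                     // -> ./dist/index.js (types: ./dist/index.d.ts)
import * as aiSdk from "@aui-x/prism/integrations/ai-sdk"; // -> ./dist/integrations/ai-sdk.js

// CommonJS consumers hit the "require" condition instead:
// const prism = require("@aui-x/prism");                            // -> ./dist/index.cjs (types: ./dist/index.d.cts)
// const anthropic = require("@aui-x/prism/integrations/anthropic"); // -> ./dist/integrations/anthropic.cjs

Per peerDependenciesMeta, the openai and @anthropic-ai/sdk peers are optional, so the provider-specific subpaths only require those SDKs to be installed when they are actually used.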