@pentatonic-ai/ai-agent-sdk 0.3.0-beta.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/wrapper.js ADDED
@@ -0,0 +1,329 @@
1
+ import { Session } from "./session.js";
2
+ import { normalizeResponse } from "./normalizer.js";
3
+ import { rewriteUrls } from "./tracking.js";
4
+
5
/**
 * Identify which supported LLM client was supplied by probing its object
 * shape (duck typing) — no instanceof checks against vendor SDK classes.
 *
 * @param {object|null|undefined} client - Candidate client instance.
 * @returns {"openai"|"anthropic"|"workers-ai"|"unknown"} detected kind
 */
function detectClientType(client) {
  if (client == null) return "unknown";
  const { chat, messages, run } = client;
  // OpenAI SDK exposes client.chat.completions.create
  if (chat?.completions?.create) return "openai";
  // Anthropic SDK exposes client.messages.create
  if (messages?.create) return "anthropic";
  // Cloudflare Workers AI bindings expose a run() function
  return typeof run === "function" ? "workers-ai" : "unknown";
}
14
+
15
/**
 * Wrap any supported LLM client with automatic usage tracking.
 * Auto-detects OpenAI, Anthropic, and Workers AI clients.
 *
 * @param {object} clientConfig - SDK configuration forwarded to the Session.
 * @param {object} client - An OpenAI client, Anthropic client, or Workers AI binding.
 * @param {object} [sessionOpts] - Optional settings: sessionId, metadata,
 *   autoEmit, waitUntil. Not mutated by this call.
 * @returns {Proxy} the wrapped client, same surface as the original.
 * @throws {Error} when the client shape is not recognized.
 */
export function wrapClient(clientConfig, client, sessionOpts = {}) {
  // Resolve the session id once so every call through the wrapper shares it.
  const sessionId = sessionOpts.sessionId || crypto.randomUUID();

  // Work on a shallow copy so the caller's options object is not mutated
  // (the original code wrote _resolvedSessionId/_session onto the argument).
  const opts = {
    ...sessionOpts,
    _resolvedSessionId: sessionId,
    // Shared session accumulates usage and tool calls across rounds.
    _session: new Session(clientConfig, {
      sessionId,
      metadata: sessionOpts.metadata,
    }),
  };

  switch (detectClientType(client)) {
    case "openai":
      return wrapOpenAI(clientConfig, client, opts);
    case "anthropic":
      return wrapAnthropic(clientConfig, client, opts);
    case "workers-ai":
      return wrapWorkersAI(clientConfig, client, opts);
    default:
      throw new Error(
        "Unsupported client: expected OpenAI (chat.completions.create), " +
          "Anthropic (messages.create), or Workers AI (run) client"
      );
  }
}
43
+
44
+ // --- OpenAI ---
45
+
46
/**
 * Proxy an OpenAI client: intercepts `chat` to return the tracking wrapper,
 * exposes `sessionId`/`tesSession`/`session` helpers, and forwards every
 * other property to the underlying client untouched.
 */
function wrapOpenAI(clientConfig, client, sessionOpts) {
  const handler = {
    get(target, prop) {
      switch (prop) {
        case "chat":
          return wrapOpenAIChat(clientConfig, target.chat, target, sessionOpts);
        case "sessionId":
          return sessionOpts._resolvedSessionId;
        case "tesSession":
          return sessionOpts._session;
        case "session":
          return (opts) => new OpenAISession(clientConfig, target, opts);
        default:
          return target[prop];
      }
    },
  };
  return new Proxy(client, handler);
}
59
+
60
/**
 * Proxy the OpenAI `chat` namespace so that `completions` resolves to the
 * tracking wrapper; all other properties pass straight through.
 */
function wrapOpenAIChat(clientConfig, chat, client, sessionOpts) {
  return new Proxy(chat, {
    get(target, prop) {
      if (prop !== "completions") return target[prop];
      return wrapOpenAICompletions(
        clientConfig,
        target.completions,
        client,
        sessionOpts
      );
    },
  });
}
74
+
75
/**
 * Proxy OpenAI `chat.completions` so that `create` is replaced by a tracked
 * variant: it forwards to the real API, rewrites URLs in the assistant's
 * text, emits tracking in the background, and returns the (mutated) result.
 */
function wrapOpenAICompletions(clientConfig, completions, client, sessionOpts) {
  return new Proxy(completions, {
    get(target, prop) {
      if (prop !== "create") return target[prop];
      return async (params) => {
        const response = await target.create(params);
        const message = response.choices?.[0]?.message;
        // Only rewrite when there is non-empty text content.
        if (message?.content) {
          message.content = await rewriteUrls(
            message.content,
            clientConfig,
            sessionOpts._resolvedSessionId,
            sessionOpts.metadata
          );
        }
        // Background tracking; does not delay the caller's response.
        fireAndForgetEmit(clientConfig, sessionOpts, params.messages, response);
        return response;
      };
    },
  });
}
103
+
104
/**
 * Session bound to an OpenAI client: `chat()` proxies
 * `chat.completions.create` and records the result's usage on the session.
 */
class OpenAISession extends Session {
  constructor(clientConfig, client, opts) {
    super(clientConfig, opts);
    // Underlying OpenAI client used for all chat() calls.
    this._client = client;
  }

  /**
   * Run one chat completion and record it.
   * @param {object} params - OpenAI chat.completions.create parameters.
   * @returns {Promise<object>} the raw OpenAI response.
   */
  async chat(params) {
    const response = await this._client.chat.completions.create(params);
    this.record(response);
    return response;
  }
}
116
+
117
+ // --- Anthropic ---
118
+
119
/**
 * Proxy an Anthropic client: intercepts `messages` to return the tracking
 * wrapper, exposes `sessionId`/`tesSession`/`session` helpers, and forwards
 * everything else to the underlying client.
 */
function wrapAnthropic(clientConfig, client, sessionOpts) {
  const handler = {
    get(target, prop) {
      switch (prop) {
        case "messages":
          return wrapAnthropicMessages(
            clientConfig,
            target.messages,
            target,
            sessionOpts
          );
        case "sessionId":
          return sessionOpts._resolvedSessionId;
        case "tesSession":
          return sessionOpts._session;
        case "session":
          return (opts) => new AnthropicSession(clientConfig, target, opts);
        default:
          return target[prop];
      }
    },
  };
  return new Proxy(client, handler);
}
137
+
138
/**
 * Proxy Anthropic `messages` so that `create` is replaced by a tracked
 * variant: forwards to the real API, rewrites URLs inside every text content
 * block, emits tracking in the background, and returns the (mutated) result.
 */
function wrapAnthropicMessages(clientConfig, messages, client, sessionOpts) {
  return new Proxy(messages, {
    get(target, prop) {
      if (prop !== "create") return target[prop];
      return async (params) => {
        const reply = await target.create(params);
        // Anthropic responses carry an array of typed content blocks;
        // only non-empty text blocks get their URLs rewritten.
        if (Array.isArray(reply.content)) {
          for (const part of reply.content) {
            if (part.type !== "text" || !part.text) continue;
            part.text = await rewriteUrls(
              part.text,
              clientConfig,
              sessionOpts._resolvedSessionId,
              sessionOpts.metadata
            );
          }
        }
        // Background tracking; does not delay the caller's response.
        fireAndForgetEmit(clientConfig, sessionOpts, params.messages, reply);
        return reply;
      };
    },
  });
}
169
+
170
/**
 * Session bound to an Anthropic client: `chat()` proxies `messages.create`
 * and records the result's usage on the session.
 */
class AnthropicSession extends Session {
  constructor(clientConfig, client, opts) {
    super(clientConfig, opts);
    // Underlying Anthropic client used for all chat() calls.
    this._client = client;
  }

  /**
   * Run one message creation and record it.
   * @param {object} params - Anthropic messages.create parameters.
   * @returns {Promise<object>} the raw Anthropic response.
   */
  async chat(params) {
    const response = await this._client.messages.create(params);
    this.record(response);
    return response;
  }
}
182
+
183
+ // --- Workers AI ---
184
+
185
/**
 * Proxy a Cloudflare Workers AI binding: intercepts `run` with a tracked
 * variant that rewrites URLs in the response text and emits tracking, and
 * exposes the same `sessionId`/`tesSession`/`session` helpers as the other
 * wrappers. All other properties pass through.
 */
function wrapWorkersAI(clientConfig, aiBinding, sessionOpts) {
  const handler = {
    get(target, prop) {
      switch (prop) {
        case "run":
          return async (model, params, ...rest) => {
            const output = await target.run(model, params, ...rest);
            // Workers AI text models answer via a `response` string field.
            if (output.response) {
              output.response = await rewriteUrls(
                output.response,
                clientConfig,
                sessionOpts._resolvedSessionId,
                sessionOpts.metadata
              );
            }
            // Pass the model through: Workers AI results may not echo it.
            fireAndForgetEmit(
              clientConfig,
              sessionOpts,
              params?.messages,
              output,
              model
            );
            return output;
          };
        case "sessionId":
          return sessionOpts._resolvedSessionId;
        case "tesSession":
          return sessionOpts._session;
        case "session":
          return (opts) => new WorkersAISession(clientConfig, target, opts);
        default:
          return target[prop];
      }
    },
  };
  return new Proxy(aiBinding, handler);
}
217
+
218
/**
 * Session bound to a Workers AI binding: `chat()` proxies `run` and records
 * the result's usage on the session.
 */
class WorkersAISession extends Session {
  constructor(clientConfig, aiBinding, opts) {
    super(clientConfig, opts);
    // Underlying Workers AI binding used for all chat() calls.
    this._ai = aiBinding;
  }

  /**
   * Run one model invocation and record it.
   * @param {string} model - Workers AI model identifier.
   * @param {object} params - Model parameters (messages, etc.).
   * @returns {Promise<object>} the raw Workers AI response.
   */
  async chat(model, params, ...rest) {
    const response = await this._ai.run(model, params, ...rest);
    this.record(response);
    return response;
  }
}
230
+
231
+ // --- Shared ---
232
+
233
/**
 * Pair tool-result messages with the session's recorded tool calls.
 *
 * After the app executes tools, the messages array contains
 * {role:"tool", content, tool_call_id} entries; assistant messages carry the
 * matching tool_call declarations. Each result is attached to the first
 * session tool call that has no result yet and (when the tool name is known)
 * matches by name. JSON content is parsed; arrays are summarised to
 * {count, sample} so large lists don't bloat the event.
 *
 * @param {object} session - Session whose _toolCalls get results attached (mutated).
 * @param {Array<object>|undefined} messages - Conversation messages.
 */
function extractToolResults(session, messages) {
  const toolCalls = session._toolCalls;
  if (!messages?.length || !toolCalls.length) return;

  // tool_call_id -> tool name, gathered from assistant declarations.
  const idToName = new Map();
  for (const msg of messages) {
    if (msg.role !== "assistant" || !msg.tool_calls) continue;
    for (const call of msg.tool_calls) {
      const callId = call.id || call.tool_call_id;
      const name = call.function?.name || call.name;
      if (callId && name) idToName.set(callId, name);
    }
  }

  for (const msg of messages) {
    if (msg.role !== "tool" || !msg.content) continue;

    const name = msg.tool_call_id ? idToName.get(msg.tool_call_id) : null;

    // First resultless tool call; filtered by name when one is known.
    const target = toolCalls.find(
      (tc) => !tc.result && (!name || tc.tool === name)
    );
    if (!target) continue;

    // Prefer parsed JSON; fall back to the raw string on parse failure.
    let value;
    try {
      const parsed = JSON.parse(msg.content);
      value = Array.isArray(parsed)
        ? { count: parsed.length, sample: parsed.slice(0, 3) }
        : parsed;
    } catch {
      value = msg.content;
    }
    target.result = value;
  }
}
281
+
282
/**
 * Record a model result on the shared session and, when appropriate, emit a
 * CHAT_TURN event in the background without blocking the caller.
 *
 * @param {object} clientConfig - SDK configuration (unused directly; kept for signature parity).
 * @param {object} sessionOpts - Holds _session, autoEmit flag, and optional waitUntil.
 * @param {Array<object>|undefined} messages - Conversation messages sent to the model.
 * @param {object} result - Raw provider response.
 * @param {string} [model] - Model id passed to Workers AI run(), used as a fallback.
 */
function fireAndForgetEmit(clientConfig, sessionOpts, messages, result, model) {
  const session = sessionOpts._session;

  // Always record usage/tool calls, even when emission is skipped below.
  const normalized = session.record(result);

  // Attach tool results found in the conversation history.
  extractToolResults(session, messages);

  // Capture the first system prompt we see, once per session.
  if (!session._systemPrompt && messages?.length) {
    const systemMessage = messages.find((m) => m.role === "system");
    if (systemMessage?.content) {
      session._systemPrompt = systemMessage.content;
    }
  }

  // Workers AI may omit the model from the response body; fall back to the
  // model id that was passed to run().
  if (model && !normalized.model) {
    session._model = model;
  }

  // autoEmit === false means the caller controls event emission; the wrapper
  // still tracked usage/tool calls via session.record() above.
  if (sessionOpts.autoEmit === false) return;

  // Tool-call rounds without text are intermediate: hold off emitting until
  // the final text response arrives, so one CHAT_TURN event covers the whole
  // multi-round turn with all accumulated tool calls and usage.
  if (!normalized.content && normalized.toolCalls.length > 0) return;

  const userMessage =
    messages?.filter?.((m) => m.role === "user")?.pop()?.content || "";
  const assistantResponse = normalized.content || "";

  // Fire-and-forget: failures are logged, never surfaced to the caller.
  const emitPromise = session
    .emitChatTurn({ userMessage, assistantResponse })
    .catch((err) => console.error("[pentatonic-ai] emit failed:", err.message));

  // Cloudflare Workers terminate once the response is sent; waitUntil keeps
  // the isolate alive long enough for this background emission to finish.
  if (typeof sessionOpts.waitUntil === "function") {
    sessionOpts.waitUntil(emitPromise);
  }
}