@posthog/ai 5.2.3 → 6.0.1
This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as published in their public registries.
- package/dist/anthropic/index.cjs +37 -5
- package/dist/anthropic/index.cjs.map +1 -1
- package/dist/anthropic/index.mjs +37 -5
- package/dist/anthropic/index.mjs.map +1 -1
- package/dist/gemini/index.cjs +67 -25
- package/dist/gemini/index.cjs.map +1 -1
- package/dist/gemini/index.d.ts +0 -1
- package/dist/gemini/index.mjs +67 -25
- package/dist/gemini/index.mjs.map +1 -1
- package/dist/index.cjs +384 -165
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +3 -3
- package/dist/index.mjs +385 -166
- package/dist/index.mjs.map +1 -1
- package/dist/langchain/index.cjs +29 -9
- package/dist/langchain/index.cjs.map +1 -1
- package/dist/langchain/index.d.ts +1 -0
- package/dist/langchain/index.mjs +29 -9
- package/dist/langchain/index.mjs.map +1 -1
- package/dist/openai/index.cjs +106 -5
- package/dist/openai/index.cjs.map +1 -1
- package/dist/openai/index.mjs +106 -5
- package/dist/openai/index.mjs.map +1 -1
- package/dist/vercel/index.cjs +173 -122
- package/dist/vercel/index.cjs.map +1 -1
- package/dist/vercel/index.d.ts +2 -2
- package/dist/vercel/index.mjs +174 -123
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +4 -3
package/dist/vercel/index.cjs
CHANGED
@@ -36,6 +36,20 @@ const truncate = str => {
     return str;
   }
 };
+
+/**
+ * Extract available tool calls from the request parameters.
+ * These are the tools provided to the LLM, not the tool calls in the response.
+ */
+const extractAvailableToolCalls = (provider, params) => {
+  {
+    // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'
+    if (params.mode?.type === 'regular' && params.mode.tools) {
+      return params.mode.tools;
+    }
+    return null;
+  }
+};
 function sanitizeValues(obj) {
   if (obj === undefined || obj === null) {
     return obj;
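For reference, a minimal sketch of how the added helper resolves tools; the call options below are hypothetical and only follow the shape described by the helper's own comment:

// The helper as added above, exercised with a hypothetical params object.
const extractAvailableToolCalls = (provider, params) => {
  {
    // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'
    if (params.mode?.type === 'regular' && params.mode.tools) {
      return params.mode.tools;
    }
    return null;
  }
};

// Hypothetical call options; only the mode/tools shape matters here.
const params = {
  mode: {
    type: 'regular',
    tools: [{ type: 'function', name: 'get_weather', parameters: { type: 'object', properties: {} } }]
  }
};

console.log(extractAvailableToolCalls('vercel', params)); // → the tools array
console.log(extractAvailableToolCalls('vercel', {}));     // → null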
@@ -143,7 +157,7 @@ const sendEventToPosthog = async ({
 const mapVercelParams = params => {
   return {
     temperature: params.temperature,
-
+    max_output_tokens: params.maxOutputTokens,
     top_p: params.topP,
     frequency_penalty: params.frequencyPenalty,
     presence_penalty: params.presencePenalty,
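The mapped key now reads params.maxOutputTokens (the AI SDK v5 option name) and is reported as max_output_tokens. A small sketch of the resulting mapping, with hypothetical call options:

// Sketch only: the mapping as it stands after this change.
const mapVercelParams = params => ({
  temperature: params.temperature,
  max_output_tokens: params.maxOutputTokens,
  top_p: params.topP,
  frequency_penalty: params.frequencyPenalty,
  presence_penalty: params.presencePenalty,
  stream: params.stream
});

console.log(mapVercelParams({ temperature: 0.2, maxOutputTokens: 1024, topP: 0.9 }));
// → { temperature: 0.2, max_output_tokens: 1024, top_p: 0.9, ... } (unset options come through as undefined)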
@@ -151,79 +165,68 @@ const mapVercelParams = params => {
     stream: params.stream
   };
 };
-const mapVercelPrompt =
-  // normalize single inputs into an array of messages
-  let promptsArray;
-  if (typeof prompt === 'string') {
-    promptsArray = [{
-      role: 'user',
-      content: prompt
-    }];
-  } else if (!Array.isArray(prompt)) {
-    promptsArray = [prompt];
-  } else {
-    promptsArray = prompt;
-  }
-
+const mapVercelPrompt = messages => {
   // Map and truncate individual content
-  const inputs =
-    let content
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      content: {
+  const inputs = messages.map(message => {
+    let content;
+
+    // Handle system role which has string content
+    if (message.role === 'system') {
+      content = [{
+        type: 'text',
+        text: truncate(String(message.content))
+      }];
+    } else {
+      // Handle other roles which have array content
+      if (Array.isArray(message.content)) {
+        content = message.content.map(c => {
+          if (c.type === 'text') {
+            return {
+              type: 'text',
+              text: truncate(c.text)
+            };
+          } else if (c.type === 'file') {
+            return {
+              type: 'file',
               file: c.data instanceof URL ? c.data.toString() : 'raw files not supported',
-
-            }
-          }
-
-
-
-
+              mediaType: c.mediaType
+            };
+          } else if (c.type === 'reasoning') {
+            return {
+              type: 'reasoning',
+              text: truncate(c.reasoning)
+            };
+          } else if (c.type === 'tool-call') {
+            return {
+              type: 'tool-call',
               toolCallId: c.toolCallId,
               toolName: c.toolName,
-
-            }
-          }
-
-
-            type: 'tool-result',
-            content: {
+              input: c.input
+            };
+          } else if (c.type === 'tool-result') {
+            return {
+              type: 'tool-result',
               toolCallId: c.toolCallId,
               toolName: c.toolName,
-
+              output: c.output,
               isError: c.isError
-            }
+            };
+          }
+          return {
+            type: 'text',
+            text: ''
           };
-        }
-
-
-
-
-
-
-
-        text: truncate(p.content)
-      };
+        });
+      } else {
+        // Fallback for non-array content
+        content = [{
+          type: 'text',
+          text: truncate(String(message.content))
+        }];
+      }
     }
     return {
-      role:
+      role: message.role,
       content
     };
   });
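To illustrate the new mapping, here is a hypothetical v5-style message list and the shape mapVercelPrompt should produce for it, following the branches above (the messages are invented for illustration, not taken from this diff):

// Hypothetical prompt messages.
const messages = [
  { role: 'system', content: 'You are terse.' },
  {
    role: 'user',
    content: [
      { type: 'text', text: 'Summarize the attached report.' },
      { type: 'file', data: new URL('https://example.com/report.pdf'), mediaType: 'application/pdf' }
    ]
  }
];

// Expected result of mapVercelPrompt(messages), per the code above:
// [
//   { role: 'system', content: [{ type: 'text', text: 'You are terse.' }] },
//   { role: 'user', content: [
//       { type: 'text', text: 'Summarize the attached report.' },
//       { type: 'file', file: 'https://example.com/report.pdf', mediaType: 'application/pdf' }
//     ] }
// ]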
@@ -255,52 +258,75 @@ const mapVercelPrompt = prompt => {
   return inputs;
 };
 const mapVercelOutput = result => {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  }
-
-
-
-
-
-  }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  const content = result.map(item => {
+    if (item.type === 'text') {
+      return {
+        type: 'text',
+        text: truncate(item.text)
+      };
+    }
+    if (item.type === 'tool-call') {
+      return {
+        type: 'tool-call',
+        id: item.toolCallId,
+        function: {
+          name: item.toolName,
+          arguments: item.args || JSON.stringify(item.arguments || {})
+        }
+      };
+    }
+    if (item.type === 'reasoning') {
+      return {
+        type: 'reasoning',
+        text: truncate(item.text)
+      };
+    }
+    if (item.type === 'file') {
+      // Handle files similar to input mapping - avoid large base64 data
+      let fileData;
+      if (item.data instanceof URL) {
+        fileData = item.data.toString();
+      } else if (typeof item.data === 'string') {
+        // Check if it's base64 data and potentially large
+        if (item.data.startsWith('data:') || item.data.length > 1000) {
+          fileData = `[${item.mediaType} file - ${item.data.length} bytes]`;
+        } else {
+          fileData = item.data;
+        }
+      } else {
+        fileData = `[binary ${item.mediaType} file]`;
+      }
+      return {
+        type: 'file',
+        name: 'generated_file',
+        mediaType: item.mediaType,
+        data: fileData
+      };
+    }
+    if (item.type === 'source') {
+      return {
+        type: 'source',
+        sourceType: item.sourceType,
+        id: item.id,
+        url: item.url || '',
+        title: item.title || ''
+      };
+    }
+    // Fallback for unknown types - try to extract text if possible
+    return {
+      type: 'text',
+      text: truncate(JSON.stringify(item))
+    };
+  });
+  if (content.length > 0) {
     return [{
-
-
+      role: 'assistant',
+      content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
     }];
   }
   // otherwise stringify and truncate
   try {
-    const jsonOutput = JSON.stringify(
+    const jsonOutput = JSON.stringify(result);
     return [{
       content: truncate(jsonOutput),
       role: 'assistant'
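Similarly, a hypothetical result.content array and the shape mapVercelOutput should produce for it (when the mapped content is a single text item, the code above collapses it to a plain string):

// Hypothetical generation result content.
const resultContent = [
  { type: 'reasoning', text: 'The user wants a forecast.' },
  { type: 'text', text: 'It will rain tomorrow.' },
  { type: 'tool-call', toolCallId: 'call_1', toolName: 'get_weather', args: '{"city":"Lisbon"}' }
];

// Expected result of mapVercelOutput(resultContent), per the code above:
// [{
//   role: 'assistant',
//   content: [
//     { type: 'reasoning', text: 'The user wants a forecast.' },
//     { type: 'text', text: 'It will rain tomorrow.' },
//     { type: 'tool-call', id: 'call_1', function: { name: 'get_weather', arguments: '{"city":"Lisbon"}' } }
//   ]
// }]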
@@ -326,14 +352,14 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         ...options,
         ...mapVercelParams(params)
       };
+      const availableTools = extractAvailableToolCalls('vercel', params);
       try {
         const result = await doGenerate();
-        const latency = (Date.now() - startTime) / 1000;
         const modelId = options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId);
         const provider = options.posthogProviderOverride ?? extractProvider(model);
         const baseURL = ''; // cannot currently get baseURL from vercel
-        const content = mapVercelOutput(result);
-
+        const content = mapVercelOutput(result.content);
+        const latency = (Date.now() - startTime) / 1000;
         const providerMetadata = result.providerMetadata;
         const additionalTokenValues = {
           ...(providerMetadata?.openai?.reasoningTokens ? {
@@ -354,19 +380,17 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           model: modelId,
           provider: provider,
           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
-          output:
-            content,
-            role: 'assistant'
-          }],
+          output: content,
           latency,
           baseURL,
           params: mergedParams,
           httpStatus: 200,
           usage: {
-            inputTokens: result.usage.
-            outputTokens: result.usage.
+            inputTokens: result.usage.inputTokens,
+            outputTokens: result.usage.outputTokens,
             ...additionalTokenValues
           },
+          tools: availableTools,
           captureImmediate: options.posthogCaptureImmediate
         });
         return result;
@@ -390,6 +414,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           },
           isError: true,
           error: truncate(JSON.stringify(error)),
+          tools: availableTools,
           captureImmediate: options.posthogCaptureImmediate
         });
         throw error;
@@ -401,6 +426,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
     }) => {
       const startTime = Date.now();
       let generatedText = '';
+      let reasoningText = '';
       let usage = {};
       const mergedParams = {
         ...options,
@@ -408,7 +434,9 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       };
       const modelId = options.posthogModelOverride ?? model.modelId;
       const provider = options.posthogProviderOverride ?? extractProvider(model);
+      const availableTools = extractAvailableToolCalls('vercel', params);
       const baseURL = ''; // cannot currently get baseURL from vercel
+
       try {
         const {
           stream,
@@ -416,13 +444,17 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         } = await doStream();
         const transformStream = new TransformStream({
           transform(chunk, controller) {
+            // Handle new v5 streaming patterns
             if (chunk.type === 'text-delta') {
-              generatedText += chunk.
+              generatedText += chunk.delta;
+            }
+            if (chunk.type === 'reasoning-delta') {
+              reasoningText += chunk.delta; // New in v5
             }
             if (chunk.type === 'finish') {
               usage = {
-                inputTokens: chunk.usage?.
-                outputTokens: chunk.usage?.
+                inputTokens: chunk.usage?.inputTokens,
+                outputTokens: chunk.usage?.outputTokens
               };
               if (chunk.providerMetadata?.openai?.reasoningTokens) {
                 usage.reasoningTokens = chunk.providerMetadata.openai.reasoningTokens;
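A standalone sketch of the same accumulation logic fed with hypothetical v5 stream parts, showing what the transform collects before flush:

// Sketch only: mirrors the transform above outside of a TransformStream.
let generatedText = '';
let reasoningText = '';
let usage = {};

const chunks = [
  { type: 'reasoning-delta', delta: 'Check the forecast. ' },
  { type: 'text-delta', delta: 'It will ' },
  { type: 'text-delta', delta: 'rain tomorrow.' },
  { type: 'finish', usage: { inputTokens: 42, outputTokens: 9 } }
];

for (const chunk of chunks) {
  if (chunk.type === 'text-delta') generatedText += chunk.delta;
  if (chunk.type === 'reasoning-delta') reasoningText += chunk.delta;
  if (chunk.type === 'finish') {
    usage = { inputTokens: chunk.usage?.inputTokens, outputTokens: chunk.usage?.outputTokens };
  }
}

console.log(generatedText); // 'It will rain tomorrow.'
console.log(reasoningText); // 'Check the forecast. '
console.log(usage);         // { inputTokens: 42, outputTokens: 9 }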
@@ -441,6 +473,26 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           },
           flush: async () => {
             const latency = (Date.now() - startTime) / 1000;
+            // Build content array similar to mapVercelOutput structure
+            const content = [];
+            if (reasoningText) {
+              content.push({
+                type: 'reasoning',
+                text: truncate(reasoningText)
+              });
+            }
+            if (generatedText) {
+              content.push({
+                type: 'text',
+                text: truncate(generatedText)
+              });
+            }
+
+            // Structure output like mapVercelOutput does
+            const output = content.length > 0 ? [{
+              role: 'assistant',
+              content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
+            }] : [];
             await sendEventToPosthog({
               client: phClient,
               distinctId: options.posthogDistinctId,
@@ -448,15 +500,13 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
               model: modelId,
               provider: provider,
               input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
-              output:
-                content: generatedText,
-                role: 'assistant'
-              }],
+              output: output,
               latency,
               baseURL,
               params: mergedParams,
               httpStatus: 200,
               usage,
+              tools: availableTools,
               captureImmediate: options.posthogCaptureImmediate
             });
           }
@@ -484,6 +534,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         },
         isError: true,
         error: truncate(JSON.stringify(error)),
+        tools: availableTools,
         captureImmediate: options.posthogCaptureImmediate
       });
       throw error;
@@ -499,7 +550,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
     posthogTraceId: traceId,
     posthogDistinctId: options.posthogDistinctId
   });
-  const wrappedModel = ai.
+  const wrappedModel = ai.wrapLanguageModel({
     model,
     middleware
   });
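For orientation, this is roughly how a middleware-wrapped model is consumed with the AI SDK's wrapLanguageModel; the provider, model name, and the no-op middleware below are illustrative stand-ins, not the instrumentation middleware built by this package:

// Sketch only: wrapping a model with a pass-through middleware and generating text.
import { wrapLanguageModel, generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

const middleware = {
  // Pass-through stand-in for the instrumentation middleware.
  wrapGenerate: async ({ doGenerate }) => doGenerate()
};

const model = wrapLanguageModel({
  model: openai('gpt-4.1-mini'),
  middleware
});

const { text } = await generateText({ model, prompt: 'Say hello.' });
console.log(text);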